stage2: add interned AIR tag

This additionally required passing the `InternPool` into several `Air`
methods, namely `typeOf`, `typeOfIndex`, and `mustLower`.

Also, implement `Type.isNoReturn` for interned types.
Andrew Kelley 2023-05-02 14:37:33 -07:00
parent c7e84ddb72
commit 00f82f1c46
17 changed files with 1050 additions and 961 deletions
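In outline, the change teaches AIR to reference a comptime-known value directly by its `InternPool.Index`. The following sketch is not a compilable excerpt of the compiler; the names are taken from the hunks below and everything elided is marked with `...`. It shows the three pieces that make this work: the new `interned` tag, its payload in `Inst.Data`, and the extra `InternPool` parameter that `typeOfIndex` needs in order to resolve such an instruction's type.

const InternPool = @import("InternPool.zig");

pub const Inst = struct {
    pub const Tag = enum {
        // ... existing tags ...
        /// A comptime-known value via an index into the InternPool.
        /// Uses the `interned` field.
        interned,
    };

    pub const Data = union {
        // ... existing payloads ...
        interned: InternPool.Index,
    };
};

// typeOf/typeOfIndex (and mustLower) now take the InternPool so the type of
// an `interned` instruction can be looked up. Like `.constant`, the tag is
// excluded from function bodies, so backends treat it as unreachable there.
pub fn typeOfIndex(air: Air, inst: Air.Inst.Index, ip: InternPool) Type {
    const datas = air.instructions.items(.data);
    return switch (air.instructions.items(.tag)[inst]) {
        .interned => ip.indexToKey(datas[inst].interned).typeOf().toType(),
        // ... every other case is unchanged apart from forwarding `ip` ...
        else => unreachable,
    };
}

Correspondingly, `Liveness.analyze` and the liveness verifier now carry a `*const InternPool` so they can forward it to `mustLower`, and `Sema.addType` emits an `.interned` instruction for any type whose `ip_index` is not one of the statically known refs, replacing the long hand-written switch over primitive types.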


@ -11,6 +11,7 @@ const Air = @This();
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const InternPool = @import("InternPool.zig");
const Module = @import("Module.zig");
instructions: std.MultiArrayList(Inst).Slice,
/// The meaning of this data is determined by `Inst.Tag` value.
@ -401,6 +402,9 @@ pub const Inst = struct {
constant,
/// A comptime-known type. Uses the `ty` field.
const_ty,
/// A comptime-known value via an index into the InternPool.
/// Uses the `interned` field.
interned,
/// Notes the beginning of a source code statement and marks the line and column.
/// Result type is always void.
/// Uses the `dbg_stmt` field.
@ -928,6 +932,7 @@ pub const Inst = struct {
pub const Data = union {
no_op: void,
un_op: Ref,
interned: InternPool.Index,
bin_op: struct {
lhs: Ref,
@ -1147,18 +1152,15 @@ pub fn getMainBody(air: Air) []const Air.Inst.Index {
return air.extra[extra.end..][0..extra.data.body_len];
}
pub fn typeOf(air: Air, inst: Air.Inst.Ref) Type {
pub fn typeOf(air: Air, inst: Air.Inst.Ref, ip: InternPool) Type {
const ref_int = @enumToInt(inst);
if (ref_int < InternPool.static_keys.len) {
return .{
.ip_index = InternPool.static_keys[ref_int].typeOf(),
.legacy = undefined,
};
return InternPool.static_keys[ref_int].typeOf().toType();
}
return air.typeOfIndex(ref_int - ref_start_index);
return air.typeOfIndex(ref_int - ref_start_index, ip);
}
pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
pub fn typeOfIndex(air: Air, inst: Air.Inst.Index, ip: InternPool) Type {
const datas = air.instructions.items(.data);
switch (air.instructions.items(.tag)[inst]) {
.add,
@ -1200,7 +1202,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.div_exact_optimized,
.rem_optimized,
.mod_optimized,
=> return air.typeOf(datas[inst].bin_op.lhs),
=> return air.typeOf(datas[inst].bin_op.lhs, ip),
.sqrt,
.sin,
@ -1218,7 +1220,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.trunc_float,
.neg,
.neg_optimized,
=> return air.typeOf(datas[inst].un_op),
=> return air.typeOf(datas[inst].un_op, ip),
.cmp_lt,
.cmp_lte,
@ -1280,6 +1282,8 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.try_ptr,
=> return air.getRefType(datas[inst].ty_pl.ty),
.interned => return ip.indexToKey(datas[inst].interned).typeOf().toType(),
.not,
.bitcast,
.load,
@ -1371,33 +1375,33 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.tag_name, .error_name => return Type.initTag(.const_slice_u8_sentinel_0),
.call, .call_always_tail, .call_never_tail, .call_never_inline => {
const callee_ty = air.typeOf(datas[inst].pl_op.operand);
const callee_ty = air.typeOf(datas[inst].pl_op.operand, ip);
return callee_ty.fnReturnType();
},
.slice_elem_val, .ptr_elem_val, .array_elem_val => {
const ptr_ty = air.typeOf(datas[inst].bin_op.lhs);
const ptr_ty = air.typeOf(datas[inst].bin_op.lhs, ip);
return ptr_ty.elemType();
},
.atomic_load => {
const ptr_ty = air.typeOf(datas[inst].atomic_load.ptr);
const ptr_ty = air.typeOf(datas[inst].atomic_load.ptr, ip);
return ptr_ty.elemType();
},
.atomic_rmw => {
const ptr_ty = air.typeOf(datas[inst].pl_op.operand);
const ptr_ty = air.typeOf(datas[inst].pl_op.operand, ip);
return ptr_ty.elemType();
},
.reduce, .reduce_optimized => return air.typeOf(datas[inst].reduce.operand).childType(),
.reduce, .reduce_optimized => return air.typeOf(datas[inst].reduce.operand, ip).childType(),
.mul_add => return air.typeOf(datas[inst].pl_op.operand),
.mul_add => return air.typeOf(datas[inst].pl_op.operand, ip),
.select => {
const extra = air.extraData(Air.Bin, datas[inst].pl_op.payload).data;
return air.typeOf(extra.lhs);
return air.typeOf(extra.lhs, ip);
},
.@"try" => {
const err_union_ty = air.typeOf(datas[inst].pl_op.operand);
const err_union_ty = air.typeOf(datas[inst].pl_op.operand, ip);
return err_union_ty.errorUnionPayload();
},
@ -1465,7 +1469,7 @@ pub fn refToIndex(inst: Air.Inst.Ref) ?Air.Inst.Index {
}
/// Returns `null` if runtime-known.
pub fn value(air: Air, inst: Air.Inst.Ref, mod: *const @import("Module.zig")) ?Value {
pub fn value(air: Air, inst: Air.Inst.Ref, mod: *const Module) ?Value {
const ref_int = @enumToInt(inst);
if (ref_int < ref_start_index) {
const ip_index = @intToEnum(InternPool.Index, ref_int);
@ -1476,7 +1480,7 @@ pub fn value(air: Air, inst: Air.Inst.Ref, mod: *const @import("Module.zig")) ?V
switch (air.instructions.items(.tag)[inst_index]) {
.constant => return air.values[air_datas[inst_index].ty_pl.payload],
.const_ty => unreachable,
else => return air.typeOfIndex(inst_index).onePossibleValue(mod),
else => return air.typeOfIndex(inst_index, mod.intern_pool).onePossibleValue(mod),
}
}
@ -1489,10 +1493,11 @@ pub fn nullTerminatedString(air: Air, index: usize) [:0]const u8 {
return bytes[0..end :0];
}
/// Returns whether the given instruction must always be lowered, for instance because it can cause
/// side effects. If an instruction does not need to be lowered, and Liveness determines its result
/// is unused, backends should avoid lowering it.
pub fn mustLower(air: Air, inst: Air.Inst.Index) bool {
/// Returns whether the given instruction must always be lowered, for instance
/// because it can cause side effects. If an instruction does not need to be
/// lowered, and Liveness determines its result is unused, backends should
/// avoid lowering it.
pub fn mustLower(air: Air, inst: Air.Inst.Index, ip: InternPool) bool {
const data = air.instructions.items(.data)[inst];
return switch (air.instructions.items(.tag)[inst]) {
.arg,
@ -1631,6 +1636,7 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index) bool {
.cmp_vector_optimized,
.constant,
.const_ty,
.interned,
.is_null,
.is_non_null,
.is_null_ptr,
@ -1699,8 +1705,8 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index) bool {
=> false,
.assembly => @truncate(u1, air.extraData(Air.Asm, data.ty_pl.payload).data.flags >> 31) != 0,
.load => air.typeOf(data.ty_op.operand).isVolatilePtr(),
.slice_elem_val, .ptr_elem_val => air.typeOf(data.bin_op.lhs).isVolatilePtr(),
.atomic_load => air.typeOf(data.atomic_load.ptr).isVolatilePtr(),
.load => air.typeOf(data.ty_op.operand, ip).isVolatilePtr(),
.slice_elem_val, .ptr_elem_val => air.typeOf(data.bin_op.lhs, ip).isVolatilePtr(),
.atomic_load => air.typeOf(data.atomic_load.ptr, ip).isVolatilePtr(),
};
}


@ -241,6 +241,11 @@ pub const Item = struct {
/// When adding a tag to this enum, consider adding a corresponding entry to
/// `primitives` in AstGen.zig.
pub const Index = enum(u32) {
pub const first_type: Index = .u1_type;
pub const last_type: Index = .empty_struct_type;
pub const first_value: Index = .undef;
pub const last_value: Index = .empty_struct;
u1_type,
u8_type,
i8_type,
@ -329,6 +334,7 @@ pub const Index = enum(u32) {
bool_false,
/// `.{}` (untyped)
empty_struct,
/// Used for generic parameters where the type and value
/// is not known until generic function instantiation.
generic_poison,


@ -131,7 +131,7 @@ fn LivenessPassData(comptime pass: LivenessPass) type {
};
}
pub fn analyze(gpa: Allocator, air: Air) Allocator.Error!Liveness {
pub fn analyze(gpa: Allocator, air: Air, intern_pool: *const InternPool) Allocator.Error!Liveness {
const tracy = trace(@src());
defer tracy.end();
@ -144,6 +144,7 @@ pub fn analyze(gpa: Allocator, air: Air) Allocator.Error!Liveness {
),
.extra = .{},
.special = .{},
.intern_pool = intern_pool,
};
errdefer gpa.free(a.tomb_bits);
errdefer a.special.deinit(gpa);
@ -322,6 +323,7 @@ pub fn categorizeOperand(
.ret_ptr,
.constant,
.const_ty,
.interned,
.trap,
.breakpoint,
.dbg_stmt,
@ -820,6 +822,7 @@ pub const BigTomb = struct {
const Analysis = struct {
gpa: Allocator,
air: Air,
intern_pool: *const InternPool,
tomb_bits: []usize,
special: std.AutoHashMapUnmanaged(Air.Inst.Index, u32),
extra: std.ArrayListUnmanaged(u32),
@ -971,6 +974,7 @@ fn analyzeInst(
.constant,
.const_ty,
.interned,
=> unreachable,
.trap,
@ -1255,6 +1259,7 @@ fn analyzeOperands(
) Allocator.Error!void {
const gpa = a.gpa;
const inst_tags = a.air.instructions.items(.tag);
const ip = a.intern_pool;
switch (pass) {
.loop_analysis => {
@ -1265,7 +1270,7 @@ fn analyzeOperands(
// Don't compute any liveness for constants
switch (inst_tags[operand]) {
.constant, .const_ty => continue,
.constant, .const_ty, .interned => continue,
else => {},
}
@ -1290,7 +1295,7 @@ fn analyzeOperands(
// If our result is unused and the instruction doesn't need to be lowered, backends will
// skip the lowering of this instruction, so we don't want to record uses of operands.
// That way, we can mark as many instructions as possible unused.
if (!immediate_death or a.air.mustLower(inst)) {
if (!immediate_death or a.air.mustLower(inst, ip.*)) {
// Note that it's important we iterate over the operands backwards, so that if a dying
// operand is used multiple times we mark its last use as its death.
var i = operands.len;
@ -1301,7 +1306,7 @@ fn analyzeOperands(
// Don't compute any liveness for constants
switch (inst_tags[operand]) {
.constant, .const_ty => continue,
.constant, .const_ty, .interned => continue,
else => {},
}
@ -1821,6 +1826,7 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
/// Must be called with operands in reverse order.
fn feed(big: *Self, op_ref: Air.Inst.Ref) !void {
const ip = big.a.intern_pool;
// Note that after this, `operands_remaining` becomes the index of the current operand
big.operands_remaining -= 1;
@ -1834,14 +1840,14 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
// Don't compute any liveness for constants
const inst_tags = big.a.air.instructions.items(.tag);
switch (inst_tags[operand]) {
.constant, .const_ty => return,
.constant, .const_ty, .interned => return,
else => {},
}
// If our result is unused and the instruction doesn't need to be lowered, backends will
// skip the lowering of this instruction, so we don't want to record uses of operands.
// That way, we can mark as many instructions as possible unused.
if (big.will_die_immediately and !big.a.air.mustLower(big.inst)) return;
if (big.will_die_immediately and !big.a.air.mustLower(big.inst, ip.*)) return;
const extra_byte = (big.operands_remaining - (bpi - 1)) / 31;
const extra_bit = @intCast(u5, big.operands_remaining - (bpi - 1) - extra_byte * 31);


@ -5,6 +5,7 @@ air: Air,
liveness: Liveness,
live: LiveMap = .{},
blocks: std.AutoHashMapUnmanaged(Air.Inst.Index, LiveMap) = .{},
intern_pool: *const InternPool,
pub const Error = error{ LivenessInvalid, OutOfMemory };
@ -27,10 +28,11 @@ pub fn verify(self: *Verify) Error!void {
const LiveMap = std.AutoHashMapUnmanaged(Air.Inst.Index, void);
fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void {
const ip = self.intern_pool;
const tag = self.air.instructions.items(.tag);
const data = self.air.instructions.items(.data);
for (body) |inst| {
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) {
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*)) {
// This instruction will not be lowered and should be ignored.
continue;
}
@ -42,6 +44,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void {
.ret_ptr,
.constant,
.const_ty,
.interned,
.breakpoint,
.dbg_stmt,
.dbg_inline_begin,
@ -554,7 +557,7 @@ fn verifyDeath(self: *Verify, inst: Air.Inst.Index, operand: Air.Inst.Index) Err
fn verifyOperand(self: *Verify, inst: Air.Inst.Index, op_ref: Air.Inst.Ref, dies: bool) Error!void {
const operand = Air.refToIndex(op_ref) orelse return;
switch (self.air.instructions.items(.tag)[operand]) {
.constant, .const_ty => {},
.constant, .const_ty, .interned => {},
else => {
if (dies) {
if (!self.live.remove(operand)) return invalid("%{}: dead operand %{} reused and killed again", .{ inst, operand });
@ -576,7 +579,7 @@ fn verifyInst(
}
const tag = self.air.instructions.items(.tag);
switch (tag[inst]) {
.constant, .const_ty => unreachable,
.constant, .const_ty, .interned => unreachable,
else => {
if (self.liveness.isUnused(inst)) {
assert(!self.live.contains(inst));
@ -604,4 +607,5 @@ const log = std.log.scoped(.liveness_verify);
const Air = @import("../Air.zig");
const Liveness = @import("../Liveness.zig");
const InternPool = @import("../InternPool.zig");
const Verify = @This();


@ -4397,7 +4397,7 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
if (no_bin_file and !dump_air and !dump_llvm_ir) return;
log.debug("analyze liveness of {s}", .{decl.name});
var liveness = try Liveness.analyze(gpa, air);
var liveness = try Liveness.analyze(gpa, air, &mod.intern_pool);
defer liveness.deinit(gpa);
if (dump_air) {
@ -4414,6 +4414,7 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
.gpa = gpa,
.air = air,
.liveness = liveness,
.intern_pool = &mod.intern_pool,
};
defer verify.deinit();


@ -33007,7 +33007,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
/// Returns the type of the AIR instruction.
fn typeOf(sema: *Sema, inst: Air.Inst.Ref) Type {
return sema.getTmpAir().typeOf(inst);
return sema.getTmpAir().typeOf(inst, sema.mod.intern_pool);
}
pub fn getTmpAir(sema: Sema) Air {
@ -33019,88 +33019,14 @@ pub fn getTmpAir(sema: Sema) Air {
}
pub fn addType(sema: *Sema, ty: Type) !Air.Inst.Ref {
switch (ty.ip_index) {
.u1_type => return .u1_type,
.u8_type => return .u8_type,
.i8_type => return .i8_type,
.u16_type => return .u16_type,
.i16_type => return .i16_type,
.u29_type => return .u29_type,
.u32_type => return .u32_type,
.i32_type => return .i32_type,
.u64_type => return .u64_type,
.i64_type => return .i64_type,
.u80_type => return .u80_type,
.u128_type => return .u128_type,
.i128_type => return .i128_type,
.usize_type => return .usize_type,
.isize_type => return .isize_type,
.c_char_type => return .c_char_type,
.c_short_type => return .c_short_type,
.c_ushort_type => return .c_ushort_type,
.c_int_type => return .c_int_type,
.c_uint_type => return .c_uint_type,
.c_long_type => return .c_long_type,
.c_ulong_type => return .c_ulong_type,
.c_longlong_type => return .c_longlong_type,
.c_ulonglong_type => return .c_ulonglong_type,
.c_longdouble_type => return .c_longdouble_type,
.f16_type => return .f16_type,
.f32_type => return .f32_type,
.f64_type => return .f64_type,
.f80_type => return .f80_type,
.f128_type => return .f128_type,
.anyopaque_type => return .anyopaque_type,
.bool_type => return .bool_type,
.void_type => return .void_type,
.type_type => return .type_type,
.anyerror_type => return .anyerror_type,
.comptime_int_type => return .comptime_int_type,
.comptime_float_type => return .comptime_float_type,
.noreturn_type => return .noreturn_type,
.anyframe_type => return .anyframe_type,
.null_type => return .null_type,
.undefined_type => return .undefined_type,
.enum_literal_type => return .enum_literal_type,
.atomic_order_type => return .atomic_order_type,
.atomic_rmw_op_type => return .atomic_rmw_op_type,
.calling_convention_type => return .calling_convention_type,
.address_space_type => return .address_space_type,
.float_mode_type => return .float_mode_type,
.reduce_op_type => return .reduce_op_type,
.call_modifier_type => return .call_modifier_type,
.prefetch_options_type => return .prefetch_options_type,
.export_options_type => return .export_options_type,
.extern_options_type => return .extern_options_type,
.type_info_type => return .type_info_type,
.manyptr_u8_type => return .manyptr_u8_type,
.manyptr_const_u8_type => return .manyptr_const_u8_type,
.single_const_pointer_to_comptime_int_type => return .single_const_pointer_to_comptime_int_type,
.const_slice_u8_type => return .const_slice_u8_type,
.anyerror_void_error_union_type => return .anyerror_void_error_union_type,
.generic_poison_type => return .generic_poison_type,
.var_args_param_type => return .var_args_param_type,
.empty_struct_type => return .empty_struct_type,
// values
.undef => unreachable,
.zero => unreachable,
.zero_usize => unreachable,
.one => unreachable,
.one_usize => unreachable,
.calling_convention_c => unreachable,
.calling_convention_inline => unreachable,
.void_value => unreachable,
.unreachable_value => unreachable,
.null_value => unreachable,
.bool_true => unreachable,
.bool_false => unreachable,
.empty_struct => unreachable,
.generic_poison => unreachable,
_ => {},
.none => unreachable,
if (ty.ip_index != .none) {
if (@enumToInt(ty.ip_index) < Air.ref_start_index)
return @intToEnum(Air.Inst.Ref, @enumToInt(ty.ip_index));
try sema.air_instructions.append(sema.gpa, .{
.tag = .interned,
.data = .{ .interned = ty.ip_index },
});
return Air.indexToRef(@intCast(u32, sema.air_instructions.len - 1));
}
switch (ty.tag()) {
.u1 => return .u1_type,


@ -521,7 +521,7 @@ fn gen(self: *Self) !void {
const inst = self.air.getMainBody()[arg_index];
assert(self.air.instructions.items(.tag)[inst] == .arg);
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const abi_size = @intCast(u32, ty.abiSize(mod));
const abi_align = ty.abiAlignment(mod);
@ -653,13 +653,14 @@ fn gen(self: *Self) !void {
}
fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
const mod = self.bin_file.options.module.?;
const ip = &mod.intern_pool;
const air_tags = self.air.instructions.items(.tag);
for (body) |inst| {
// TODO: remove now-redundant isUnused calls from AIR handler functions
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) {
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*))
continue;
}
const old_air_bookkeeping = self.air_bookkeeping;
try self.ensureProcessDeathCapacity(Liveness.bpi);
@ -845,6 +846,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
.interned => unreachable, // excluded from function bodies
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -1028,7 +1030,7 @@ fn allocMem(
/// Use a pointer instruction as the basis for allocating stack memory.
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
const mod = self.bin_file.options.module.?;
const elem_ty = self.air.typeOfIndex(inst).elemType();
const elem_ty = self.typeOfIndex(inst).elemType();
if (!elem_ty.hasRuntimeBits(mod)) {
// return the stack offset 0. Stack offset 0 will be where all
@ -1067,7 +1069,7 @@ fn allocRegOrMem(self: *Self, elem_ty: Type, reg_ok: bool, maybe_inst: ?Air.Inst
}
pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void {
const stack_mcv = try self.allocRegOrMem(self.air.typeOfIndex(inst), false, inst);
const stack_mcv = try self.allocRegOrMem(self.typeOfIndex(inst), false, inst);
log.debug("spilling {d} to stack mcv {any}", .{ inst, stack_mcv });
const reg_mcv = self.getResolvedInstValue(inst);
@ -1079,14 +1081,14 @@ pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
try branch.inst_table.put(self.gpa, inst, stack_mcv);
try self.genSetStack(self.air.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
try self.genSetStack(self.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
}
/// Save the current instruction stored in the compare flags if
/// occupied
fn spillCompareFlagsIfOccupied(self: *Self) !void {
if (self.compare_flags_inst) |inst_to_save| {
const ty = self.air.typeOfIndex(inst_to_save);
const ty = self.typeOfIndex(inst_to_save);
const mcv = self.getResolvedInstValue(inst_to_save);
const new_mcv = switch (mcv) {
.compare_flags => try self.allocRegOrMem(ty, true, inst_to_save),
@ -1094,7 +1096,7 @@ fn spillCompareFlagsIfOccupied(self: *Self) !void {
else => unreachable, // mcv doesn't occupy the compare flags
};
try self.setRegOrMem(self.air.typeOfIndex(inst_to_save), new_mcv, mcv);
try self.setRegOrMem(self.typeOfIndex(inst_to_save), new_mcv, mcv);
log.debug("spilling {d} to mcv {any}", .{ inst_to_save, new_mcv });
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@ -1126,9 +1128,9 @@ fn copyToTmpRegister(self: *Self, ty: Type, mcv: MCValue) !Register {
/// This can have a side effect of spilling instructions to the stack to free up a register.
fn copyToNewRegister(self: *Self, reg_owner: Air.Inst.Index, mcv: MCValue) !MCValue {
const raw_reg = try self.register_manager.allocReg(reg_owner, gp);
const ty = self.air.typeOfIndex(reg_owner);
const ty = self.typeOfIndex(reg_owner);
const reg = self.registerAlias(raw_reg, ty);
try self.genSetReg(self.air.typeOfIndex(reg_owner), reg, mcv);
try self.genSetReg(self.typeOfIndex(reg_owner), reg, mcv);
return MCValue{ .register = reg };
}
@ -1181,10 +1183,10 @@ fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const operand = ty_op.operand;
const operand_mcv = try self.resolveInst(operand);
const operand_ty = self.air.typeOf(operand);
const operand_ty = self.typeOf(operand);
const operand_info = operand_ty.intInfo(mod);
const dest_ty = self.air.typeOfIndex(inst);
const dest_ty = self.typeOfIndex(inst);
const dest_info = dest_ty.intInfo(mod);
const result: MCValue = result: {
@ -1201,14 +1203,14 @@ fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
if (dest_info.bits > operand_info.bits) {
const dest_mcv = try self.allocRegOrMem(dest_ty, true, inst);
try self.setRegOrMem(self.air.typeOfIndex(inst), dest_mcv, truncated);
try self.setRegOrMem(self.typeOfIndex(inst), dest_mcv, truncated);
break :result dest_mcv;
} else {
if (self.reuseOperand(inst, operand, 0, truncated)) {
break :result truncated;
} else {
const dest_mcv = try self.allocRegOrMem(dest_ty, true, inst);
try self.setRegOrMem(self.air.typeOfIndex(inst), dest_mcv, truncated);
try self.setRegOrMem(self.typeOfIndex(inst), dest_mcv, truncated);
break :result dest_mcv;
}
}
@ -1303,8 +1305,8 @@ fn trunc(
fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand = try self.resolveInst(ty_op.operand);
const operand_ty = self.air.typeOf(ty_op.operand);
const dest_ty = self.air.typeOfIndex(inst);
const operand_ty = self.typeOf(ty_op.operand);
const dest_ty = self.typeOfIndex(inst);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else blk: {
break :blk try self.trunc(inst, operand, operand_ty, dest_ty);
@ -1325,7 +1327,7 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand = try self.resolveInst(ty_op.operand);
const operand_ty = self.air.typeOf(ty_op.operand);
const operand_ty = self.typeOf(ty_op.operand);
switch (operand) {
.dead => unreachable,
.unreach => unreachable,
@ -1492,8 +1494,8 @@ fn minMax(
fn airMinMax(self: *Self, inst: Air.Inst.Index) !void {
const tag = self.air.instructions.items(.tag)[inst];
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
@ -1512,9 +1514,9 @@ fn airSlice(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const ptr = try self.resolveInst(bin_op.lhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const len = try self.resolveInst(bin_op.rhs);
const len_ty = self.air.typeOf(bin_op.rhs);
const len_ty = self.typeOf(bin_op.rhs);
const ptr_bits = self.target.ptrBitWidth();
const ptr_bytes = @divExact(ptr_bits, 8);
@ -2436,8 +2438,8 @@ fn ptrArithmetic(
fn airBinOp(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
@ -2487,8 +2489,8 @@ fn airBinOp(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
fn airPtrArithmetic(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
@ -2525,10 +2527,10 @@ fn airOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = extra.lhs };
const rhs_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
const tuple_size = @intCast(u32, tuple_ty.abiSize(mod));
const tuple_align = tuple_ty.abiAlignment(mod);
const overflow_bit_offset = @intCast(u32, tuple_ty.structFieldOffset(1, mod));
@ -2653,10 +2655,10 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = result: {
const lhs_bind: ReadArg.Bind = .{ .inst = extra.lhs };
const rhs_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
const tuple_size = @intCast(u32, tuple_ty.abiSize(mod));
const tuple_align = tuple_ty.abiAlignment(mod);
const overflow_bit_offset = @intCast(u32, tuple_ty.structFieldOffset(1, mod));
@ -2877,10 +2879,10 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = result: {
const lhs_bind: ReadArg.Bind = .{ .inst = extra.lhs };
const rhs_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
const tuple_size = @intCast(u32, tuple_ty.abiSize(mod));
const tuple_align = tuple_ty.abiAlignment(mod);
const overflow_bit_offset = @intCast(u32, tuple_ty.structFieldOffset(1, mod));
@ -3013,7 +3015,7 @@ fn airShlSat(self: *Self, inst: Air.Inst.Index) !void {
fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const optional_ty = self.air.typeOf(ty_op.operand);
const optional_ty = self.typeOf(ty_op.operand);
const mcv = try self.resolveInst(ty_op.operand);
break :result try self.optionalPayload(inst, mcv, optional_ty);
};
@ -3132,7 +3134,7 @@ fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = ty_op.operand };
const error_union_ty = self.air.typeOf(ty_op.operand);
const error_union_ty = self.typeOf(ty_op.operand);
break :result try self.errUnionErr(error_union_bind, error_union_ty, inst);
};
@ -3212,7 +3214,7 @@ fn airUnwrapErrPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = ty_op.operand };
const error_union_ty = self.air.typeOf(ty_op.operand);
const error_union_ty = self.typeOf(ty_op.operand);
break :result try self.errUnionPayload(error_union_bind, error_union_ty, inst);
};
@ -3266,12 +3268,12 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
}
const result: MCValue = result: {
const payload_ty = self.air.typeOf(ty_op.operand);
const payload_ty = self.typeOf(ty_op.operand);
if (!payload_ty.hasRuntimeBits(mod)) {
break :result MCValue{ .immediate = 1 };
}
const optional_ty = self.air.typeOfIndex(inst);
const optional_ty = self.typeOfIndex(inst);
const operand = try self.resolveInst(ty_op.operand);
const operand_lock: ?RegisterLock = switch (operand) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -3433,7 +3435,7 @@ fn airPtrSlicePtrPtr(self: *Self, inst: Air.Inst.Index) !void {
fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const slice_ty = self.air.typeOf(bin_op.lhs);
const slice_ty = self.typeOf(bin_op.lhs);
const result: MCValue = if (!slice_ty.isVolatilePtr() and self.liveness.isUnused(inst)) .dead else result: {
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const ptr_ty = slice_ty.slicePtrFieldType(&buf);
@ -3482,8 +3484,8 @@ fn airSliceElemPtr(self: *Self, inst: Air.Inst.Index) !void {
const base_bind: ReadArg.Bind = .{ .mcv = base_mcv };
const index_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const slice_ty = self.air.typeOf(extra.lhs);
const index_ty = self.air.typeOf(extra.rhs);
const slice_ty = self.typeOf(extra.lhs);
const index_ty = self.typeOf(extra.rhs);
const addr = try self.ptrArithmetic(.ptr_add, base_bind, index_bind, slice_ty, index_ty, null);
break :result addr;
@ -3499,7 +3501,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
fn airPtrElemVal(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const result: MCValue = if (!ptr_ty.isVolatilePtr() and self.liveness.isUnused(inst)) .dead else result: {
const base_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
const index_bind: ReadArg.Bind = .{ .inst = bin_op.rhs };
@ -3516,8 +3518,8 @@ fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) !void {
const ptr_bind: ReadArg.Bind = .{ .inst = extra.lhs };
const index_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const ptr_ty = self.air.typeOf(extra.lhs);
const index_ty = self.air.typeOf(extra.rhs);
const ptr_ty = self.typeOf(extra.lhs);
const index_ty = self.typeOf(extra.rhs);
const addr = try self.ptrArithmetic(.ptr_add, ptr_bind, index_bind, ptr_ty, index_ty, null);
break :result addr;
@ -3862,16 +3864,16 @@ fn genInlineMemsetCode(
}
fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.air.typeOfIndex(inst);
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.typeOfIndex(inst);
const elem_size = elem_ty.abiSize(mod);
const result: MCValue = result: {
if (!elem_ty.hasRuntimeBits(mod))
break :result MCValue.none;
const ptr = try self.resolveInst(ty_op.operand);
const is_volatile = self.air.typeOf(ty_op.operand).isVolatilePtr();
const is_volatile = self.typeOf(ty_op.operand).isVolatilePtr();
if (self.liveness.isUnused(inst) and !is_volatile)
break :result MCValue.dead;
@ -3886,7 +3888,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(elem_ty, true, inst);
}
};
try self.load(dst_mcv, ptr, self.air.typeOf(ty_op.operand));
try self.load(dst_mcv, ptr, self.typeOf(ty_op.operand));
break :result dst_mcv;
};
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
@ -4068,8 +4070,8 @@ fn airStore(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr = try self.resolveInst(bin_op.lhs);
const value = try self.resolveInst(bin_op.rhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const value_ty = self.air.typeOf(bin_op.rhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const value_ty = self.typeOf(bin_op.rhs);
try self.store(ptr, value, ptr_ty, value_ty);
@ -4093,7 +4095,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
return if (self.liveness.isUnused(inst)) .dead else result: {
const mod = self.bin_file.options.module.?;
const mcv = try self.resolveInst(operand);
const ptr_ty = self.air.typeOf(operand);
const ptr_ty = self.typeOf(operand);
const struct_ty = ptr_ty.childType();
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
switch (mcv) {
@ -4118,7 +4120,7 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const mod = self.bin_file.options.module.?;
const mcv = try self.resolveInst(operand);
const struct_ty = self.air.typeOf(operand);
const struct_ty = self.typeOf(operand);
const struct_field_ty = struct_ty.structFieldType(index);
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
@ -4194,7 +4196,7 @@ fn airArg(self: *Self, inst: Air.Inst.Index) !void {
while (self.args[arg_index] == .none) arg_index += 1;
self.arg_index = arg_index + 1;
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const tag = self.air.instructions.items(.tag)[inst];
const src_index = self.air.instructions.items(.data)[inst].arg.src_index;
const name = self.mod_fn.getParamName(self.bin_file.options.module.?, src_index);
@ -4247,7 +4249,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const callee = pl_op.operand;
const extra = self.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra.end..][0..extra.data.args_len]);
const ty = self.air.typeOf(callee);
const ty = self.typeOf(callee);
const mod = self.bin_file.options.module.?;
const fn_ty = switch (ty.zigTypeTag(mod)) {
@ -4294,7 +4296,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
for (info.args, 0..) |mc_arg, arg_i| {
const arg = args[arg_i];
const arg_ty = self.air.typeOf(arg);
const arg_ty = self.typeOf(arg);
const arg_mcv = try self.resolveInst(args[arg_i]);
switch (mc_arg) {
@ -4470,7 +4472,7 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const ret_ty = self.fn_type.fnReturnType();
switch (self.ret_mcv) {
@ -4512,7 +4514,7 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else blk: {
break :blk try self.cmp(.{ .inst = bin_op.lhs }, .{ .inst = bin_op.rhs }, lhs_ty, op);
@ -4652,7 +4654,7 @@ fn airDbgVar(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const operand = pl_op.operand;
const tag = self.air.instructions.items(.tag)[inst];
const ty = self.air.typeOf(operand);
const ty = self.typeOf(operand);
const mcv = try self.resolveInst(operand);
const name = self.air.nullTerminatedString(pl_op.payload);
@ -4804,7 +4806,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
log.debug("consolidating else_entry {d} {}=>{}", .{ else_key, else_value, canon_mcv });
// TODO make sure the destination stack offset / register does not already have something
// going on there.
try self.setRegOrMem(self.air.typeOfIndex(else_key), canon_mcv, else_value);
try self.setRegOrMem(self.typeOfIndex(else_key), canon_mcv, else_value);
// TODO track the new register / stack allocation
}
try parent_branch.inst_table.ensureUnusedCapacity(self.gpa, saved_then_branch.inst_table.count());
@ -4831,7 +4833,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
log.debug("consolidating then_entry {d} {}=>{}", .{ then_key, parent_mcv, then_value });
// TODO make sure the destination stack offset / register does not already have something
// going on there.
try self.setRegOrMem(self.air.typeOfIndex(then_key), parent_mcv, then_value);
try self.setRegOrMem(self.typeOfIndex(then_key), parent_mcv, then_value);
// TODO track the new register / stack allocation
}
@ -4936,7 +4938,7 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand = try self.resolveInst(un_op);
const operand_ty = self.air.typeOf(un_op);
const operand_ty = self.typeOf(un_op);
break :result try self.isNull(.{ .mcv = operand }, operand_ty);
};
@ -4947,7 +4949,7 @@ fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -4962,7 +4964,7 @@ fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand = try self.resolveInst(un_op);
const operand_ty = self.air.typeOf(un_op);
const operand_ty = self.typeOf(un_op);
break :result try self.isNonNull(.{ .mcv = operand }, operand_ty);
};
@ -4973,7 +4975,7 @@ fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -4988,7 +4990,7 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = un_op };
const error_union_ty = self.air.typeOf(un_op);
const error_union_ty = self.typeOf(un_op);
break :result try self.isErr(error_union_bind, error_union_ty);
};
@ -4999,7 +5001,7 @@ fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -5014,7 +5016,7 @@ fn airIsNonErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = un_op };
const error_union_ty = self.air.typeOf(un_op);
const error_union_ty = self.typeOf(un_op);
break :result try self.isNonErr(error_union_bind, error_union_ty);
};
@ -5025,7 +5027,7 @@ fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -5093,7 +5095,7 @@ fn airBlock(self: *Self, inst: Air.Inst.Index) !void {
fn airSwitch(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const condition_ty = self.air.typeOf(pl_op.operand);
const condition_ty = self.typeOf(pl_op.operand);
const switch_br = self.air.extraData(Air.SwitchBr, pl_op.payload);
const liveness = try self.liveness.getSwitchBr(
self.gpa,
@ -5241,7 +5243,7 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
const mod = self.bin_file.options.module.?;
const block_data = self.blocks.getPtr(block).?;
if (self.air.typeOf(operand).hasRuntimeBits(mod)) {
if (self.typeOf(operand).hasRuntimeBits(mod)) {
const operand_mcv = try self.resolveInst(operand);
const block_mcv = block_data.mcv;
if (block_mcv == .none) {
@ -5249,14 +5251,14 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
.none, .dead, .unreach => unreachable,
.register, .stack_offset, .memory => operand_mcv,
.immediate, .stack_argument_offset, .compare_flags => blk: {
const new_mcv = try self.allocRegOrMem(self.air.typeOfIndex(block), true, block);
try self.setRegOrMem(self.air.typeOfIndex(block), new_mcv, operand_mcv);
const new_mcv = try self.allocRegOrMem(self.typeOfIndex(block), true, block);
try self.setRegOrMem(self.typeOfIndex(block), new_mcv, operand_mcv);
break :blk new_mcv;
},
else => return self.fail("TODO implement block_data.mcv = operand_mcv for {}", .{operand_mcv}),
};
} else {
try self.setRegOrMem(self.air.typeOfIndex(block), block_mcv, operand_mcv);
try self.setRegOrMem(self.typeOfIndex(block), block_mcv, operand_mcv);
}
}
return self.brVoid(block);
@ -5322,7 +5324,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
const arg_mcv = try self.resolveInst(input);
try self.register_manager.getReg(reg, null);
try self.genSetReg(self.air.typeOf(input), reg, arg_mcv);
try self.genSetReg(self.typeOf(input), reg, arg_mcv);
}
{
@ -5945,7 +5947,7 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
};
defer if (operand_lock) |lock| self.register_manager.unlockReg(lock);
const dest_ty = self.air.typeOfIndex(inst);
const dest_ty = self.typeOfIndex(inst);
const dest = try self.allocRegOrMem(dest_ty, true, inst);
try self.setRegOrMem(dest_ty, dest, operand);
break :result dest;
@ -5956,7 +5958,7 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const ptr_ty = self.air.typeOf(ty_op.operand);
const ptr_ty = self.typeOf(ty_op.operand);
const ptr = try self.resolveInst(ty_op.operand);
const array_ty = ptr_ty.childType();
const array_len = @intCast(u32, array_ty.arrayLen());
@ -6076,7 +6078,7 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
}
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const vector_ty = self.air.typeOfIndex(inst);
const vector_ty = self.typeOfIndex(inst);
const len = vector_ty.vectorLen();
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
@ -6125,7 +6127,7 @@ fn airTry(self: *Self, inst: Air.Inst.Index) !void {
const body = self.air.extra[extra.end..][0..extra.data.body_len];
const result: MCValue = result: {
const error_union_bind: ReadArg.Bind = .{ .inst = pl_op.operand };
const error_union_ty = self.air.typeOf(pl_op.operand);
const error_union_ty = self.typeOf(pl_op.operand);
const error_union_size = @intCast(u32, error_union_ty.abiSize(mod));
const error_union_align = error_union_ty.abiAlignment(mod);
@ -6159,7 +6161,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
const mod = self.bin_file.options.module.?;
// If the type has no codegen bits, no need to store it.
const inst_ty = self.air.typeOf(inst);
const inst_ty = self.typeOf(inst);
if (!inst_ty.hasRuntimeBitsIgnoreComptime(mod) and !inst_ty.isError(mod))
return MCValue{ .none = {} };
@ -6428,3 +6430,13 @@ fn registerAlias(self: *Self, reg: Register, ty: Type) Register {
},
}
}
fn typeOf(self: *Self, inst: Air.Inst.Ref) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(self: *Self, inst: Air.Inst.Index) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOfIndex(inst, mod.intern_pool);
}


@ -477,6 +477,7 @@ pub fn addExtraAssumeCapacity(self: *Self, extra: anytype) u32 {
}
fn gen(self: *Self) !void {
const mod = self.bin_file.options.module.?;
const cc = self.fn_type.fnCallingConvention();
if (cc != .Naked) {
// push {fp, lr}
@ -518,9 +519,8 @@ fn gen(self: *Self) !void {
const inst = self.air.getMainBody()[arg_index];
assert(self.air.instructions.items(.tag)[inst] == .arg);
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const mod = self.bin_file.options.module.?;
const abi_size = @intCast(u32, ty.abiSize(mod));
const abi_align = ty.abiAlignment(mod);
const stack_offset = try self.allocMem(abi_size, abi_align, inst);
@ -637,13 +637,14 @@ fn gen(self: *Self) !void {
}
fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
const mod = self.bin_file.options.module.?;
const ip = &mod.intern_pool;
const air_tags = self.air.instructions.items(.tag);
for (body) |inst| {
// TODO: remove now-redundant isUnused calls from AIR handler functions
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) {
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*))
continue;
}
const old_air_bookkeeping = self.air_bookkeeping;
try self.ensureProcessDeathCapacity(Liveness.bpi);
@ -829,6 +830,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
.interned => unreachable, // excluded from function bodies
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -1008,7 +1010,7 @@ fn allocMem(
/// Use a pointer instruction as the basis for allocating stack memory.
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
const mod = self.bin_file.options.module.?;
const elem_ty = self.air.typeOfIndex(inst).elemType();
const elem_ty = self.typeOfIndex(inst).elemType();
if (!elem_ty.hasRuntimeBits(mod)) {
// As this stack item will never be dereferenced at runtime,
@ -1050,7 +1052,7 @@ fn allocRegOrMem(self: *Self, elem_ty: Type, reg_ok: bool, maybe_inst: ?Air.Inst
}
pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void {
const stack_mcv = try self.allocRegOrMem(self.air.typeOfIndex(inst), false, inst);
const stack_mcv = try self.allocRegOrMem(self.typeOfIndex(inst), false, inst);
log.debug("spilling {} (%{d}) to stack mcv {any}", .{ reg, inst, stack_mcv });
const reg_mcv = self.getResolvedInstValue(inst);
@ -1064,14 +1066,14 @@ pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
try branch.inst_table.put(self.gpa, inst, stack_mcv);
try self.genSetStack(self.air.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
try self.genSetStack(self.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
}
/// Save the current instruction stored in the compare flags if
/// occupied
fn spillCompareFlagsIfOccupied(self: *Self) !void {
if (self.cpsr_flags_inst) |inst_to_save| {
const ty = self.air.typeOfIndex(inst_to_save);
const ty = self.typeOfIndex(inst_to_save);
const mcv = self.getResolvedInstValue(inst_to_save);
const new_mcv = switch (mcv) {
.cpsr_flags => try self.allocRegOrMem(ty, true, inst_to_save),
@ -1081,7 +1083,7 @@ fn spillCompareFlagsIfOccupied(self: *Self) !void {
else => unreachable, // mcv doesn't occupy the compare flags
};
try self.setRegOrMem(self.air.typeOfIndex(inst_to_save), new_mcv, mcv);
try self.setRegOrMem(self.typeOfIndex(inst_to_save), new_mcv, mcv);
log.debug("spilling {d} to mcv {any}", .{ inst_to_save, new_mcv });
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@ -1151,15 +1153,15 @@ fn airFpext(self: *Self, inst: Air.Inst.Index) !void {
}
fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
if (self.liveness.isUnused(inst))
return self.finishAir(inst, .dead, .{ ty_op.operand, .none, .none });
const operand = try self.resolveInst(ty_op.operand);
const operand_ty = self.air.typeOf(ty_op.operand);
const dest_ty = self.air.typeOfIndex(inst);
const operand_ty = self.typeOf(ty_op.operand);
const dest_ty = self.typeOfIndex(inst);
const mod = self.bin_file.options.module.?;
const operand_abi_size = operand_ty.abiSize(mod);
const dest_abi_size = dest_ty.abiSize(mod);
const info_a = operand_ty.intInfo(mod);
@ -1262,8 +1264,8 @@ fn trunc(
fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand_bind: ReadArg.Bind = .{ .inst = ty_op.operand };
const operand_ty = self.air.typeOf(ty_op.operand);
const dest_ty = self.air.typeOfIndex(inst);
const operand_ty = self.typeOf(ty_op.operand);
const dest_ty = self.typeOfIndex(inst);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else blk: {
break :blk try self.trunc(inst, operand_bind, operand_ty, dest_ty);
@ -1284,7 +1286,7 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_bind: ReadArg.Bind = .{ .inst = ty_op.operand };
const operand_ty = self.air.typeOf(ty_op.operand);
const operand_ty = self.typeOf(ty_op.operand);
switch (try operand_bind.resolveToMcv(self)) {
.dead => unreachable,
.unreach => unreachable,
@ -1467,8 +1469,8 @@ fn minMax(
fn airMinMax(self: *Self, inst: Air.Inst.Index) !void {
const tag = self.air.instructions.items(.tag)[inst];
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
@ -1487,9 +1489,9 @@ fn airSlice(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const ptr = try self.resolveInst(bin_op.lhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const len = try self.resolveInst(bin_op.rhs);
const len_ty = self.air.typeOf(bin_op.rhs);
const len_ty = self.typeOf(bin_op.rhs);
const stack_offset = try self.allocMem(8, 4, inst);
try self.genSetStack(ptr_ty, stack_offset, ptr);
@ -1501,8 +1503,8 @@ fn airSlice(self: *Self, inst: Air.Inst.Index) !void {
fn airBinOp(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
@ -1552,8 +1554,8 @@ fn airBinOp(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
fn airPtrArithmetic(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
@ -1590,10 +1592,10 @@ fn airOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs_bind: ReadArg.Bind = .{ .inst = extra.lhs };
const rhs_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
const tuple_size = @intCast(u32, tuple_ty.abiSize(mod));
const tuple_align = tuple_ty.abiAlignment(mod);
const overflow_bit_offset = @intCast(u32, tuple_ty.structFieldOffset(1, mod));
@ -1703,10 +1705,10 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = result: {
const lhs_bind: ReadArg.Bind = .{ .inst = extra.lhs };
const rhs_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
const tuple_size = @intCast(u32, tuple_ty.abiSize(mod));
const tuple_align = tuple_ty.abiAlignment(mod);
const overflow_bit_offset = @intCast(u32, tuple_ty.structFieldOffset(1, mod));
@ -1865,10 +1867,10 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
if (self.liveness.isUnused(inst)) return self.finishAir(inst, .dead, .{ extra.lhs, extra.rhs, .none });
const mod = self.bin_file.options.module.?;
const result: MCValue = result: {
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
const tuple_size = @intCast(u32, tuple_ty.abiSize(mod));
const tuple_align = tuple_ty.abiAlignment(mod);
const overflow_bit_offset = @intCast(u32, tuple_ty.structFieldOffset(1, mod));
@ -2019,10 +2021,10 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
}
fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const optional_ty = self.air.typeOfIndex(inst);
const mod = self.bin_file.options.module.?;
const optional_ty = self.typeOfIndex(inst);
const abi_size = @intCast(u32, optional_ty.abiSize(mod));
// Optional with a zero-bit payload type is just a boolean true
@ -2105,7 +2107,7 @@ fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = ty_op.operand };
const error_union_ty = self.air.typeOf(ty_op.operand);
const error_union_ty = self.typeOf(ty_op.operand);
break :result try self.errUnionErr(error_union_bind, error_union_ty, inst);
};
@ -2182,7 +2184,7 @@ fn airUnwrapErrPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = ty_op.operand };
const error_union_ty = self.air.typeOf(ty_op.operand);
const error_union_ty = self.typeOf(ty_op.operand);
break :result try self.errUnionPayload(error_union_bind, error_union_ty, inst);
};
@ -2430,7 +2432,7 @@ fn ptrElemVal(
fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const slice_ty = self.air.typeOf(bin_op.lhs);
const slice_ty = self.typeOf(bin_op.lhs);
const result: MCValue = if (!slice_ty.isVolatilePtr() and self.liveness.isUnused(inst)) .dead else result: {
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const ptr_ty = slice_ty.slicePtrFieldType(&buf);
@ -2456,8 +2458,8 @@ fn airSliceElemPtr(self: *Self, inst: Air.Inst.Index) !void {
const base_bind: ReadArg.Bind = .{ .mcv = base_mcv };
const index_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const slice_ty = self.air.typeOf(extra.lhs);
const index_ty = self.air.typeOf(extra.rhs);
const slice_ty = self.typeOf(extra.lhs);
const index_ty = self.typeOf(extra.rhs);
const addr = try self.ptrArithmetic(.ptr_add, base_bind, index_bind, slice_ty, index_ty, null);
break :result addr;
@ -2523,7 +2525,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const array_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
const index_bind: ReadArg.Bind = .{ .inst = bin_op.rhs };
const array_ty = self.air.typeOf(bin_op.lhs);
const array_ty = self.typeOf(bin_op.lhs);
break :result try self.arrayElemVal(array_bind, index_bind, array_ty, inst);
};
@ -2532,7 +2534,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
fn airPtrElemVal(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const result: MCValue = if (!ptr_ty.isVolatilePtr() and self.liveness.isUnused(inst)) .dead else result: {
const base_bind: ReadArg.Bind = .{ .inst = bin_op.lhs };
const index_bind: ReadArg.Bind = .{ .inst = bin_op.rhs };
@ -2549,8 +2551,8 @@ fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) !void {
const ptr_bind: ReadArg.Bind = .{ .inst = extra.lhs };
const index_bind: ReadArg.Bind = .{ .inst = extra.rhs };
const ptr_ty = self.air.typeOf(extra.lhs);
const index_ty = self.air.typeOf(extra.rhs);
const ptr_ty = self.typeOf(extra.lhs);
const index_ty = self.typeOf(extra.rhs);
const addr = try self.ptrArithmetic(.ptr_add, ptr_bind, index_bind, ptr_ty, index_ty, null);
break :result addr;
@ -2736,13 +2738,13 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.air.typeOfIndex(inst);
const elem_ty = self.typeOfIndex(inst);
const result: MCValue = result: {
if (!elem_ty.hasRuntimeBits(mod))
break :result MCValue.none;
const ptr = try self.resolveInst(ty_op.operand);
const is_volatile = self.air.typeOf(ty_op.operand).isVolatilePtr();
const is_volatile = self.typeOf(ty_op.operand).isVolatilePtr();
if (self.liveness.isUnused(inst) and !is_volatile)
break :result MCValue.dead;
@ -2755,7 +2757,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(elem_ty, true, inst);
}
};
try self.load(dest_mcv, ptr, self.air.typeOf(ty_op.operand));
try self.load(dest_mcv, ptr, self.typeOf(ty_op.operand));
break :result dest_mcv;
};
@ -2860,8 +2862,8 @@ fn airStore(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr = try self.resolveInst(bin_op.lhs);
const value = try self.resolveInst(bin_op.rhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const value_ty = self.air.typeOf(bin_op.rhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const value_ty = self.typeOf(bin_op.rhs);
try self.store(ptr, value, ptr_ty, value_ty);
@ -2885,7 +2887,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
return if (self.liveness.isUnused(inst)) .dead else result: {
const mod = self.bin_file.options.module.?;
const mcv = try self.resolveInst(operand);
const ptr_ty = self.air.typeOf(operand);
const ptr_ty = self.typeOf(operand);
const struct_ty = ptr_ty.childType();
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
switch (mcv) {
@ -2910,7 +2912,7 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const mcv = try self.resolveInst(operand);
const struct_ty = self.air.typeOf(operand);
const struct_ty = self.typeOf(operand);
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
const struct_field_ty = struct_ty.structFieldType(index);
@ -4169,7 +4171,7 @@ fn airArg(self: *Self, inst: Air.Inst.Index) !void {
while (self.args[arg_index] == .none) arg_index += 1;
self.arg_index = arg_index + 1;
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const tag = self.air.instructions.items(.tag)[inst];
const src_index = self.air.instructions.items(.data)[inst].arg.src_index;
const name = self.mod_fn.getParamName(self.bin_file.options.module.?, src_index);
@ -4222,7 +4224,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const callee = pl_op.operand;
const extra = self.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra.end..][0..extra.data.args_len]);
const ty = self.air.typeOf(callee);
const ty = self.typeOf(callee);
const mod = self.bin_file.options.module.?;
const fn_ty = switch (ty.zigTypeTag(mod)) {
@ -4276,7 +4278,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
for (info.args, 0..) |mc_arg, arg_i| {
const arg = args[arg_i];
const arg_ty = self.air.typeOf(arg);
const arg_ty = self.typeOf(arg);
const arg_mcv = try self.resolveInst(args[arg_i]);
switch (mc_arg) {
@ -4418,7 +4420,7 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const ret_ty = self.fn_type.fnReturnType();
switch (self.ret_mcv) {
@ -4461,7 +4463,7 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs_ty = self.air.typeOf(bin_op.lhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else blk: {
break :blk try self.cmp(.{ .inst = bin_op.lhs }, .{ .inst = bin_op.rhs }, lhs_ty, op);
@ -4600,7 +4602,7 @@ fn airDbgVar(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const operand = pl_op.operand;
const tag = self.air.instructions.items(.tag)[inst];
const ty = self.air.typeOf(operand);
const ty = self.typeOf(operand);
const mcv = try self.resolveInst(operand);
const name = self.air.nullTerminatedString(pl_op.payload);
@ -4755,7 +4757,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
log.debug("consolidating else_entry {d} {}=>{}", .{ else_key, else_value, canon_mcv });
// TODO make sure the destination stack offset / register does not already have something
// going on there.
try self.setRegOrMem(self.air.typeOfIndex(else_key), canon_mcv, else_value);
try self.setRegOrMem(self.typeOfIndex(else_key), canon_mcv, else_value);
// TODO track the new register / stack allocation
}
try parent_branch.inst_table.ensureUnusedCapacity(self.gpa, saved_then_branch.inst_table.count());
@ -4782,7 +4784,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
log.debug("consolidating then_entry {d} {}=>{}", .{ then_key, parent_mcv, then_value });
// TODO make sure the destination stack offset / register does not already have something
// going on there.
try self.setRegOrMem(self.air.typeOfIndex(then_key), parent_mcv, then_value);
try self.setRegOrMem(self.typeOfIndex(then_key), parent_mcv, then_value);
// TODO track the new register / stack allocation
}
@ -4827,7 +4829,7 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_bind: ReadArg.Bind = .{ .inst = un_op };
const operand_ty = self.air.typeOf(un_op);
const operand_ty = self.typeOf(un_op);
break :result try self.isNull(operand_bind, operand_ty);
};
@ -4838,7 +4840,7 @@ fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -4853,7 +4855,7 @@ fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_bind: ReadArg.Bind = .{ .inst = un_op };
const operand_ty = self.air.typeOf(un_op);
const operand_ty = self.typeOf(un_op);
break :result try self.isNonNull(operand_bind, operand_ty);
};
@ -4864,7 +4866,7 @@ fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -4913,7 +4915,7 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = un_op };
const error_union_ty = self.air.typeOf(un_op);
const error_union_ty = self.typeOf(un_op);
break :result try self.isErr(error_union_bind, error_union_ty);
};
@ -4924,7 +4926,7 @@ fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -4939,7 +4941,7 @@ fn airIsNonErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_bind: ReadArg.Bind = .{ .inst = un_op };
const error_union_ty = self.air.typeOf(un_op);
const error_union_ty = self.typeOf(un_op);
break :result try self.isNonErr(error_union_bind, error_union_ty);
};
@ -4950,7 +4952,7 @@ fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const elem_ty = ptr_ty.elemType();
const operand = try self.allocRegOrMem(elem_ty, true, null);
@ -5018,7 +5020,7 @@ fn airBlock(self: *Self, inst: Air.Inst.Index) !void {
fn airSwitch(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const condition_ty = self.air.typeOf(pl_op.operand);
const condition_ty = self.typeOf(pl_op.operand);
const switch_br = self.air.extraData(Air.SwitchBr, pl_op.payload);
const liveness = try self.liveness.getSwitchBr(
self.gpa,
@ -5164,7 +5166,7 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
const mod = self.bin_file.options.module.?;
const block_data = self.blocks.getPtr(block).?;
if (self.air.typeOf(operand).hasRuntimeBits(mod)) {
if (self.typeOf(operand).hasRuntimeBits(mod)) {
const operand_mcv = try self.resolveInst(operand);
const block_mcv = block_data.mcv;
if (block_mcv == .none) {
@ -5172,14 +5174,14 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
.none, .dead, .unreach => unreachable,
.register, .stack_offset, .memory => operand_mcv,
.immediate, .stack_argument_offset, .cpsr_flags => blk: {
const new_mcv = try self.allocRegOrMem(self.air.typeOfIndex(block), true, block);
try self.setRegOrMem(self.air.typeOfIndex(block), new_mcv, operand_mcv);
const new_mcv = try self.allocRegOrMem(self.typeOfIndex(block), true, block);
try self.setRegOrMem(self.typeOfIndex(block), new_mcv, operand_mcv);
break :blk new_mcv;
},
else => return self.fail("TODO implement block_data.mcv = operand_mcv for {}", .{operand_mcv}),
};
} else {
try self.setRegOrMem(self.air.typeOfIndex(block), block_mcv, operand_mcv);
try self.setRegOrMem(self.typeOfIndex(block), block_mcv, operand_mcv);
}
}
return self.brVoid(block);
@ -5243,7 +5245,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
const arg_mcv = try self.resolveInst(input);
try self.register_manager.getReg(reg, null);
try self.genSetReg(self.air.typeOf(input), reg, arg_mcv);
try self.genSetReg(self.typeOf(input), reg, arg_mcv);
}
{
@ -5896,7 +5898,7 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
};
defer if (operand_lock) |lock| self.register_manager.unlockReg(lock);
const dest_ty = self.air.typeOfIndex(inst);
const dest_ty = self.typeOfIndex(inst);
const dest = try self.allocRegOrMem(dest_ty, true, inst);
try self.setRegOrMem(dest_ty, dest, operand);
break :result dest;
@ -5907,7 +5909,7 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const ptr_ty = self.air.typeOf(ty_op.operand);
const ptr_ty = self.typeOf(ty_op.operand);
const ptr = try self.resolveInst(ty_op.operand);
const array_ty = ptr_ty.childType();
const array_len = @intCast(u32, array_ty.arrayLen());
@ -6023,7 +6025,7 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
}
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const vector_ty = self.air.typeOfIndex(inst);
const vector_ty = self.typeOfIndex(inst);
const len = vector_ty.vectorLen();
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
@ -6072,7 +6074,7 @@ fn airTry(self: *Self, inst: Air.Inst.Index) !void {
const body = self.air.extra[extra.end..][0..extra.data.body_len];
const result: MCValue = result: {
const error_union_bind: ReadArg.Bind = .{ .inst = pl_op.operand };
const error_union_ty = self.air.typeOf(pl_op.operand);
const error_union_ty = self.typeOf(pl_op.operand);
const mod = self.bin_file.options.module.?;
const error_union_size = @intCast(u32, error_union_ty.abiSize(mod));
const error_union_align = error_union_ty.abiAlignment(mod);
@ -6107,7 +6109,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
const mod = self.bin_file.options.module.?;
// If the type has no codegen bits, no need to store it.
const inst_ty = self.air.typeOf(inst);
const inst_ty = self.typeOf(inst);
if (!inst_ty.hasRuntimeBitsIgnoreComptime(mod) and !inst_ty.isError(mod))
return MCValue{ .none = {} };
@ -6333,3 +6335,13 @@ fn parseRegName(name: []const u8) ?Register {
}
return std.meta.stringToEnum(Register, name);
}
fn typeOf(self: *Self, inst: Air.Inst.Ref) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(self: *Self, inst: Air.Inst.Index) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOfIndex(inst, mod.intern_pool);
}
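For context, a minimal sketch of how a handler in this backend would use the new wrappers instead of calling `self.air.typeOf`/`self.air.typeOfIndex` directly; `airExample` and its operand are hypothetical names, not part of this commit:

fn airExample(self: *Self, inst: Air.Inst.Index) !void {
    const ty_op = self.air.instructions.items(.data)[inst].ty_op;
    // The wrappers thread mod.intern_pool through to Air.typeOf / Air.typeOfIndex,
    // so type queries also cover interned (InternPool-backed) types.
    const operand_ty = self.typeOf(ty_op.operand);
    const result_ty = self.typeOfIndex(inst);
    _ = operand_ty;
    _ = result_ty;
}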

View File

@ -470,13 +470,14 @@ fn gen(self: *Self) !void {
}
fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
const mod = self.bin_file.options.module.?;
const ip = &mod.intern_pool;
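// `ip` is dereferenced (`ip.*`) at the call site below because Air.mustLower
// now takes the InternPool itself as a parameter.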
const air_tags = self.air.instructions.items(.tag);
for (body) |inst| {
// TODO: remove now-redundant isUnused calls from AIR handler functions
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) {
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*)) {
continue;
}
const old_air_bookkeeping = self.air_bookkeeping;
try self.ensureProcessDeathCapacity(Liveness.bpi);
@ -658,6 +659,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
.interned => unreachable, // excluded from function bodies
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -804,8 +806,8 @@ fn allocMem(self: *Self, inst: Air.Inst.Index, abi_size: u32, abi_align: u32) !u
/// Use a pointer instruction as the basis for allocating stack memory.
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
const elem_ty = self.air.typeOfIndex(inst).elemType();
const mod = self.bin_file.options.module.?;
const elem_ty = self.typeOfIndex(inst).elemType();
const abi_size = math.cast(u32, elem_ty.abiSize(mod)) orelse {
return self.fail("type '{}' too big to fit into stack frame", .{elem_ty.fmt(mod)});
};
@ -815,8 +817,8 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
}
fn allocRegOrMem(self: *Self, inst: Air.Inst.Index, reg_ok: bool) !MCValue {
const elem_ty = self.air.typeOfIndex(inst);
const mod = self.bin_file.options.module.?;
const elem_ty = self.typeOfIndex(inst);
const abi_size = math.cast(u32, elem_ty.abiSize(mod)) orelse {
return self.fail("type '{}' too big to fit into stack frame", .{elem_ty.fmt(mod)});
};
@ -845,7 +847,7 @@ pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void
assert(reg == reg_mcv.register);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
try branch.inst_table.put(self.gpa, inst, stack_mcv);
try self.genSetStack(self.air.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
try self.genSetStack(self.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
}
/// Copies a value to a register without tracking the register. The register is not considered
@ -862,7 +864,7 @@ fn copyToTmpRegister(self: *Self, ty: Type, mcv: MCValue) !Register {
/// This can have a side effect of spilling instructions to the stack to free up a register.
fn copyToNewRegister(self: *Self, reg_owner: Air.Inst.Index, mcv: MCValue) !MCValue {
const reg = try self.register_manager.allocReg(reg_owner, gp);
try self.genSetReg(self.air.typeOfIndex(reg_owner), reg, mcv);
try self.genSetReg(self.typeOfIndex(reg_owner), reg, mcv);
return MCValue{ .register = reg };
}
@ -894,10 +896,10 @@ fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, .dead, .{ ty_op.operand, .none, .none });
const mod = self.bin_file.options.module.?;
const operand_ty = self.air.typeOf(ty_op.operand);
const operand_ty = self.typeOf(ty_op.operand);
const operand = try self.resolveInst(ty_op.operand);
const info_a = operand_ty.intInfo(mod);
const info_b = self.air.typeOfIndex(inst).intInfo(mod);
const info_b = self.typeOfIndex(inst).intInfo(mod);
if (info_a.signedness != info_b.signedness)
return self.fail("TODO gen intcast sign safety in semantic analysis", .{});
@ -1126,8 +1128,8 @@ fn airBinOp(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else try self.binOp(tag, inst, lhs, rhs, lhs_ty, rhs_ty);
return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
@ -1138,8 +1140,8 @@ fn airPtrArithmetic(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else try self.binOp(tag, inst, lhs, rhs, lhs_ty, rhs_ty);
return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
@ -1333,7 +1335,7 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const mod = self.bin_file.options.module.?;
const optional_ty = self.air.typeOfIndex(inst);
const optional_ty = self.typeOfIndex(inst);
// Optional with a zero-bit payload type is just a boolean true
if (optional_ty.abiSize(mod) == 1)
@ -1525,15 +1527,15 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
}
fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.air.typeOfIndex(inst);
const elem_ty = self.typeOfIndex(inst);
const result: MCValue = result: {
const mod = self.bin_file.options.module.?;
if (!elem_ty.hasRuntimeBits(mod))
break :result MCValue.none;
const ptr = try self.resolveInst(ty_op.operand);
const is_volatile = self.air.typeOf(ty_op.operand).isVolatilePtr();
const is_volatile = self.typeOf(ty_op.operand).isVolatilePtr();
if (self.liveness.isUnused(inst) and !is_volatile)
break :result MCValue.dead;
@ -1545,7 +1547,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
try self.load(dst_mcv, ptr, self.air.typeOf(ty_op.operand));
try self.load(dst_mcv, ptr, self.typeOf(ty_op.operand));
break :result dst_mcv;
};
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
@ -1586,8 +1588,8 @@ fn airStore(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr = try self.resolveInst(bin_op.lhs);
const value = try self.resolveInst(bin_op.rhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const value_ty = self.air.typeOf(bin_op.rhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const value_ty = self.typeOf(bin_op.rhs);
try self.store(ptr, value, ptr_ty, value_ty);
@ -1647,7 +1649,7 @@ fn airArg(self: *Self, inst: Air.Inst.Index) !void {
const arg_index = self.arg_index;
self.arg_index += 1;
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
_ = ty;
const result = self.args[arg_index];
@ -1704,7 +1706,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const mod = self.bin_file.options.module.?;
if (modifier == .always_tail) return self.fail("TODO implement tail calls for riscv64", .{});
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const fn_ty = self.air.typeOf(pl_op.operand);
const fn_ty = self.typeOf(pl_op.operand);
const callee = pl_op.operand;
const extra = self.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra.end..][0..extra.data.args_len]);
@ -1717,7 +1719,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
for (info.args, 0..) |mc_arg, arg_i| {
const arg = args[arg_i];
const arg_ty = self.air.typeOf(arg);
const arg_ty = self.typeOf(arg);
const arg_mcv = try self.resolveInst(args[arg_i]);
switch (mc_arg) {
@ -1829,9 +1831,9 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
if (self.liveness.isUnused(inst))
return self.finishAir(inst, .dead, .{ bin_op.lhs, bin_op.rhs, .none });
const ty = self.air.typeOf(bin_op.lhs);
const ty = self.typeOf(bin_op.lhs);
const mod = self.bin_file.options.module.?;
assert(ty.eql(self.air.typeOf(bin_op.rhs), mod));
assert(ty.eql(self.typeOf(bin_op.rhs), mod));
if (ty.zigTypeTag(mod) == .ErrorSet)
return self.fail("TODO implement cmp for errors", .{});
@ -1950,7 +1952,7 @@ fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
try self.load(operand, operand_ptr, self.air.typeOf(un_op));
try self.load(operand, operand_ptr, self.typeOf(un_op));
break :result try self.isNull(operand);
};
return self.finishAir(inst, result, .{ un_op, .none, .none });
@ -1977,7 +1979,7 @@ fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
try self.load(operand, operand_ptr, self.air.typeOf(un_op));
try self.load(operand, operand_ptr, self.typeOf(un_op));
break :result try self.isNonNull(operand);
};
return self.finishAir(inst, result, .{ un_op, .none, .none });
@ -2004,7 +2006,7 @@ fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
try self.load(operand, operand_ptr, self.air.typeOf(un_op));
try self.load(operand, operand_ptr, self.typeOf(un_op));
break :result try self.isErr(operand);
};
return self.finishAir(inst, result, .{ un_op, .none, .none });
@ -2031,7 +2033,7 @@ fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
try self.load(operand, operand_ptr, self.air.typeOf(un_op));
try self.load(operand, operand_ptr, self.typeOf(un_op));
break :result try self.isNonErr(operand);
};
return self.finishAir(inst, result, .{ un_op, .none, .none });
@ -2112,13 +2114,13 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
const block_data = self.blocks.getPtr(block).?;
const mod = self.bin_file.options.module.?;
if (self.air.typeOf(operand).hasRuntimeBits(mod)) {
if (self.typeOf(operand).hasRuntimeBits(mod)) {
const operand_mcv = try self.resolveInst(operand);
const block_mcv = block_data.mcv;
if (block_mcv == .none) {
block_data.mcv = operand_mcv;
} else {
try self.setRegOrMem(self.air.typeOfIndex(block), block_mcv, operand_mcv);
try self.setRegOrMem(self.typeOfIndex(block), block_mcv, operand_mcv);
}
}
return self.brVoid(block);
@ -2181,7 +2183,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
const arg_mcv = try self.resolveInst(input);
try self.register_manager.getReg(reg, null);
try self.genSetReg(self.air.typeOf(input), reg, arg_mcv);
try self.genSetReg(self.typeOf(input), reg, arg_mcv);
}
{
@ -2377,7 +2379,7 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
defer if (operand_lock) |lock| self.register_manager.unlockReg(lock);
const dest = try self.allocRegOrMem(inst, true);
try self.setRegOrMem(self.air.typeOfIndex(inst), dest, operand);
try self.setRegOrMem(self.typeOfIndex(inst), dest, operand);
break :result dest;
};
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
@ -2494,7 +2496,7 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
}
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const vector_ty = self.air.typeOfIndex(inst);
const vector_ty = self.typeOfIndex(inst);
const len = vector_ty.vectorLen();
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
@ -2541,7 +2543,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
const mod = self.bin_file.options.module.?;
// If the type has no codegen bits, no need to store it.
const inst_ty = self.air.typeOf(inst);
const inst_ty = self.typeOf(inst);
if (!inst_ty.hasRuntimeBits(mod))
return MCValue{ .none = {} };
@ -2733,3 +2735,13 @@ fn parseRegName(name: []const u8) ?Register {
}
return std.meta.stringToEnum(Register, name);
}
fn typeOf(self: *Self, inst: Air.Inst.Ref) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(self: *Self, inst: Air.Inst.Index) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOfIndex(inst, mod.intern_pool);
}

View File

@ -490,13 +490,14 @@ fn gen(self: *Self) !void {
}
fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
const mod = self.bin_file.options.module.?;
const ip = &mod.intern_pool;
const air_tags = self.air.instructions.items(.tag);
for (body) |inst| {
// TODO: remove now-redundant isUnused calls from AIR handler functions
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) {
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*)) {
continue;
}
const old_air_bookkeeping = self.air_bookkeeping;
try self.ensureProcessDeathCapacity(Liveness.bpi);
@ -678,6 +679,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
.interned => unreachable, // excluded from function bodies
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -762,8 +764,8 @@ fn airAddSubWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs = try self.resolveInst(extra.lhs);
const rhs = try self.resolveInst(extra.rhs);
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
switch (lhs_ty.zigTypeTag(mod)) {
.Vector => return self.fail("TODO implement add_with_overflow/sub_with_overflow for vectors", .{}),
@ -836,7 +838,7 @@ fn airAddSubWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
}
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const vector_ty = self.air.typeOfIndex(inst);
const vector_ty = self.typeOfIndex(inst);
const len = vector_ty.vectorLen();
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
@ -871,7 +873,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const ptr_ty = self.air.typeOf(ty_op.operand);
const ptr_ty = self.typeOf(ty_op.operand);
const ptr = try self.resolveInst(ty_op.operand);
const array_ty = ptr_ty.childType();
const array_len = @intCast(u32, array_ty.arrayLen());
@ -935,7 +937,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
const arg_mcv = try self.resolveInst(input);
try self.register_manager.getReg(reg, null);
try self.genSetReg(self.air.typeOf(input), reg, arg_mcv);
try self.genSetReg(self.typeOf(input), reg, arg_mcv);
}
{
@ -1008,16 +1010,16 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
}
fn airArg(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const arg_index = self.arg_index;
self.arg_index += 1;
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const arg = self.args[arg_index];
const mcv = blk: {
switch (arg) {
.stack_offset => |off| {
const mod = self.bin_file.options.module.?;
const abi_size = math.cast(u32, ty.abiSize(mod)) orelse {
return self.fail("type '{}' too big to fit into stack frame", .{ty.fmt(mod)});
};
@ -1063,8 +1065,8 @@ fn airBinOp(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst))
.dead
else
@ -1088,8 +1090,8 @@ fn airPtrArithmetic(self: *Self, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst))
.dead
else
@ -1115,7 +1117,7 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
defer if (operand_lock) |lock| self.register_manager.unlockReg(lock);
const dest = try self.allocRegOrMem(inst, true);
try self.setRegOrMem(self.air.typeOfIndex(inst), dest, operand);
try self.setRegOrMem(self.typeOfIndex(inst), dest, operand);
break :result dest;
};
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
@ -1218,7 +1220,7 @@ fn airByteSwap(self: *Self, inst: Air.Inst.Index) !void {
// TODO: Fold byteswap+store into a single ST*A and load+byteswap into a single LD*A.
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand = try self.resolveInst(ty_op.operand);
const operand_ty = self.air.typeOf(ty_op.operand);
const operand_ty = self.typeOf(ty_op.operand);
switch (operand_ty.zigTypeTag(mod)) {
.Vector => return self.fail("TODO byteswap for vectors", .{}),
.Int => {
@ -1294,7 +1296,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const callee = pl_op.operand;
const extra = self.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra.end .. extra.end + extra.data.args_len]);
const ty = self.air.typeOf(callee);
const ty = self.typeOf(callee);
const mod = self.bin_file.options.module.?;
const fn_ty = switch (ty.zigTypeTag(mod)) {
.Fn => ty,
@ -1318,7 +1320,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
for (info.args, 0..) |mc_arg, arg_i| {
const arg = args[arg_i];
const arg_ty = self.air.typeOf(arg);
const arg_ty = self.typeOf(arg);
const arg_mcv = try self.resolveInst(arg);
switch (mc_arg) {
@ -1428,7 +1430,7 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const int_ty = switch (lhs_ty.zigTypeTag(mod)) {
.Vector => unreachable, // Handled by cmp_vector.
@ -1605,7 +1607,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
log.debug("consolidating else_entry {d} {}=>{}", .{ else_key, else_value, canon_mcv });
// TODO make sure the destination stack offset / register does not already have something
// going on there.
try self.setRegOrMem(self.air.typeOfIndex(else_key), canon_mcv, else_value);
try self.setRegOrMem(self.typeOfIndex(else_key), canon_mcv, else_value);
// TODO track the new register / stack allocation
}
try parent_branch.inst_table.ensureUnusedCapacity(self.gpa, saved_then_branch.inst_table.count());
@ -1632,7 +1634,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
log.debug("consolidating then_entry {d} {}=>{}", .{ then_key, parent_mcv, then_value });
// TODO make sure the destination stack offset / register does not already have something
// going on there.
try self.setRegOrMem(self.air.typeOfIndex(then_key), parent_mcv, then_value);
try self.setRegOrMem(self.typeOfIndex(then_key), parent_mcv, then_value);
// TODO track the new register / stack allocation
}
@ -1755,10 +1757,10 @@ fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, .dead, .{ ty_op.operand, .none, .none });
const mod = self.bin_file.options.module.?;
const operand_ty = self.air.typeOf(ty_op.operand);
const operand_ty = self.typeOf(ty_op.operand);
const operand = try self.resolveInst(ty_op.operand);
const info_a = operand_ty.intInfo(mod);
const info_b = self.air.typeOfIndex(inst).intInfo(mod);
const info_b = self.typeOfIndex(inst).intInfo(mod);
if (info_a.signedness != info_b.signedness)
return self.fail("TODO gen intcast sign safety in semantic analysis", .{});
@ -1780,7 +1782,7 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
break :result try self.isErr(ty, operand);
};
return self.finishAir(inst, result, .{ un_op, .none, .none });
@ -1790,7 +1792,7 @@ fn airIsNonErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
break :result try self.isNonErr(ty, operand);
};
return self.finishAir(inst, result, .{ un_op, .none, .none });
@ -1815,16 +1817,16 @@ fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
}
fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.air.typeOfIndex(inst);
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.typeOfIndex(inst);
const elem_size = elem_ty.abiSize(mod);
const result: MCValue = result: {
if (!elem_ty.hasRuntimeBits(mod))
break :result MCValue.none;
const ptr = try self.resolveInst(ty_op.operand);
const is_volatile = self.air.typeOf(ty_op.operand).isVolatilePtr();
const is_volatile = self.typeOf(ty_op.operand).isVolatilePtr();
if (self.liveness.isUnused(inst) and !is_volatile)
break :result MCValue.dead;
@ -1839,7 +1841,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
try self.load(dst_mcv, ptr, self.air.typeOf(ty_op.operand));
try self.load(dst_mcv, ptr, self.typeOf(ty_op.operand));
break :result dst_mcv;
};
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
@ -1882,8 +1884,8 @@ fn airMinMax(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result: MCValue = if (self.liveness.isUnused(inst))
.dead
@ -1897,8 +1899,8 @@ fn airMod(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
assert(lhs_ty.eql(rhs_ty, self.bin_file.options.module.?));
if (self.liveness.isUnused(inst))
@ -2045,8 +2047,8 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs = try self.resolveInst(extra.lhs);
const rhs = try self.resolveInst(extra.rhs);
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
switch (lhs_ty.zigTypeTag(mod)) {
.Vector => return self.fail("TODO implement mul_with_overflow for vectors", .{}),
@ -2108,7 +2110,7 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand = try self.resolveInst(ty_op.operand);
const operand_ty = self.air.typeOf(ty_op.operand);
const operand_ty = self.typeOf(ty_op.operand);
switch (operand) {
.dead => unreachable,
.unreach => unreachable,
@ -2285,8 +2287,8 @@ fn airRem(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
// TODO add safety check
@ -2341,8 +2343,8 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const lhs = try self.resolveInst(extra.lhs);
const rhs = try self.resolveInst(extra.rhs);
const lhs_ty = self.air.typeOf(extra.lhs);
const rhs_ty = self.air.typeOf(extra.rhs);
const lhs_ty = self.typeOf(extra.lhs);
const rhs_ty = self.typeOf(extra.rhs);
switch (lhs_ty.zigTypeTag(mod)) {
.Vector => return self.fail("TODO implement mul_with_overflow for vectors", .{}),
@ -2429,9 +2431,9 @@ fn airSlice(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const ptr = try self.resolveInst(bin_op.lhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const len = try self.resolveInst(bin_op.rhs);
const len_ty = self.air.typeOf(bin_op.rhs);
const len_ty = self.typeOf(bin_op.rhs);
const ptr_bits = self.target.ptrBitWidth();
const ptr_bytes = @divExact(ptr_bits, 8);
@ -2453,7 +2455,7 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
const slice_mcv = try self.resolveInst(bin_op.lhs);
const index_mcv = try self.resolveInst(bin_op.rhs);
const slice_ty = self.air.typeOf(bin_op.lhs);
const slice_ty = self.typeOf(bin_op.lhs);
const elem_ty = slice_ty.childType();
const mod = self.bin_file.options.module.?;
const elem_size = elem_ty.abiSize(mod);
@ -2544,8 +2546,8 @@ fn airStore(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr = try self.resolveInst(bin_op.lhs);
const value = try self.resolveInst(bin_op.rhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const value_ty = self.air.typeOf(bin_op.rhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const value_ty = self.typeOf(bin_op.rhs);
try self.store(ptr, value, ptr_ty, value_ty);
@ -2573,7 +2575,7 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const mod = self.bin_file.options.module.?;
const mcv = try self.resolveInst(operand);
const struct_ty = self.air.typeOf(operand);
const struct_ty = self.typeOf(operand);
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
switch (mcv) {
@ -2659,8 +2661,8 @@ fn airTagName(self: *Self, inst: Air.Inst.Index) !void {
fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand = try self.resolveInst(ty_op.operand);
const operand_ty = self.air.typeOf(ty_op.operand);
const dest_ty = self.air.typeOfIndex(inst);
const operand_ty = self.typeOf(ty_op.operand);
const dest_ty = self.typeOfIndex(inst);
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else blk: {
break :blk try self.trunc(inst, operand, operand_ty, dest_ty);
@ -2674,7 +2676,7 @@ fn airTry(self: *Self, inst: Air.Inst.Index) !void {
const extra = self.air.extraData(Air.Try, pl_op.payload);
const body = self.air.extra[extra.end..][0..extra.data.body_len];
const result: MCValue = result: {
const error_union_ty = self.air.typeOf(pl_op.operand);
const error_union_ty = self.typeOf(pl_op.operand);
const error_union = try self.resolveInst(pl_op.operand);
const is_err_result = try self.isErr(error_union_ty, error_union);
const reloc = try self.condBr(is_err_result);
@ -2706,7 +2708,7 @@ fn airUnionInit(self: *Self, inst: Air.Inst.Index) !void {
fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_ty = self.air.typeOf(ty_op.operand);
const error_union_ty = self.typeOf(ty_op.operand);
const payload_ty = error_union_ty.errorUnionPayload();
const mcv = try self.resolveInst(ty_op.operand);
const mod = self.bin_file.options.module.?;
@ -2720,7 +2722,7 @@ fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
fn airUnwrapErrPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const error_union_ty = self.air.typeOf(ty_op.operand);
const error_union_ty = self.typeOf(ty_op.operand);
const payload_ty = error_union_ty.errorUnionPayload();
const mod = self.bin_file.options.module.?;
if (!payload_ty.hasRuntimeBits(mod)) break :result MCValue.none;
@ -2753,12 +2755,12 @@ fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) !void {
}
fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const optional_ty = self.air.typeOfIndex(inst);
const optional_ty = self.typeOfIndex(inst);
// Optional with a zero-bit payload type is just a boolean true
const mod = self.bin_file.options.module.?;
if (optional_ty.abiSize(mod) == 1)
break :result MCValue{ .immediate = 1 };
@ -2794,9 +2796,9 @@ fn allocMem(self: *Self, inst: Air.Inst.Index, abi_size: u32, abi_align: u32) !u
/// Use a pointer instruction as the basis for allocating stack memory.
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
const elem_ty = self.air.typeOfIndex(inst).elemType();
const mod = self.bin_file.options.module.?;
const elem_ty = self.typeOfIndex(inst).elemType();
if (!elem_ty.hasRuntimeBits(mod)) {
// As this stack item will never be dereferenced at runtime,
// return the stack offset 0. Stack offset 0 will be where all
@ -2814,8 +2816,8 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
}
fn allocRegOrMem(self: *Self, inst: Air.Inst.Index, reg_ok: bool) !MCValue {
const elem_ty = self.air.typeOfIndex(inst);
const mod = self.bin_file.options.module.?;
const elem_ty = self.typeOfIndex(inst);
const abi_size = math.cast(u32, elem_ty.abiSize(mod)) orelse {
return self.fail("type '{}' too big to fit into stack frame", .{elem_ty.fmt(mod)});
};
@ -3406,7 +3408,7 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
const block_data = self.blocks.getPtr(block).?;
const mod = self.bin_file.options.module.?;
if (self.air.typeOf(operand).hasRuntimeBits(mod)) {
if (self.typeOf(operand).hasRuntimeBits(mod)) {
const operand_mcv = try self.resolveInst(operand);
const block_mcv = block_data.mcv;
if (block_mcv == .none) {
@ -3415,13 +3417,13 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
.register, .stack_offset, .memory => operand_mcv,
.immediate => blk: {
const new_mcv = try self.allocRegOrMem(block, true);
try self.setRegOrMem(self.air.typeOfIndex(block), new_mcv, operand_mcv);
try self.setRegOrMem(self.typeOfIndex(block), new_mcv, operand_mcv);
break :blk new_mcv;
},
else => return self.fail("TODO implement block_data.mcv = operand_mcv for {}", .{operand_mcv}),
};
} else {
try self.setRegOrMem(self.air.typeOfIndex(block), block_mcv, operand_mcv);
try self.setRegOrMem(self.typeOfIndex(block), block_mcv, operand_mcv);
}
}
return self.brVoid(block);
@ -4549,7 +4551,7 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type, role: RegisterView)
fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!MCValue {
const mod = self.bin_file.options.module.?;
const ty = self.air.typeOf(ref);
const ty = self.typeOf(ref);
// If the type has no codegen bits, no need to store it.
if (!ty.hasRuntimeBitsIgnoreComptime(mod)) return .none;
@ -4654,7 +4656,7 @@ fn spillConditionFlagsIfOccupied(self: *Self) !void {
else => unreachable, // mcv doesn't occupy the compare flags
};
try self.setRegOrMem(self.air.typeOfIndex(inst_to_save), new_mcv, mcv);
try self.setRegOrMem(self.typeOfIndex(inst_to_save), new_mcv, mcv);
log.debug("spilling {d} to mcv {any}", .{ inst_to_save, new_mcv });
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@ -4678,7 +4680,7 @@ pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void
assert(reg == reg_mcv.register);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
try branch.inst_table.put(self.gpa, inst, stack_mcv);
try self.genSetStack(self.air.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
try self.genSetStack(self.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv);
}
fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type) InnerError!void {
@ -4726,7 +4728,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
return if (self.liveness.isUnused(inst)) .dead else result: {
const mod = self.bin_file.options.module.?;
const mcv = try self.resolveInst(operand);
const ptr_ty = self.air.typeOf(operand);
const ptr_ty = self.typeOf(operand);
const struct_ty = ptr_ty.childType();
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
switch (mcv) {
@ -4885,3 +4887,13 @@ fn wantSafety(self: *Self) bool {
.ReleaseSmall => false,
};
}
fn typeOf(self: *Self, inst: Air.Inst.Ref) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(self: *Self, inst: Air.Inst.Index) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOfIndex(inst, mod.intern_pool);
}

View File

@ -790,7 +790,7 @@ fn resolveInst(func: *CodeGen, ref: Air.Inst.Ref) InnerError!WValue {
const mod = func.bin_file.base.options.module.?;
const val = func.air.value(ref, mod).?;
const ty = func.air.typeOf(ref);
const ty = func.typeOf(ref);
if (!ty.hasRuntimeBitsIgnoreComptime(mod) and !ty.isInt(mod) and !ty.isError(mod)) {
gop.value_ptr.* = WValue{ .none = {} };
return gop.value_ptr.*;
@ -1260,7 +1260,7 @@ fn genFunc(func: *CodeGen) InnerError!void {
// we emit an unreachable instruction to tell the stack validator that part will never be reached.
if (func_type.returns.len != 0 and func.air.instructions.len > 0) {
const inst = @intCast(u32, func.air.instructions.len - 1);
const last_inst_ty = func.air.typeOfIndex(inst);
const last_inst_ty = func.typeOfIndex(inst);
if (!last_inst_ty.hasRuntimeBitsIgnoreComptime(mod) or last_inst_ty.isNoReturn()) {
try func.addTag(.@"unreachable");
}
@ -1541,7 +1541,7 @@ fn allocStack(func: *CodeGen, ty: Type) !WValue {
/// if it is set, to ensure the stack alignment will be set correctly.
fn allocStackPtr(func: *CodeGen, inst: Air.Inst.Index) !WValue {
const mod = func.bin_file.base.options.module.?;
const ptr_ty = func.air.typeOfIndex(inst);
const ptr_ty = func.typeOfIndex(inst);
const pointee_ty = ptr_ty.childType();
if (func.initial_stack_value == .none) {
@ -1834,6 +1834,7 @@ fn genInst(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
return switch (air_tags[inst]) {
.constant => unreachable,
.const_ty => unreachable,
.interned => unreachable, // excluded from function bodies
.add => func.airBinOp(inst, .add),
.add_sat => func.airSatBinOp(inst, .add),
@ -2073,8 +2074,11 @@ fn genInst(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn genBody(func: *CodeGen, body: []const Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ip = &mod.intern_pool;
for (body) |inst| {
if (func.liveness.isUnused(inst) and !func.air.mustLower(inst)) {
if (func.liveness.isUnused(inst) and !func.air.mustLower(inst, ip.*)) {
continue;
}
const old_bookkeeping_value = func.air_bookkeeping;
@ -2134,8 +2138,8 @@ fn airRet(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airRetPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const child_type = func.air.typeOfIndex(inst).childType();
const mod = func.bin_file.base.options.module.?;
const child_type = func.typeOfIndex(inst).childType();
var result = result: {
if (!child_type.isFnOrHasRuntimeBitsIgnoreComptime(mod)) {
@ -2157,7 +2161,7 @@ fn airRetLoad(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const un_op = func.air.instructions.items(.data)[inst].un_op;
const operand = try func.resolveInst(un_op);
const ret_ty = func.air.typeOf(un_op).childType();
const ret_ty = func.typeOf(un_op).childType();
const fn_info = func.decl.ty.fnInfo();
if (!ret_ty.hasRuntimeBitsIgnoreComptime(mod)) {
@ -2179,7 +2183,7 @@ fn airCall(func: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
const pl_op = func.air.instructions.items(.data)[inst].pl_op;
const extra = func.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, func.air.extra[extra.end..][0..extra.data.args_len]);
const ty = func.air.typeOf(pl_op.operand);
const ty = func.typeOf(pl_op.operand);
const mod = func.bin_file.base.options.module.?;
const fn_ty = switch (ty.zigTypeTag(mod)) {
@ -2228,7 +2232,7 @@ fn airCall(func: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
for (args) |arg| {
const arg_val = try func.resolveInst(arg);
const arg_ty = func.air.typeOf(arg);
const arg_ty = func.typeOf(arg);
if (!arg_ty.hasRuntimeBitsIgnoreComptime(mod)) continue;
try func.lowerArg(fn_ty.fnInfo().cc, arg_ty, arg_val);
@ -2296,7 +2300,7 @@ fn airStore(func: *CodeGen, inst: Air.Inst.Index, safety: bool) InnerError!void
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
const ptr_ty = func.air.typeOf(bin_op.lhs);
const ptr_ty = func.typeOf(bin_op.lhs);
const ptr_info = ptr_ty.ptrInfo().data;
const ty = ptr_ty.childType();
@ -2449,7 +2453,7 @@ fn airLoad(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const ty = func.air.getRefType(ty_op.ty);
const ptr_ty = func.air.typeOf(ty_op.operand);
const ptr_ty = func.typeOf(ty_op.operand);
const ptr_info = ptr_ty.ptrInfo().data;
if (!ty.hasRuntimeBitsIgnoreComptime(mod)) return func.finishAir(inst, .none, &.{ty_op.operand});
@ -2522,11 +2526,11 @@ fn load(func: *CodeGen, operand: WValue, ty: Type, offset: u32) InnerError!WValu
}
fn airArg(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const arg_index = func.arg_index;
const arg = func.args[arg_index];
const cc = func.decl.ty.fnInfo().cc;
const arg_ty = func.air.typeOfIndex(inst);
const mod = func.bin_file.base.options.module.?;
const arg_ty = func.typeOfIndex(inst);
if (cc == .C) {
const arg_classes = abi.classifyType(arg_ty, mod);
for (arg_classes) |class| {
@ -2572,8 +2576,8 @@ fn airBinOp(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerError!void {
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
const lhs_ty = func.air.typeOf(bin_op.lhs);
const rhs_ty = func.air.typeOf(bin_op.rhs);
const lhs_ty = func.typeOf(bin_op.lhs);
const rhs_ty = func.typeOf(bin_op.rhs);
// For certain operations, such as shifting, the types are different.
// When converting this to a WebAssembly type, they *must* match to perform
@ -2770,7 +2774,7 @@ const FloatOp = enum {
fn airUnaryFloatOp(func: *CodeGen, inst: Air.Inst.Index, op: FloatOp) InnerError!void {
const un_op = func.air.instructions.items(.data)[inst].un_op;
const operand = try func.resolveInst(un_op);
const ty = func.air.typeOf(un_op);
const ty = func.typeOf(un_op);
const result = try (try func.floatOp(op, ty, &.{operand})).toLocal(func, ty);
func.finishAir(inst, result, &.{un_op});
@ -2847,8 +2851,8 @@ fn airWrapBinOp(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerError!void {
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
const lhs_ty = func.air.typeOf(bin_op.lhs);
const rhs_ty = func.air.typeOf(bin_op.rhs);
const lhs_ty = func.typeOf(bin_op.lhs);
const rhs_ty = func.typeOf(bin_op.rhs);
if (lhs_ty.zigTypeTag(mod) == .Vector or rhs_ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: Implement wrapping arithmetic for vectors", .{});
@ -3387,7 +3391,7 @@ fn airCmp(func: *CodeGen, inst: Air.Inst.Index, op: std.math.CompareOperator) In
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
const operand_ty = func.air.typeOf(bin_op.lhs);
const operand_ty = func.typeOf(bin_op.lhs);
const result = try (try func.cmp(lhs, rhs, operand_ty, op)).toLocal(func, Type.u32); // comparison result is always 32 bits
func.finishAir(inst, result, &.{ bin_op.lhs, bin_op.rhs });
}
@ -3488,7 +3492,7 @@ fn airBr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const block = func.blocks.get(br.block_inst).?;
// if operand has codegen bits we should break with a value
if (func.air.typeOf(br.operand).hasRuntimeBitsIgnoreComptime(mod)) {
if (func.typeOf(br.operand).hasRuntimeBitsIgnoreComptime(mod)) {
const operand = try func.resolveInst(br.operand);
try func.lowerToStack(operand);
@ -3509,7 +3513,7 @@ fn airNot(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const operand_ty = func.air.typeOf(ty_op.operand);
const operand_ty = func.typeOf(ty_op.operand);
const mod = func.bin_file.base.options.module.?;
const result = result: {
@ -3575,8 +3579,8 @@ fn airBitcast(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const result = result: {
const operand = try func.resolveInst(ty_op.operand);
const wanted_ty = func.air.typeOfIndex(inst);
const given_ty = func.air.typeOf(ty_op.operand);
const wanted_ty = func.typeOfIndex(inst);
const given_ty = func.typeOf(ty_op.operand);
if (given_ty.isAnyFloat() or wanted_ty.isAnyFloat()) {
const bitcast_result = try func.bitcast(wanted_ty, given_ty, operand);
break :result try bitcast_result.toLocal(func, wanted_ty);
@ -3609,7 +3613,7 @@ fn airStructFieldPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const extra = func.air.extraData(Air.StructField, ty_pl.payload);
const struct_ptr = try func.resolveInst(extra.data.struct_operand);
const struct_ty = func.air.typeOf(extra.data.struct_operand).childType();
const struct_ty = func.typeOf(extra.data.struct_operand).childType();
const result = try func.structFieldPtr(inst, extra.data.struct_operand, struct_ptr, struct_ty, extra.data.field_index);
func.finishAir(inst, result, &.{extra.data.struct_operand});
}
@ -3617,7 +3621,7 @@ fn airStructFieldPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airStructFieldPtrIndex(func: *CodeGen, inst: Air.Inst.Index, index: u32) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const struct_ptr = try func.resolveInst(ty_op.operand);
const struct_ty = func.air.typeOf(ty_op.operand).childType();
const struct_ty = func.typeOf(ty_op.operand).childType();
const result = try func.structFieldPtr(inst, ty_op.operand, struct_ptr, struct_ty, index);
func.finishAir(inst, result, &.{ty_op.operand});
@ -3632,7 +3636,7 @@ fn structFieldPtr(
index: u32,
) InnerError!WValue {
const mod = func.bin_file.base.options.module.?;
const result_ty = func.air.typeOfIndex(inst);
const result_ty = func.typeOfIndex(inst);
const offset = switch (struct_ty.containerLayout()) {
.Packed => switch (struct_ty.zigTypeTag(mod)) {
.Struct => offset: {
@ -3663,7 +3667,7 @@ fn airStructFieldVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
const struct_field = func.air.extraData(Air.StructField, ty_pl.payload).data;
const struct_ty = func.air.typeOf(struct_field.struct_operand);
const struct_ty = func.typeOf(struct_field.struct_operand);
const operand = try func.resolveInst(struct_field.struct_operand);
const field_index = struct_field.field_index;
const field_ty = struct_ty.structFieldType(field_index);
@ -3762,7 +3766,7 @@ fn airSwitchBr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const blocktype = wasm.block_empty;
const pl_op = func.air.instructions.items(.data)[inst].pl_op;
const target = try func.resolveInst(pl_op.operand);
const target_ty = func.air.typeOf(pl_op.operand);
const target_ty = func.typeOf(pl_op.operand);
const switch_br = func.air.extraData(Air.SwitchBr, pl_op.payload);
const liveness = try func.liveness.getSwitchBr(func.gpa, inst, switch_br.data.cases_len + 1);
defer func.gpa.free(liveness.deaths);
@ -3940,7 +3944,7 @@ fn airIsErr(func: *CodeGen, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerErro
const mod = func.bin_file.base.options.module.?;
const un_op = func.air.instructions.items(.data)[inst].un_op;
const operand = try func.resolveInst(un_op);
const err_union_ty = func.air.typeOf(un_op);
const err_union_ty = func.typeOf(un_op);
const pl_ty = err_union_ty.errorUnionPayload();
const result = result: {
@ -3976,7 +3980,7 @@ fn airUnwrapErrUnionPayload(func: *CodeGen, inst: Air.Inst.Index, op_is_ptr: boo
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const op_ty = func.air.typeOf(ty_op.operand);
const op_ty = func.typeOf(ty_op.operand);
const err_ty = if (op_is_ptr) op_ty.childType() else op_ty;
const payload_ty = err_ty.errorUnionPayload();
@ -4004,7 +4008,7 @@ fn airUnwrapErrUnionError(func: *CodeGen, inst: Air.Inst.Index, op_is_ptr: bool)
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const op_ty = func.air.typeOf(ty_op.operand);
const op_ty = func.typeOf(ty_op.operand);
const err_ty = if (op_is_ptr) op_ty.childType() else op_ty;
const payload_ty = err_ty.errorUnionPayload();
@ -4028,9 +4032,9 @@ fn airWrapErrUnionPayload(func: *CodeGen, inst: Air.Inst.Index) InnerError!void
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const err_ty = func.air.typeOfIndex(inst);
const err_ty = func.typeOfIndex(inst);
const pl_ty = func.air.typeOf(ty_op.operand);
const pl_ty = func.typeOf(ty_op.operand);
const result = result: {
if (!pl_ty.hasRuntimeBitsIgnoreComptime(mod)) {
break :result func.reuseOperand(ty_op.operand, operand);
@ -4082,7 +4086,7 @@ fn airIntcast(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty = func.air.getRefType(ty_op.ty);
const operand = try func.resolveInst(ty_op.operand);
const operand_ty = func.air.typeOf(ty_op.operand);
const operand_ty = func.typeOf(ty_op.operand);
const mod = func.bin_file.base.options.module.?;
if (ty.zigTypeTag(mod) == .Vector or operand_ty.zigTypeTag(mod) == .Vector) {
return func.fail("todo Wasm intcast for vectors", .{});
@ -4155,7 +4159,7 @@ fn airIsNull(func: *CodeGen, inst: Air.Inst.Index, opcode: wasm.Opcode, op_kind:
const un_op = func.air.instructions.items(.data)[inst].un_op;
const operand = try func.resolveInst(un_op);
const op_ty = func.air.typeOf(un_op);
const op_ty = func.typeOf(un_op);
const optional_ty = if (op_kind == .ptr) op_ty.childType() else op_ty;
const is_null = try func.isNull(operand, optional_ty, opcode);
const result = try is_null.toLocal(func, optional_ty);
@ -4196,8 +4200,8 @@ fn isNull(func: *CodeGen, operand: WValue, optional_ty: Type, opcode: wasm.Opcod
fn airOptionalPayload(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const opt_ty = func.air.typeOf(ty_op.operand);
const payload_ty = func.air.typeOfIndex(inst);
const opt_ty = func.typeOf(ty_op.operand);
const payload_ty = func.typeOfIndex(inst);
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
return func.finishAir(inst, .none, &.{ty_op.operand});
}
@ -4219,7 +4223,7 @@ fn airOptionalPayload(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airOptionalPayloadPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const opt_ty = func.air.typeOf(ty_op.operand).childType();
const opt_ty = func.typeOf(ty_op.operand).childType();
const mod = func.bin_file.base.options.module.?;
const result = result: {
@ -4238,7 +4242,7 @@ fn airOptionalPayloadPtrSet(func: *CodeGen, inst: Air.Inst.Index) InnerError!voi
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const opt_ty = func.air.typeOf(ty_op.operand).childType();
const opt_ty = func.typeOf(ty_op.operand).childType();
var buf: Type.Payload.ElemType = undefined;
const payload_ty = opt_ty.optionalChild(&buf);
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
@ -4263,7 +4267,7 @@ fn airOptionalPayloadPtrSet(func: *CodeGen, inst: Air.Inst.Index) InnerError!voi
fn airWrapOptional(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const payload_ty = func.air.typeOf(ty_op.operand);
const payload_ty = func.typeOf(ty_op.operand);
const mod = func.bin_file.base.options.module.?;
const result = result: {
@ -4276,7 +4280,7 @@ fn airWrapOptional(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
const operand = try func.resolveInst(ty_op.operand);
const op_ty = func.air.typeOfIndex(inst);
const op_ty = func.typeOfIndex(inst);
if (op_ty.optionalReprIsPayload(mod)) {
break :result func.reuseOperand(ty_op.operand, operand);
}
@ -4304,7 +4308,7 @@ fn airSlice(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
const slice_ty = func.air.typeOfIndex(inst);
const slice_ty = func.typeOfIndex(inst);
const slice = try func.allocStack(slice_ty);
try func.store(slice, lhs, Type.usize, 0);
@ -4323,7 +4327,7 @@ fn airSliceLen(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airSliceElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const slice_ty = func.air.typeOf(bin_op.lhs);
const slice_ty = func.typeOf(bin_op.lhs);
const slice = try func.resolveInst(bin_op.lhs);
const index = try func.resolveInst(bin_op.rhs);
const elem_ty = slice_ty.childType();
@ -4395,7 +4399,7 @@ fn airTrunc(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const operand = try func.resolveInst(ty_op.operand);
const wanted_ty = func.air.getRefType(ty_op.ty);
const op_ty = func.air.typeOf(ty_op.operand);
const op_ty = func.typeOf(ty_op.operand);
const result = try func.trunc(operand, wanted_ty, op_ty);
func.finishAir(inst, try result.toLocal(func, wanted_ty), &.{ty_op.operand});
@ -4432,7 +4436,7 @@ fn airArrayToSlice(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const array_ty = func.air.typeOf(ty_op.operand).childType();
const array_ty = func.typeOf(ty_op.operand).childType();
const slice_ty = func.air.getRefType(ty_op.ty);
// create a slice on the stack
@ -4453,7 +4457,7 @@ fn airArrayToSlice(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airPtrToInt(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const un_op = func.air.instructions.items(.data)[inst].un_op;
const operand = try func.resolveInst(un_op);
const ptr_ty = func.air.typeOf(un_op);
const ptr_ty = func.typeOf(un_op);
const result = if (ptr_ty.isSlice())
try func.slicePtr(operand)
else switch (operand) {
@ -4467,7 +4471,7 @@ fn airPtrToInt(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airPtrElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = func.air.typeOf(bin_op.lhs);
const ptr_ty = func.typeOf(bin_op.lhs);
const ptr = try func.resolveInst(bin_op.lhs);
const index = try func.resolveInst(bin_op.rhs);
const elem_ty = ptr_ty.childType();
@ -4505,7 +4509,7 @@ fn airPtrElemPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
const bin_op = func.air.extraData(Air.Bin, ty_pl.payload).data;
const ptr_ty = func.air.typeOf(bin_op.lhs);
const ptr_ty = func.typeOf(bin_op.lhs);
const elem_ty = func.air.getRefType(ty_pl.ty).childType();
const mod = func.bin_file.base.options.module.?;
const elem_size = elem_ty.abiSize(mod);
@ -4538,7 +4542,7 @@ fn airPtrBinOp(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerError!void {
const ptr = try func.resolveInst(bin_op.lhs);
const offset = try func.resolveInst(bin_op.rhs);
const ptr_ty = func.air.typeOf(bin_op.lhs);
const ptr_ty = func.typeOf(bin_op.lhs);
const pointee_ty = switch (ptr_ty.ptrSize()) {
.One => ptr_ty.childType().childType(), // ptr to array, so get array element type
else => ptr_ty.childType(),
@ -4568,7 +4572,7 @@ fn airMemset(func: *CodeGen, inst: Air.Inst.Index, safety: bool) InnerError!void
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const ptr = try func.resolveInst(bin_op.lhs);
const ptr_ty = func.air.typeOf(bin_op.lhs);
const ptr_ty = func.typeOf(bin_op.lhs);
const value = try func.resolveInst(bin_op.rhs);
const len = switch (ptr_ty.ptrSize()) {
.Slice => try func.sliceLen(ptr),
@ -4683,7 +4687,7 @@ fn memset(func: *CodeGen, elem_ty: Type, ptr: WValue, len: WValue, value: WValue
fn airArrayElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const array_ty = func.air.typeOf(bin_op.lhs);
const array_ty = func.typeOf(bin_op.lhs);
const array = try func.resolveInst(bin_op.lhs);
const index = try func.resolveInst(bin_op.rhs);
const elem_ty = array_ty.childType();
@ -4750,12 +4754,12 @@ fn airArrayElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airFloatToInt(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const dest_ty = func.air.typeOfIndex(inst);
const op_ty = func.air.typeOf(ty_op.operand);
const mod = func.bin_file.base.options.module.?;
const dest_ty = func.typeOfIndex(inst);
const op_ty = func.typeOf(ty_op.operand);
if (op_ty.abiSize(mod) > 8) {
return func.fail("TODO: floatToInt for integers/floats with bitsize larger than 64 bits", .{});
@ -4775,12 +4779,12 @@ fn airFloatToInt(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airIntToFloat(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const dest_ty = func.air.typeOfIndex(inst);
const op_ty = func.air.typeOf(ty_op.operand);
const mod = func.bin_file.base.options.module.?;
const dest_ty = func.typeOfIndex(inst);
const op_ty = func.typeOf(ty_op.operand);
if (op_ty.abiSize(mod) > 8) {
return func.fail("TODO: intToFloat for integers/floats with bitsize larger than 64 bits", .{});
@ -4804,7 +4808,7 @@ fn airSplat(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const elem_ty = ty.childType();
if (determineSimdStoreStrategy(ty, mod) == .direct) blk: {
@ -4881,7 +4885,7 @@ fn airSelect(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airShuffle(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const inst_ty = func.air.typeOfIndex(inst);
const inst_ty = func.typeOfIndex(inst);
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
const extra = func.air.extraData(Air.Shuffle, ty_pl.payload).data;
@ -4894,7 +4898,7 @@ fn airShuffle(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const elem_size = child_ty.abiSize(mod);
// TODO: One of them could be by ref; handle in loop
if (isByRef(func.air.typeOf(extra.a), mod) or isByRef(inst_ty, mod)) {
if (isByRef(func.typeOf(extra.a), mod) or isByRef(inst_ty, mod)) {
const result = try func.allocStack(inst_ty);
for (0..mask_len) |index| {
@ -4951,11 +4955,11 @@ fn airReduce(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airAggregateInit(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
const result_ty = func.air.typeOfIndex(inst);
const result_ty = func.typeOfIndex(inst);
const len = @intCast(usize, result_ty.arrayLen());
const elements = @ptrCast([]const Air.Inst.Ref, func.air.extra[ty_pl.payload..][0..len]);
const mod = func.bin_file.base.options.module.?;
const result: WValue = result_value: {
switch (result_ty.zigTypeTag(mod)) {
@ -5085,7 +5089,7 @@ fn airUnionInit(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const extra = func.air.extraData(Air.UnionInit, ty_pl.payload).data;
const result = result: {
const union_ty = func.air.typeOfIndex(inst);
const union_ty = func.typeOfIndex(inst);
const layout = union_ty.unionGetLayout(mod);
const union_obj = union_ty.cast(Type.Payload.Union).?.data;
const field = union_obj.fields.values()[extra.field_index];
@ -5164,7 +5168,7 @@ fn airPrefetch(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airWasmMemorySize(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const pl_op = func.air.instructions.items(.data)[inst].pl_op;
const result = try func.allocLocal(func.air.typeOfIndex(inst));
const result = try func.allocLocal(func.typeOfIndex(inst));
try func.addLabel(.memory_size, pl_op.payload);
try func.addLabel(.local_set, result.local.value);
func.finishAir(inst, result, &.{pl_op.operand});
@ -5174,7 +5178,7 @@ fn airWasmMemoryGrow(func: *CodeGen, inst: Air.Inst.Index) !void {
const pl_op = func.air.instructions.items(.data)[inst].pl_op;
const operand = try func.resolveInst(pl_op.operand);
const result = try func.allocLocal(func.air.typeOfIndex(inst));
const result = try func.allocLocal(func.typeOfIndex(inst));
try func.emitWValue(operand);
try func.addLabel(.memory_grow, pl_op.payload);
try func.addLabel(.local_set, result.local.value);
@ -5263,8 +5267,8 @@ fn cmpBigInt(func: *CodeGen, lhs: WValue, rhs: WValue, operand_ty: Type, op: std
fn airSetUnionTag(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const un_ty = func.air.typeOf(bin_op.lhs).childType();
const tag_ty = func.air.typeOf(bin_op.rhs);
const un_ty = func.typeOf(bin_op.lhs).childType();
const tag_ty = func.typeOf(bin_op.rhs);
const layout = un_ty.unionGetLayout(mod);
if (layout.tag_size == 0) return func.finishAir(inst, .none, &.{ bin_op.lhs, bin_op.rhs });
@ -5288,8 +5292,8 @@ fn airGetUnionTag(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const un_ty = func.air.typeOf(ty_op.operand);
const tag_ty = func.air.typeOfIndex(inst);
const un_ty = func.typeOf(ty_op.operand);
const tag_ty = func.typeOfIndex(inst);
const layout = un_ty.unionGetLayout(mod);
if (layout.tag_size == 0) return func.finishAir(inst, .none, &.{ty_op.operand});
@ -5307,9 +5311,9 @@ fn airGetUnionTag(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
fn airFpext(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const dest_ty = func.air.typeOfIndex(inst);
const dest_ty = func.typeOfIndex(inst);
const operand = try func.resolveInst(ty_op.operand);
const extended = try func.fpext(operand, func.air.typeOf(ty_op.operand), dest_ty);
const extended = try func.fpext(operand, func.typeOf(ty_op.operand), dest_ty);
const result = try extended.toLocal(func, dest_ty);
func.finishAir(inst, result, &.{ty_op.operand});
}
@ -5352,9 +5356,9 @@ fn fpext(func: *CodeGen, operand: WValue, given: Type, wanted: Type) InnerError!
fn airFptrunc(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const dest_ty = func.air.typeOfIndex(inst);
const dest_ty = func.typeOfIndex(inst);
const operand = try func.resolveInst(ty_op.operand);
const truncated = try func.fptrunc(operand, func.air.typeOf(ty_op.operand), dest_ty);
const truncated = try func.fptrunc(operand, func.typeOf(ty_op.operand), dest_ty);
const result = try truncated.toLocal(func, dest_ty);
func.finishAir(inst, result, &.{ty_op.operand});
}
@ -5393,7 +5397,7 @@ fn airErrUnionPayloadPtrSet(func: *CodeGen, inst: Air.Inst.Index) InnerError!voi
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const err_set_ty = func.air.typeOf(ty_op.operand).childType();
const err_set_ty = func.typeOf(ty_op.operand).childType();
const payload_ty = err_set_ty.errorUnionPayload();
const operand = try func.resolveInst(ty_op.operand);
@ -5448,10 +5452,10 @@ fn airMemcpy(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const dst = try func.resolveInst(bin_op.lhs);
const dst_ty = func.air.typeOf(bin_op.lhs);
const dst_ty = func.typeOf(bin_op.lhs);
const ptr_elem_ty = dst_ty.childType();
const src = try func.resolveInst(bin_op.rhs);
const src_ty = func.air.typeOf(bin_op.rhs);
const src_ty = func.typeOf(bin_op.rhs);
const len = switch (dst_ty.ptrSize()) {
.Slice => blk: {
const slice_len = try func.sliceLen(dst);
@ -5485,12 +5489,12 @@ fn airRetAddr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airPopcount(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const operand = try func.resolveInst(ty_op.operand);
const op_ty = func.air.typeOf(ty_op.operand);
const result_ty = func.air.typeOfIndex(inst);
const mod = func.bin_file.base.options.module.?;
const op_ty = func.typeOf(ty_op.operand);
const result_ty = func.typeOfIndex(inst);
if (op_ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: Implement @popCount for vectors", .{});
@ -5585,7 +5589,7 @@ fn airAddSubWithOverflow(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerErro
const lhs_op = try func.resolveInst(extra.lhs);
const rhs_op = try func.resolveInst(extra.rhs);
const lhs_ty = func.air.typeOf(extra.lhs);
const lhs_ty = func.typeOf(extra.lhs);
const mod = func.bin_file.base.options.module.?;
if (lhs_ty.zigTypeTag(mod) == .Vector) {
@ -5599,7 +5603,7 @@ fn airAddSubWithOverflow(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerErro
};
if (wasm_bits == 128) {
const result = try func.addSubWithOverflowBigInt(lhs_op, rhs_op, lhs_ty, func.air.typeOfIndex(inst), op);
const result = try func.addSubWithOverflowBigInt(lhs_op, rhs_op, lhs_ty, func.typeOfIndex(inst), op);
return func.finishAir(inst, result, &.{ extra.lhs, extra.rhs });
}
@ -5649,7 +5653,7 @@ fn airAddSubWithOverflow(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerErro
var overflow_local = try overflow_bit.toLocal(func, Type.u32);
defer overflow_local.free(func);
const result_ptr = try func.allocStack(func.air.typeOfIndex(inst));
const result_ptr = try func.allocStack(func.typeOfIndex(inst));
try func.store(result_ptr, result, lhs_ty, 0);
const offset = @intCast(u32, lhs_ty.abiSize(mod));
try func.store(result_ptr, overflow_local, Type.initTag(.u1), offset);
@ -5729,8 +5733,8 @@ fn airShlWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const lhs = try func.resolveInst(extra.lhs);
const rhs = try func.resolveInst(extra.rhs);
const lhs_ty = func.air.typeOf(extra.lhs);
const rhs_ty = func.air.typeOf(extra.rhs);
const lhs_ty = func.typeOf(extra.lhs);
const rhs_ty = func.typeOf(extra.rhs);
if (lhs_ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: Implement overflow arithmetic for vectors", .{});
@ -5771,7 +5775,7 @@ fn airShlWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
var overflow_local = try overflow_bit.toLocal(func, Type.initTag(.u1));
defer overflow_local.free(func);
const result_ptr = try func.allocStack(func.air.typeOfIndex(inst));
const result_ptr = try func.allocStack(func.typeOfIndex(inst));
try func.store(result_ptr, result, lhs_ty, 0);
const offset = @intCast(u32, lhs_ty.abiSize(mod));
try func.store(result_ptr, overflow_local, Type.initTag(.u1), offset);
@ -5785,7 +5789,7 @@ fn airMulWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const lhs = try func.resolveInst(extra.lhs);
const rhs = try func.resolveInst(extra.rhs);
const lhs_ty = func.air.typeOf(extra.lhs);
const lhs_ty = func.typeOf(extra.lhs);
const mod = func.bin_file.base.options.module.?;
if (lhs_ty.zigTypeTag(mod) == .Vector) {
@ -5946,7 +5950,7 @@ fn airMulWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
var bin_op_local = try bin_op.toLocal(func, lhs_ty);
defer bin_op_local.free(func);
const result_ptr = try func.allocStack(func.air.typeOfIndex(inst));
const result_ptr = try func.allocStack(func.typeOfIndex(inst));
try func.store(result_ptr, bin_op_local, lhs_ty, 0);
const offset = @intCast(u32, lhs_ty.abiSize(mod));
try func.store(result_ptr, overflow_bit, Type.initTag(.u1), offset);
@ -5955,10 +5959,10 @@ fn airMulWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airMaxMin(func: *CodeGen, inst: Air.Inst.Index, op: enum { max, min }) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const ty = func.air.typeOfIndex(inst);
const mod = func.bin_file.base.options.module.?;
const ty = func.typeOfIndex(inst);
if (ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: `@maximum` and `@minimum` for vectors", .{});
}
@ -5986,11 +5990,11 @@ fn airMaxMin(func: *CodeGen, inst: Air.Inst.Index, op: enum { max, min }) InnerE
}
fn airMulAdd(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const pl_op = func.air.instructions.items(.data)[inst].pl_op;
const bin_op = func.air.extraData(Air.Bin, pl_op.payload).data;
const ty = func.air.typeOfIndex(inst);
const mod = func.bin_file.base.options.module.?;
const ty = func.typeOfIndex(inst);
if (ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: `@mulAdd` for vectors", .{});
}
@ -6020,11 +6024,11 @@ fn airMulAdd(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airClz(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const ty = func.air.typeOf(ty_op.operand);
const result_ty = func.air.typeOfIndex(inst);
const mod = func.bin_file.base.options.module.?;
const ty = func.typeOf(ty_op.operand);
const result_ty = func.typeOfIndex(inst);
if (ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: `@clz` for vectors", .{});
}
@ -6073,12 +6077,12 @@ fn airClz(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
}
fn airCtz(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const ty = func.air.typeOf(ty_op.operand);
const result_ty = func.air.typeOfIndex(inst);
const ty = func.typeOf(ty_op.operand);
const result_ty = func.typeOfIndex(inst);
const mod = func.bin_file.base.options.module.?;
if (ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: `@ctz` for vectors", .{});
}
@ -6141,7 +6145,7 @@ fn airDbgVar(func: *CodeGen, inst: Air.Inst.Index, is_ptr: bool) !void {
if (func.debug_output != .dwarf) return func.finishAir(inst, .none, &.{});
const pl_op = func.air.instructions.items(.data)[inst].pl_op;
const ty = func.air.typeOf(pl_op.operand);
const ty = func.typeOf(pl_op.operand);
const operand = try func.resolveInst(pl_op.operand);
log.debug("airDbgVar: %{d}: {}, {}", .{ inst, ty.fmtDebug(), operand });
@ -6179,7 +6183,7 @@ fn airTry(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const err_union = try func.resolveInst(pl_op.operand);
const extra = func.air.extraData(Air.Try, pl_op.payload);
const body = func.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = func.air.typeOf(pl_op.operand);
const err_union_ty = func.typeOf(pl_op.operand);
const result = try lowerTry(func, inst, err_union, body, err_union_ty, false);
func.finishAir(inst, result, &.{pl_op.operand});
}
@ -6189,7 +6193,7 @@ fn airTryPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const extra = func.air.extraData(Air.TryPtr, ty_pl.payload);
const err_union_ptr = try func.resolveInst(extra.data.ptr);
const body = func.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = func.air.typeOf(extra.data.ptr).childType();
const err_union_ty = func.typeOf(extra.data.ptr).childType();
const result = try lowerTry(func, inst, err_union_ptr, body, err_union_ty, true);
func.finishAir(inst, result, &.{extra.data.ptr});
}
@ -6251,11 +6255,11 @@ fn lowerTry(
}
fn airByteSwap(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const operand = try func.resolveInst(ty_op.operand);
const mod = func.bin_file.base.options.module.?;
if (ty.zigTypeTag(mod) == .Vector) {
return func.fail("TODO: @byteSwap for vectors", .{});
@ -6325,7 +6329,7 @@ fn airDiv(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
@ -6340,7 +6344,7 @@ fn airDivTrunc(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
@ -6361,7 +6365,7 @@ fn airDivFloor(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const mod = func.bin_file.base.options.module.?;
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
@ -6512,7 +6516,7 @@ fn airSatBinOp(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerError!void {
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const mod = func.bin_file.base.options.module.?;
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const lhs = try func.resolveInst(bin_op.lhs);
const rhs = try func.resolveInst(bin_op.rhs);
@ -6626,7 +6630,7 @@ fn airShlSat(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
const mod = func.bin_file.base.options.module.?;
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const int_info = ty.intInfo(mod);
const is_signed = int_info.signedness == .signed;
if (int_info.bits > 64) {
@ -6785,11 +6789,11 @@ fn callIntrinsic(
fn airTagName(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const un_op = func.air.instructions.items(.data)[inst].un_op;
const operand = try func.resolveInst(un_op);
const enum_ty = func.air.typeOf(un_op);
const enum_ty = func.typeOf(un_op);
const func_sym_index = try func.getTagNameFunction(enum_ty);
const result_ptr = try func.allocStack(func.air.typeOfIndex(inst));
const result_ptr = try func.allocStack(func.typeOfIndex(inst));
try func.lowerToStack(result_ptr);
try func.emitWValue(operand);
try func.addLabel(.call, func_sym_index);
@ -7061,9 +7065,9 @@ fn airCmpxchg(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
const extra = func.air.extraData(Air.Cmpxchg, ty_pl.payload).data;
const ptr_ty = func.air.typeOf(extra.ptr);
const ptr_ty = func.typeOf(extra.ptr);
const ty = ptr_ty.childType();
const result_ty = func.air.typeOfIndex(inst);
const result_ty = func.typeOfIndex(inst);
const ptr_operand = try func.resolveInst(extra.ptr);
const expected_val = try func.resolveInst(extra.expected_value);
@ -7133,7 +7137,7 @@ fn airAtomicLoad(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const mod = func.bin_file.base.options.module.?;
const atomic_load = func.air.instructions.items(.data)[inst].atomic_load;
const ptr = try func.resolveInst(atomic_load.ptr);
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
if (func.useAtomicFeature()) {
const tag: wasm.AtomicsOpcode = switch (ty.abiSize(mod)) {
@ -7163,7 +7167,7 @@ fn airAtomicRmw(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ptr = try func.resolveInst(pl_op.operand);
const operand = try func.resolveInst(extra.operand);
const ty = func.air.typeOfIndex(inst);
const ty = func.typeOfIndex(inst);
const op: std.builtin.AtomicRmwOp = extra.op();
if (func.useAtomicFeature()) {
@ -7348,7 +7352,7 @@ fn airAtomicStore(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const ptr = try func.resolveInst(bin_op.lhs);
const operand = try func.resolveInst(bin_op.rhs);
const ptr_ty = func.air.typeOf(bin_op.lhs);
const ptr_ty = func.typeOf(bin_op.lhs);
const ty = ptr_ty.childType();
if (func.useAtomicFeature()) {
@ -7380,3 +7384,13 @@ fn airFrameAddress(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const result = try WValue.toLocal(.stack, func, Type.usize);
return func.finishAir(inst, result, &.{});
}
fn typeOf(func: *CodeGen, inst: Air.Inst.Ref) Type {
const mod = func.bin_file.base.options.module.?;
return func.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(func: *CodeGen, inst: Air.Inst.Index) Type {
const mod = func.bin_file.base.options.module.?;
return func.air.typeOfIndex(inst, mod.intern_pool);
}
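A minimal usage sketch of the wrappers above (the `airExample` handler is hypothetical, made up purely for illustration; `CodeGen`, `InnerError`, and the instruction accessors are the ones used throughout this file): call sites in this backend go through the wrapper instead of reaching into `func.air` with an explicit pool argument, so the `InternPool` lookup stays in one place.

fn airExample(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
    const un_op = func.air.instructions.items(.data)[inst].un_op;
    // The wrapper fetches the module's intern_pool itself, so the call site
    // no longer needs a Module or InternPool handle just to ask for a type.
    const operand_ty = func.typeOf(un_op);
    _ = operand_ty;
}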

View File

@ -447,7 +447,7 @@ const InstTracking = struct {
else => unreachable,
}
tracking_log.debug("spill %{d} from {} to {}", .{ inst, self.short, self.long });
try function.genCopy(function.air.typeOfIndex(inst), self.long, self.short);
try function.genCopy(function.typeOfIndex(inst), self.long, self.short);
}
fn reuseFrame(self: *InstTracking) void {
@ -537,7 +537,7 @@ const InstTracking = struct {
inst: Air.Inst.Index,
target: InstTracking,
) !void {
const ty = function.air.typeOfIndex(inst);
const ty = function.typeOfIndex(inst);
if ((self.long == .none or self.long == .reserved_frame) and target.long == .load_frame)
try function.genCopy(ty, target.long, self.short);
try function.genCopy(ty, target.short, self.short);
@ -1725,6 +1725,8 @@ fn gen(self: *Self) InnerError!void {
}
fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
const mod = self.bin_file.options.module.?;
const ip = &mod.intern_pool;
const air_tags = self.air.instructions.items(.tag);
for (body) |inst| {
@ -1733,7 +1735,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
try self.mir_to_air_map.put(self.gpa, mir_inst, inst);
}
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) continue;
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*)) continue;
wip_mir_log.debug("{}", .{self.fmtAir(inst)});
verbose_tracking_log.debug("{}", .{self.fmtTracking()});
@ -1919,6 +1921,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
.interned => unreachable, // excluded from function bodies
.unreach => if (self.wantSafety()) try self.airTrap() else self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -2255,7 +2258,7 @@ fn allocFrameIndex(self: *Self, alloc: FrameAlloc) !FrameIndex {
/// Use a pointer instruction as the basis for allocating stack memory.
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !FrameIndex {
const mod = self.bin_file.options.module.?;
const ptr_ty = self.air.typeOfIndex(inst);
const ptr_ty = self.typeOfIndex(inst);
const val_ty = ptr_ty.childType();
return self.allocFrameIndex(FrameAlloc.init(.{
.size = math.cast(u32, val_ty.abiSize(mod)) orelse {
@ -2266,7 +2269,7 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !FrameIndex {
}
fn allocRegOrMem(self: *Self, inst: Air.Inst.Index, reg_ok: bool) !MCValue {
return self.allocRegOrMemAdvanced(self.air.typeOfIndex(inst), inst, reg_ok);
return self.allocRegOrMemAdvanced(self.typeOfIndex(inst), inst, reg_ok);
}
fn allocTempRegOrMem(self: *Self, elem_ty: Type, reg_ok: bool) !MCValue {
@ -2485,7 +2488,7 @@ fn airRetPtr(self: *Self, inst: Air.Inst.Index) !void {
.load_frame => .{ .register_offset = .{
.reg = (try self.copyToRegisterWithInstTracking(
inst,
self.air.typeOfIndex(inst),
self.typeOfIndex(inst),
self.ret_mcv.long,
)).register,
.off = self.ret_mcv.short.indirect.off,
@ -2496,9 +2499,9 @@ fn airRetPtr(self: *Self, inst: Air.Inst.Index) !void {
fn airFptrunc(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const dst_bits = dst_ty.floatBits(self.target.*);
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_bits = src_ty.floatBits(self.target.*);
const src_mcv = try self.resolveInst(ty_op.operand);
@ -2562,9 +2565,9 @@ fn airFptrunc(self: *Self, inst: Air.Inst.Index) !void {
fn airFpext(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const dst_bits = dst_ty.floatBits(self.target.*);
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_bits = src_ty.floatBits(self.target.*);
const src_mcv = try self.resolveInst(ty_op.operand);
@ -2625,10 +2628,10 @@ fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = result: {
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_int_info = src_ty.intInfo(mod);
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const dst_int_info = dst_ty.intInfo(mod);
const abi_size = @intCast(u32, dst_ty.abiSize(mod));
@ -2707,9 +2710,9 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const dst_abi_size = @intCast(u32, dst_ty.abiSize(mod));
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_abi_size = @intCast(u32, src_ty.abiSize(mod));
const result = result: {
@ -2818,7 +2821,7 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
fn airBoolToInt(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const operand = try self.resolveInst(un_op);
const dst_mcv = if (self.reuseOperand(inst, un_op, 0, operand))
@ -2834,11 +2837,11 @@ fn airSlice(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const slice_ty = self.air.typeOfIndex(inst);
const slice_ty = self.typeOfIndex(inst);
const ptr = try self.resolveInst(bin_op.lhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const len = try self.resolveInst(bin_op.rhs);
const len_ty = self.air.typeOf(bin_op.rhs);
const len_ty = self.typeOf(bin_op.rhs);
const frame_index = try self.allocFrameIndex(FrameAlloc.initType(slice_ty, mod));
try self.genSetMem(.{ .frame = frame_index }, 0, ptr_ty, ptr);
@ -2877,7 +2880,7 @@ fn activeIntBits(self: *Self, dst_air: Air.Inst.Ref) u16 {
const air_tag = self.air.instructions.items(.tag);
const air_data = self.air.instructions.items(.data);
const dst_ty = self.air.typeOf(dst_air);
const dst_ty = self.typeOf(dst_air);
const dst_info = dst_ty.intInfo(mod);
if (Air.refToIndex(dst_air)) |inst| {
switch (air_tag[inst]) {
@ -2889,7 +2892,7 @@ fn activeIntBits(self: *Self, dst_air: Air.Inst.Ref) u16 {
@boolToInt(src_int.positive and dst_info.signedness == .signed);
},
.intcast => {
const src_ty = self.air.typeOf(air_data[inst].ty_op.operand);
const src_ty = self.typeOf(air_data[inst].ty_op.operand);
const src_info = src_ty.intInfo(mod);
return @min(switch (src_info.signedness) {
.signed => switch (dst_info.signedness) {
@ -2913,7 +2916,7 @@ fn airMulDivBinOp(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const result = result: {
const tag = self.air.instructions.items(.tag)[inst];
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
switch (dst_ty.zigTypeTag(mod)) {
.Float, .Vector => break :result try self.genBinOp(inst, tag, bin_op.lhs, bin_op.rhs),
else => {},
@ -2942,7 +2945,7 @@ fn airMulDivBinOp(self: *Self, inst: Air.Inst.Index) !void {
fn airAddSat(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ty = self.air.typeOf(bin_op.lhs);
const ty = self.typeOf(bin_op.lhs);
const lhs_mcv = try self.resolveInst(bin_op.lhs);
const dst_mcv = if (lhs_mcv.isRegister() and self.reuseOperand(inst, bin_op.lhs, 0, lhs_mcv))
@ -3021,7 +3024,7 @@ fn airAddSat(self: *Self, inst: Air.Inst.Index) !void {
fn airSubSat(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ty = self.air.typeOf(bin_op.lhs);
const ty = self.typeOf(bin_op.lhs);
const lhs_mcv = try self.resolveInst(bin_op.lhs);
const dst_mcv = if (lhs_mcv.isRegister() and self.reuseOperand(inst, bin_op.lhs, 0, lhs_mcv))
@ -3093,7 +3096,7 @@ fn airSubSat(self: *Self, inst: Air.Inst.Index) !void {
fn airMulSat(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ty = self.air.typeOf(bin_op.lhs);
const ty = self.typeOf(bin_op.lhs);
try self.spillRegisters(&.{ .rax, .rdx });
const reg_locks = self.register_manager.lockRegs(2, .{ .rax, .rdx });
@ -3151,7 +3154,7 @@ fn airAddSubWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const result: MCValue = result: {
const tag = self.air.instructions.items(.tag)[inst];
const ty = self.air.typeOf(bin_op.lhs);
const ty = self.typeOf(bin_op.lhs);
switch (ty.zigTypeTag(mod)) {
.Vector => return self.fail("TODO implement add/sub with overflow for Vector type", .{}),
.Int => {
@ -3168,7 +3171,7 @@ fn airAddSubWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
.signed => .o,
};
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
if (int_info.bits >= 8 and math.isPowerOfTwo(int_info.bits)) {
switch (partial_mcv) {
.register => |reg| {
@ -3211,8 +3214,8 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const result: MCValue = result: {
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
switch (lhs_ty.zigTypeTag(mod)) {
.Vector => return self.fail("TODO implement shl with overflow for Vector type", .{}),
.Int => {
@ -3241,7 +3244,7 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
try self.genBinOpMir(.{ ._, .cmp }, lhs_ty, tmp_mcv, lhs);
const cc = Condition.ne;
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
if (int_info.bits >= 8 and math.isPowerOfTwo(int_info.bits)) {
switch (partial_mcv) {
.register => |reg| {
@ -3349,7 +3352,7 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const dst_ty = self.air.typeOf(bin_op.lhs);
const dst_ty = self.typeOf(bin_op.lhs);
const result: MCValue = switch (dst_ty.zigTypeTag(mod)) {
.Vector => return self.fail("TODO implement mul_with_overflow for Vector type", .{}),
.Int => result: {
@ -3370,7 +3373,7 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_ty = self.typeOfIndex(inst);
const extra_bits = if (dst_info.bits <= 64)
self.regExtraBits(dst_ty)
else
@ -3525,8 +3528,8 @@ fn airShlShrBinOp(self: *Self, inst: Air.Inst.Index) !void {
try self.register_manager.getReg(.rcx, null);
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
const lhs_ty = self.typeOf(bin_op.lhs);
const rhs_ty = self.typeOf(bin_op.rhs);
const result = try self.genShiftBinOp(tag, inst, lhs, rhs, lhs_ty, rhs_ty);
@ -3543,7 +3546,7 @@ fn airShlSat(self: *Self, inst: Air.Inst.Index) !void {
fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = result: {
const pl_ty = self.air.typeOfIndex(inst);
const pl_ty = self.typeOfIndex(inst);
const opt_mcv = try self.resolveInst(ty_op.operand);
if (self.reuseOperand(inst, ty_op.operand, 0, opt_mcv)) {
@ -3568,7 +3571,7 @@ fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) !void {
fn airOptionalPayloadPtr(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const opt_mcv = try self.resolveInst(ty_op.operand);
const dst_mcv = if (self.reuseOperand(inst, ty_op.operand, 0, opt_mcv))
@ -3582,8 +3585,8 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result = result: {
const dst_ty = self.air.typeOfIndex(inst);
const src_ty = self.air.typeOf(ty_op.operand);
const dst_ty = self.typeOfIndex(inst);
const src_ty = self.typeOf(ty_op.operand);
const opt_ty = src_ty.childType();
const src_mcv = try self.resolveInst(ty_op.operand);
@ -3615,7 +3618,7 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
fn airUnwrapErrUnionErr(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const err_union_ty = self.air.typeOf(ty_op.operand);
const err_union_ty = self.typeOf(ty_op.operand);
const err_ty = err_union_ty.errorUnionSet();
const payload_ty = err_union_ty.errorUnionPayload();
const operand = try self.resolveInst(ty_op.operand);
@ -3662,7 +3665,7 @@ fn airUnwrapErrUnionErr(self: *Self, inst: Air.Inst.Index) !void {
fn airUnwrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const err_union_ty = self.air.typeOf(ty_op.operand);
const err_union_ty = self.typeOf(ty_op.operand);
const operand = try self.resolveInst(ty_op.operand);
const result = try self.genUnwrapErrorUnionPayloadMir(inst, err_union_ty, operand);
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
@ -3720,7 +3723,7 @@ fn airUnwrapErrUnionErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_mcv = try self.resolveInst(ty_op.operand);
const src_reg = switch (src_mcv) {
.register => |reg| reg,
@ -3756,7 +3759,7 @@ fn airUnwrapErrUnionPayloadPtr(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_mcv = try self.resolveInst(ty_op.operand);
const src_reg = switch (src_mcv) {
.register => |reg| reg,
@ -3765,7 +3768,7 @@ fn airUnwrapErrUnionPayloadPtr(self: *Self, inst: Air.Inst.Index) !void {
const src_lock = self.register_manager.lockRegAssumeUnused(src_reg);
defer self.register_manager.unlockReg(src_lock);
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const dst_reg = if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv))
src_reg
else
@ -3791,7 +3794,7 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = result: {
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_mcv = try self.resolveInst(ty_op.operand);
const src_reg = switch (src_mcv) {
.register => |reg| reg,
@ -3816,7 +3819,7 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
if (self.liveness.isUnused(inst)) break :result .unreach;
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const dst_reg = if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv))
src_reg
else
@ -3856,10 +3859,10 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = result: {
const pl_ty = self.air.typeOf(ty_op.operand);
const pl_ty = self.typeOf(ty_op.operand);
if (!pl_ty.hasRuntimeBits(mod)) break :result .{ .immediate = 1 };
const opt_ty = self.air.typeOfIndex(inst);
const opt_ty = self.typeOfIndex(inst);
const pl_mcv = try self.resolveInst(ty_op.operand);
const same_repr = opt_ty.optionalReprIsPayload(mod);
if (same_repr and self.reuseOperand(inst, ty_op.operand, 0, pl_mcv)) break :result pl_mcv;
@ -3952,7 +3955,7 @@ fn airSlicePtr(self: *Self, inst: Air.Inst.Index) !void {
if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv)) break :result src_mcv;
const dst_mcv = try self.allocRegOrMem(inst, true);
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
try self.genCopy(dst_ty, dst_mcv, src_mcv);
break :result dst_mcv;
};
@ -3980,7 +3983,7 @@ fn airPtrSliceLenPtr(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_mcv = try self.resolveInst(ty_op.operand);
const src_reg = switch (src_mcv) {
.register => |reg| reg,
@ -3989,7 +3992,7 @@ fn airPtrSliceLenPtr(self: *Self, inst: Air.Inst.Index) !void {
const src_lock = self.register_manager.lockRegAssumeUnused(src_reg);
defer self.register_manager.unlockReg(src_lock);
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const dst_reg = if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv))
src_reg
else
@ -4014,7 +4017,7 @@ fn airPtrSliceLenPtr(self: *Self, inst: Air.Inst.Index) !void {
fn airPtrSlicePtrPtr(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const opt_mcv = try self.resolveInst(ty_op.operand);
const dst_mcv = if (self.reuseOperand(inst, ty_op.operand, 0, opt_mcv))
@ -4046,7 +4049,7 @@ fn elemOffset(self: *Self, index_ty: Type, index: MCValue, elem_size: u64) !Regi
fn genSliceElemPtr(self: *Self, lhs: Air.Inst.Ref, rhs: Air.Inst.Ref) !MCValue {
const mod = self.bin_file.options.module.?;
const slice_ty = self.air.typeOf(lhs);
const slice_ty = self.typeOf(lhs);
const slice_mcv = try self.resolveInst(lhs);
const slice_mcv_lock: ?RegisterLock = switch (slice_mcv) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -4059,7 +4062,7 @@ fn genSliceElemPtr(self: *Self, lhs: Air.Inst.Ref, rhs: Air.Inst.Ref) !MCValue {
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf);
const index_ty = self.air.typeOf(rhs);
const index_ty = self.typeOf(rhs);
const index_mcv = try self.resolveInst(rhs);
const index_mcv_lock: ?RegisterLock = switch (index_mcv) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -4083,7 +4086,7 @@ fn genSliceElemPtr(self: *Self, lhs: Air.Inst.Ref, rhs: Air.Inst.Ref) !MCValue {
fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const slice_ty = self.air.typeOf(bin_op.lhs);
const slice_ty = self.typeOf(bin_op.lhs);
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf);
@ -4105,7 +4108,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const array_ty = self.air.typeOf(bin_op.lhs);
const array_ty = self.typeOf(bin_op.lhs);
const array = try self.resolveInst(bin_op.lhs);
const array_lock: ?RegisterLock = switch (array) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -4116,7 +4119,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
const elem_ty = array_ty.childType();
const elem_abi_size = elem_ty.abiSize(mod);
const index_ty = self.air.typeOf(bin_op.rhs);
const index_ty = self.typeOf(bin_op.rhs);
const index = try self.resolveInst(bin_op.rhs);
const index_lock: ?RegisterLock = switch (index) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -4170,14 +4173,14 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
fn airPtrElemVal(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
// this is identical to the `airPtrElemPtr` codegen except that here an
// additional `mov` is needed at the end to get the actual value
const elem_ty = ptr_ty.elemType2(mod);
const elem_abi_size = @intCast(u32, elem_ty.abiSize(mod));
const index_ty = self.air.typeOf(bin_op.rhs);
const index_ty = self.typeOf(bin_op.rhs);
const index_mcv = try self.resolveInst(bin_op.rhs);
const index_lock = switch (index_mcv) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -4218,7 +4221,7 @@ fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.Bin, ty_pl.payload).data;
const ptr_ty = self.air.typeOf(extra.lhs);
const ptr_ty = self.typeOf(extra.lhs);
const ptr = try self.resolveInst(extra.lhs);
const ptr_lock: ?RegisterLock = switch (ptr) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -4228,7 +4231,7 @@ fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) !void {
const elem_ty = ptr_ty.elemType2(mod);
const elem_abi_size = elem_ty.abiSize(mod);
const index_ty = self.air.typeOf(extra.rhs);
const index_ty = self.typeOf(extra.rhs);
const index = try self.resolveInst(extra.rhs);
const index_lock: ?RegisterLock = switch (index) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -4249,9 +4252,9 @@ fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) !void {
fn airSetUnionTag(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_union_ty = self.air.typeOf(bin_op.lhs);
const ptr_union_ty = self.typeOf(bin_op.lhs);
const union_ty = ptr_union_ty.childType();
const tag_ty = self.air.typeOf(bin_op.rhs);
const tag_ty = self.typeOf(bin_op.rhs);
const layout = union_ty.unionGetLayout(mod);
if (layout.tag_size == 0) {
@ -4296,8 +4299,8 @@ fn airGetUnionTag(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const tag_ty = self.air.typeOfIndex(inst);
const union_ty = self.air.typeOf(ty_op.operand);
const tag_ty = self.typeOfIndex(inst);
const union_ty = self.typeOf(ty_op.operand);
const layout = union_ty.unionGetLayout(mod);
if (layout.tag_size == 0) {
@ -4350,8 +4353,8 @@ fn airClz(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result = result: {
const dst_ty = self.air.typeOfIndex(inst);
const src_ty = self.air.typeOf(ty_op.operand);
const dst_ty = self.typeOfIndex(inst);
const src_ty = self.typeOf(ty_op.operand);
const src_mcv = try self.resolveInst(ty_op.operand);
const mat_src_mcv = switch (src_mcv) {
@ -4479,8 +4482,8 @@ fn airCtz(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result = result: {
const dst_ty = self.air.typeOfIndex(inst);
const src_ty = self.air.typeOf(ty_op.operand);
const dst_ty = self.typeOfIndex(inst);
const src_ty = self.typeOf(ty_op.operand);
const src_bits = src_ty.bitSize(mod);
const src_mcv = try self.resolveInst(ty_op.operand);
@ -4575,7 +4578,7 @@ fn airPopcount(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = result: {
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_abi_size = @intCast(u32, src_ty.abiSize(mod));
const src_mcv = try self.resolveInst(ty_op.operand);
@ -4745,7 +4748,7 @@ fn airByteSwap(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_mcv = try self.resolveInst(ty_op.operand);
const dst_mcv = try self.byteSwap(inst, src_ty, src_mcv, true);
@ -4766,7 +4769,7 @@ fn airBitReverse(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_abi_size = @intCast(u32, src_ty.abiSize(mod));
const src_mcv = try self.resolveInst(ty_op.operand);
@ -4876,12 +4879,12 @@ fn airFloatSign(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const tag = self.air.instructions.items(.tag)[inst];
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const abi_size: u32 = switch (ty.abiSize(mod)) {
1...16 => 16,
17...32 => 32,
else => return self.fail("TODO implement airFloatSign for {}", .{
ty.fmt(self.bin_file.options.module.?),
ty.fmt(mod),
}),
};
const scalar_bits = ty.scalarType(mod).floatBits(self.target.*);
@ -5005,7 +5008,7 @@ fn airFloatSign(self: *Self, inst: Air.Inst.Index) !void {
fn airRound(self: *Self, inst: Air.Inst.Index, mode: u4) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const src_mcv = try self.resolveInst(un_op);
const dst_mcv = if (src_mcv.isRegister() and self.reuseOperand(inst, un_op, 0, src_mcv))
@ -5093,7 +5096,7 @@ fn genRound(self: *Self, ty: Type, dst_reg: Register, src_mcv: MCValue, mode: u4
fn airSqrt(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const abi_size = @intCast(u32, ty.abiSize(mod));
const src_mcv = try self.resolveInst(un_op);
@ -5399,7 +5402,7 @@ fn load(self: *Self, dst_mcv: MCValue, ptr_ty: Type, ptr_mcv: MCValue) InnerErro
fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const elem_ty = self.air.typeOfIndex(inst);
const elem_ty = self.typeOfIndex(inst);
const result: MCValue = result: {
if (!elem_ty.hasRuntimeBitsIgnoreComptime(mod)) break :result .none;
@ -5407,7 +5410,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
const reg_locks = self.register_manager.lockRegsAssumeUnused(3, .{ .rdi, .rsi, .rcx });
defer for (reg_locks) |lock| self.register_manager.unlockReg(lock);
const ptr_ty = self.air.typeOf(ty_op.operand);
const ptr_ty = self.typeOf(ty_op.operand);
const elem_size = elem_ty.abiSize(mod);
const elem_rc = regClassForType(elem_ty, mod);
@ -5548,7 +5551,7 @@ fn airStore(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
}
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_mcv = try self.resolveInst(bin_op.lhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const src_mcv = try self.resolveInst(bin_op.rhs);
if (ptr_ty.ptrInfo().data.host_size > 0) {
try self.packedStore(ptr_ty, ptr_mcv, src_mcv);
@ -5573,8 +5576,8 @@ fn airStructFieldPtrIndex(self: *Self, inst: Air.Inst.Index, index: u8) !void {
fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32) !MCValue {
const mod = self.bin_file.options.module.?;
const ptr_field_ty = self.air.typeOfIndex(inst);
const ptr_container_ty = self.air.typeOf(operand);
const ptr_field_ty = self.typeOfIndex(inst);
const ptr_container_ty = self.typeOf(operand);
const container_ty = ptr_container_ty.childType();
const field_offset = @intCast(i32, switch (container_ty.containerLayout()) {
.Auto, .Extern => container_ty.structFieldOffset(index, mod),
@ -5602,7 +5605,7 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
const operand = extra.struct_operand;
const index = extra.field_index;
const container_ty = self.air.typeOf(operand);
const container_ty = self.typeOf(operand);
const container_rc = regClassForType(container_ty, mod);
const field_ty = container_ty.structFieldType(index);
if (!field_ty.hasRuntimeBitsIgnoreComptime(mod)) break :result .none;
@ -5756,7 +5759,7 @@ fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
const inst_ty = self.air.typeOfIndex(inst);
const inst_ty = self.typeOfIndex(inst);
const parent_ty = inst_ty.childType();
const field_offset = @intCast(i32, parent_ty.structFieldOffset(extra.field_index, mod));
@ -5772,7 +5775,7 @@ fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
fn genUnOp(self: *Self, maybe_inst: ?Air.Inst.Index, tag: Air.Inst.Tag, src_air: Air.Inst.Ref) !MCValue {
const mod = self.bin_file.options.module.?;
const src_ty = self.air.typeOf(src_air);
const src_ty = self.typeOf(src_air);
const src_mcv = try self.resolveInst(src_air);
if (src_ty.zigTypeTag(mod) == .Vector) {
return self.fail("TODO implement genUnOp for {}", .{src_ty.fmt(self.bin_file.options.module.?)});
@ -6358,8 +6361,8 @@ fn genBinOp(
rhs_air: Air.Inst.Ref,
) !MCValue {
const mod = self.bin_file.options.module.?;
const lhs_ty = self.air.typeOf(lhs_air);
const rhs_ty = self.air.typeOf(rhs_air);
const lhs_ty = self.typeOf(lhs_air);
const rhs_ty = self.typeOf(rhs_air);
const abi_size = @intCast(u32, lhs_ty.abiSize(mod));
const maybe_mask_reg = switch (air_tag) {
@ -7918,6 +7921,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
}
fn airArg(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
// skip zero-bit arguments as they don't have a corresponding arg instruction
var arg_index = self.arg_index;
while (self.args[arg_index] == .none) arg_index += 1;
@ -7931,9 +7935,9 @@ fn airArg(self: *Self, inst: Air.Inst.Index) !void {
else => return self.fail("TODO implement arg for {}", .{dst_mcv}),
}
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const src_index = self.air.instructions.items(.data)[inst].arg.src_index;
const name = self.owner.mod_fn.getParamName(self.bin_file.options.module.?, src_index);
const name = self.owner.mod_fn.getParamName(mod, src_index);
try self.genArgDbgInfo(ty, name, dst_mcv);
break :result dst_mcv;
@ -8050,7 +8054,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const callee = pl_op.operand;
const extra = self.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra.end..][0..extra.data.args_len]);
const ty = self.air.typeOf(callee);
const ty = self.typeOf(callee);
const fn_ty = switch (ty.zigTypeTag(mod)) {
.Fn => ty,
@ -8085,7 +8089,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
else => unreachable,
}
for (args, info.args) |arg, mc_arg| {
const arg_ty = self.air.typeOf(arg);
const arg_ty = self.typeOf(arg);
const arg_mcv = try self.resolveInst(arg);
switch (mc_arg) {
.none => {},
@ -8112,7 +8116,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
defer if (ret_lock) |lock| self.register_manager.unlockReg(lock);
for (args, info.args) |arg, mc_arg| {
const arg_ty = self.air.typeOf(arg);
const arg_ty = self.typeOf(arg);
const arg_mcv = try self.resolveInst(arg);
switch (mc_arg) {
.none, .load_frame => {},
@ -8241,7 +8245,7 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
switch (self.ret_mcv.short) {
.none => {},
.register => try self.load(self.ret_mcv.short, ptr_ty, ptr),
@ -8258,7 +8262,7 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ty = self.air.typeOf(bin_op.lhs);
const ty = self.typeOf(bin_op.lhs);
try self.spillEflagsIfOccupied();
self.eflags_inst = inst;
@ -8476,7 +8480,7 @@ fn airCmpLtErrorsLen(self: *Self, inst: Air.Inst.Index) !void {
try self.spillEflagsIfOccupied();
self.eflags_inst = inst;
const op_ty = self.air.typeOf(un_op);
const op_ty = self.typeOf(un_op);
const op_abi_size = @intCast(u32, op_ty.abiSize(mod));
const op_mcv = try self.resolveInst(un_op);
const dst_reg = switch (op_mcv) {
@ -8496,7 +8500,7 @@ fn airTry(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const extra = self.air.extraData(Air.Try, pl_op.payload);
const body = self.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = self.air.typeOf(pl_op.operand);
const err_union_ty = self.typeOf(pl_op.operand);
const result = try self.genTry(inst, pl_op.operand, body, err_union_ty, false);
return self.finishAir(inst, result, .{ .none, .none, .none });
}
@ -8505,7 +8509,7 @@ fn airTryPtr(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.TryPtr, ty_pl.payload);
const body = self.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = self.air.typeOf(extra.data.ptr).childType();
const err_union_ty = self.typeOf(extra.data.ptr).childType();
const result = try self.genTry(inst, extra.data.ptr, body, err_union_ty, true);
return self.finishAir(inst, result, .{ .none, .none, .none });
}
@ -8584,7 +8588,7 @@ fn airDbgBlock(self: *Self, inst: Air.Inst.Index) !void {
fn airDbgVar(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const operand = pl_op.operand;
const ty = self.air.typeOf(operand);
const ty = self.typeOf(operand);
const mcv = try self.resolveInst(operand);
const name = self.air.nullTerminatedString(pl_op.payload);
@ -8626,7 +8630,7 @@ fn genCondBrMir(self: *Self, ty: Type, mcv: MCValue) !u32 {
fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const cond = try self.resolveInst(pl_op.operand);
const cond_ty = self.air.typeOf(pl_op.operand);
const cond_ty = self.typeOf(pl_op.operand);
const extra = self.air.extraData(Air.CondBr, pl_op.payload);
const then_body = self.air.extra[extra.end..][0..extra.data.then_body_len];
const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len];
@ -8871,7 +8875,7 @@ fn isNonErr(self: *Self, inst: Air.Inst.Index, ty: Type, operand: MCValue) !MCVa
fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const result = try self.isNull(inst, ty, operand);
return self.finishAir(inst, result, .{ un_op, .none, .none });
}
@ -8879,7 +8883,7 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const result = try self.isNullPtr(inst, ty, operand);
return self.finishAir(inst, result, .{ un_op, .none, .none });
}
@ -8887,7 +8891,7 @@ fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const result = switch (try self.isNull(inst, ty, operand)) {
.eflags => |cc| .{ .eflags = cc.negate() },
else => unreachable,
@ -8898,7 +8902,7 @@ fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const result = switch (try self.isNullPtr(inst, ty, operand)) {
.eflags => |cc| .{ .eflags = cc.negate() },
else => unreachable,
@ -8909,7 +8913,7 @@ fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
fn airIsErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const result = try self.isErr(inst, ty, operand);
return self.finishAir(inst, result, .{ un_op, .none, .none });
}
@ -8932,7 +8936,7 @@ fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
try self.load(operand, ptr_ty, operand_ptr);
const result = try self.isErr(inst, ptr_ty.childType(), operand);
@ -8943,7 +8947,7 @@ fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
fn airIsNonErr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand = try self.resolveInst(un_op);
const ty = self.air.typeOf(un_op);
const ty = self.typeOf(un_op);
const result = try self.isNonErr(inst, ty, operand);
return self.finishAir(inst, result, .{ un_op, .none, .none });
}
@ -8966,7 +8970,7 @@ fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
break :blk try self.allocRegOrMem(inst, true);
}
};
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
try self.load(operand, ptr_ty, operand_ptr);
const result = try self.isNonErr(inst, ptr_ty.childType(), operand);
@ -9032,7 +9036,7 @@ fn airBlock(self: *Self, inst: Air.Inst.Index) !void {
fn airSwitchBr(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const condition = try self.resolveInst(pl_op.operand);
const condition_ty = self.air.typeOf(pl_op.operand);
const condition_ty = self.typeOf(pl_op.operand);
const switch_br = self.air.extraData(Air.SwitchBr, pl_op.payload);
var extra_index: usize = switch_br.end;
var case_i: u32 = 0;
@ -9119,7 +9123,7 @@ fn airBr(self: *Self, inst: Air.Inst.Index) !void {
const br = self.air.instructions.items(.data)[inst].br;
const src_mcv = try self.resolveInst(br.operand);
const block_ty = self.air.typeOfIndex(br.block_inst);
const block_ty = self.typeOfIndex(br.block_inst);
const block_unused =
!block_ty.hasRuntimeBitsIgnoreComptime(mod) or self.liveness.isUnused(br.block_inst);
const block_tracking = self.inst_tracking.getPtr(br.block_inst).?;
@ -9244,7 +9248,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
const arg_mcv = try self.resolveInst(input);
try self.register_manager.getReg(reg, null);
try self.genSetReg(reg, self.air.typeOf(input), arg_mcv);
try self.genSetReg(reg, self.typeOf(input), arg_mcv);
}
{
@ -10169,7 +10173,7 @@ fn airPtrToInt(self: *Self, inst: Air.Inst.Index) !void {
if (self.reuseOperand(inst, un_op, 0, src_mcv)) break :result src_mcv;
const dst_mcv = try self.allocRegOrMem(inst, true);
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
try self.genCopy(dst_ty, dst_mcv, src_mcv);
break :result dst_mcv;
};
@ -10179,8 +10183,8 @@ fn airPtrToInt(self: *Self, inst: Air.Inst.Index) !void {
fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const dst_ty = self.air.typeOfIndex(inst);
const src_ty = self.air.typeOf(ty_op.operand);
const dst_ty = self.typeOfIndex(inst);
const src_ty = self.typeOf(ty_op.operand);
const result = result: {
const dst_rc = regClassForType(dst_ty, mod);
@ -10241,8 +10245,8 @@ fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const slice_ty = self.air.typeOfIndex(inst);
const ptr_ty = self.air.typeOf(ty_op.operand);
const slice_ty = self.typeOfIndex(inst);
const ptr_ty = self.typeOf(ty_op.operand);
const ptr = try self.resolveInst(ty_op.operand);
const array_ty = ptr_ty.childType();
const array_len = array_ty.arrayLen();
@ -10264,11 +10268,11 @@ fn airIntToFloat(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const src_ty = self.air.typeOf(ty_op.operand);
const src_ty = self.typeOf(ty_op.operand);
const src_bits = @intCast(u32, src_ty.bitSize(mod));
const src_signedness =
if (src_ty.isAbiInt(mod)) src_ty.intInfo(mod).signedness else .unsigned;
const dst_ty = self.air.typeOfIndex(inst);
const dst_ty = self.typeOfIndex(inst);
const src_size = math.divCeil(u32, @max(switch (src_signedness) {
.signed => src_bits,
@ -10318,8 +10322,8 @@ fn airFloatToInt(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const src_ty = self.air.typeOf(ty_op.operand);
const dst_ty = self.air.typeOfIndex(inst);
const src_ty = self.typeOf(ty_op.operand);
const dst_ty = self.typeOfIndex(inst);
const dst_bits = @intCast(u32, dst_ty.bitSize(mod));
const dst_signedness =
if (dst_ty.isAbiInt(mod)) dst_ty.intInfo(mod).signedness else .unsigned;
@ -10371,8 +10375,8 @@ fn airCmpxchg(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.Cmpxchg, ty_pl.payload).data;
const ptr_ty = self.air.typeOf(extra.ptr);
const val_ty = self.air.typeOf(extra.expected_value);
const ptr_ty = self.typeOf(extra.ptr);
const val_ty = self.typeOf(extra.expected_value);
const val_abi_size = @intCast(u32, val_ty.abiSize(mod));
try self.spillRegisters(&.{ .rax, .rdx, .rbx, .rcx });
@ -10712,10 +10716,10 @@ fn airAtomicRmw(self: *Self, inst: Air.Inst.Index) !void {
const unused = self.liveness.isUnused(inst);
const ptr_ty = self.air.typeOf(pl_op.operand);
const ptr_ty = self.typeOf(pl_op.operand);
const ptr_mcv = try self.resolveInst(pl_op.operand);
const val_ty = self.air.typeOf(extra.operand);
const val_ty = self.typeOf(extra.operand);
const val_mcv = try self.resolveInst(extra.operand);
const result =
@ -10726,7 +10730,7 @@ fn airAtomicRmw(self: *Self, inst: Air.Inst.Index) !void {
fn airAtomicLoad(self: *Self, inst: Air.Inst.Index) !void {
const atomic_load = self.air.instructions.items(.data)[inst].atomic_load;
const ptr_ty = self.air.typeOf(atomic_load.ptr);
const ptr_ty = self.typeOf(atomic_load.ptr);
const ptr_mcv = try self.resolveInst(atomic_load.ptr);
const ptr_lock = switch (ptr_mcv) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
@ -10747,10 +10751,10 @@ fn airAtomicLoad(self: *Self, inst: Air.Inst.Index) !void {
fn airAtomicStore(self: *Self, inst: Air.Inst.Index, order: std.builtin.AtomicOrder) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const ptr_mcv = try self.resolveInst(bin_op.lhs);
const val_ty = self.air.typeOf(bin_op.rhs);
const val_ty = self.typeOf(bin_op.rhs);
const val_mcv = try self.resolveInst(bin_op.rhs);
const result = try self.atomicOp(ptr_mcv, val_mcv, ptr_ty, val_ty, true, null, order);
@ -10768,7 +10772,7 @@ fn airMemset(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const dst_ptr = try self.resolveInst(bin_op.lhs);
const dst_ptr_ty = self.air.typeOf(bin_op.lhs);
const dst_ptr_ty = self.typeOf(bin_op.lhs);
const dst_ptr_lock: ?RegisterLock = switch (dst_ptr) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
else => null,
@ -10776,7 +10780,7 @@ fn airMemset(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
defer if (dst_ptr_lock) |lock| self.register_manager.unlockReg(lock);
const src_val = try self.resolveInst(bin_op.rhs);
const elem_ty = self.air.typeOf(bin_op.rhs);
const elem_ty = self.typeOf(bin_op.rhs);
const src_val_lock: ?RegisterLock = switch (src_val) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
else => null,
@ -10888,7 +10892,7 @@ fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const dst_ptr = try self.resolveInst(bin_op.lhs);
const dst_ptr_ty = self.air.typeOf(bin_op.lhs);
const dst_ptr_ty = self.typeOf(bin_op.lhs);
const dst_ptr_lock: ?RegisterLock = switch (dst_ptr) {
.register => |reg| self.register_manager.lockRegAssumeUnused(reg),
else => null,
@ -10922,8 +10926,8 @@ fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
fn airTagName(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const un_op = self.air.instructions.items(.data)[inst].un_op;
const inst_ty = self.air.typeOfIndex(inst);
const enum_ty = self.air.typeOf(un_op);
const inst_ty = self.typeOfIndex(inst);
const enum_ty = self.typeOf(un_op);
// We need a properly aligned and sized call frame to be able to call this function.
{
@ -10964,7 +10968,7 @@ fn airErrorName(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const un_op = self.air.instructions.items(.data)[inst].un_op;
const err_ty = self.air.typeOf(un_op);
const err_ty = self.typeOf(un_op);
const err_mcv = try self.resolveInst(un_op);
const err_reg = try self.copyToTmpRegister(err_ty, err_mcv);
const err_lock = self.register_manager.lockRegAssumeUnused(err_reg);
@ -11046,7 +11050,7 @@ fn airErrorName(self: *Self, inst: Air.Inst.Index) !void {
fn airSplat(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const vector_ty = self.air.typeOfIndex(inst);
const vector_ty = self.typeOfIndex(inst);
const dst_rc = regClassForType(vector_ty, mod);
const scalar_ty = vector_ty.scalarType(mod);
@ -11266,7 +11270,7 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const result_ty = self.air.typeOfIndex(inst);
const result_ty = self.typeOfIndex(inst);
const len = @intCast(usize, result_ty.arrayLen());
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
@ -11411,10 +11415,10 @@ fn airUnionInit(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.UnionInit, ty_pl.payload).data;
const result: MCValue = result: {
const union_ty = self.air.typeOfIndex(inst);
const union_ty = self.typeOfIndex(inst);
const layout = union_ty.unionGetLayout(mod);
const src_ty = self.air.typeOf(extra.init);
const src_ty = self.typeOf(extra.init);
const src_mcv = try self.resolveInst(extra.init);
if (layout.tag_size == 0) {
if (self.reuseOperand(inst, extra.init, 0, src_mcv)) break :result src_mcv;
@ -11461,7 +11465,7 @@ fn airMulAdd(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const extra = self.air.extraData(Air.Bin, pl_op.payload).data;
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
if (!self.hasFeature(.fma)) return self.fail("TODO implement airMulAdd for {}", .{
ty.fmt(self.bin_file.options.module.?),
@ -11609,7 +11613,7 @@ fn airMulAdd(self: *Self, inst: Air.Inst.Index) !void {
fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!MCValue {
const mod = self.bin_file.options.module.?;
const ty = self.air.typeOf(ref);
const ty = self.typeOf(ref);
// If the type has no codegen bits, no need to store it.
if (!ty.hasRuntimeBitsIgnoreComptime(mod)) return .none;
@ -11713,7 +11717,7 @@ fn resolveCallingConventionValues(
defer self.gpa.free(param_types);
fn_ty.fnParamTypes(param_types);
// TODO: promote var arg types
for (param_types[param_len..], var_args) |*param_ty, arg| param_ty.* = self.air.typeOf(arg);
for (param_types[param_len..], var_args) |*param_ty, arg| param_ty.* = self.typeOf(arg);
var result: CallMCValues = .{
.args = try self.gpa.alloc(MCValue, param_types.len),
// These undefined values must be populated before returning from this function.
@ -12023,3 +12027,13 @@ fn hasAnyFeatures(self: *Self, features: anytype) bool {
fn hasAllFeatures(self: *Self, features: anytype) bool {
return Target.x86.featureSetHasAll(self.target.cpu.features, features);
}
fn typeOf(self: *Self, inst: Air.Inst.Ref) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(self: *Self, inst: Air.Inst.Index) Type {
const mod = self.bin_file.options.module.?;
return self.air.typeOfIndex(inst, mod.intern_pool);
}
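Editor's note: these two wrappers exist because the AIR type queries now take the InternPool as a parameter; routing the lookup through `Self` keeps the many call sites above as one-liners. A minimal sketch of the call-site pattern, assuming a hypothetical `airExampleBinOp` handler (illustrative only, not part of the commit):

fn airExampleBinOp(self: *Self, inst: Air.Inst.Index) !void {
    const bin_op = self.air.instructions.items(.data)[inst].bin_op;
    // Long form that every call site would otherwise have to repeat:
    const mod = self.bin_file.options.module.?;
    const lhs_ty = self.air.typeOf(bin_op.lhs, mod.intern_pool);
    // Wrapper form, keeping the call in its previous one-argument shape:
    const rhs_ty = self.typeOf(bin_op.rhs);
    _ = lhs_ty;
    _ = rhs_ty;
    return self.finishAir(inst, .none, .{ bin_op.lhs, bin_op.rhs, .none });
}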

View File

@ -288,7 +288,7 @@ pub const Function = struct {
const mod = f.object.dg.module;
const val = f.air.value(ref, mod).?;
const ty = f.air.typeOf(ref);
const ty = f.typeOf(ref);
const result: CValue = if (lowersToArray(ty, mod)) result: {
const writer = f.object.code_header.writer();
@ -355,7 +355,7 @@ pub const Function = struct {
switch (c_value) {
.constant => |inst| {
const mod = f.object.dg.module;
const ty = f.air.typeOf(inst);
const ty = f.typeOf(inst);
const val = f.air.value(inst, mod).?;
return f.object.dg.renderValue(w, ty, val, location);
},
@ -368,7 +368,7 @@ pub const Function = struct {
switch (c_value) {
.constant => |inst| {
const mod = f.object.dg.module;
const ty = f.air.typeOf(inst);
const ty = f.typeOf(inst);
const val = f.air.value(inst, mod).?;
try w.writeAll("(*");
try f.object.dg.renderValue(w, ty, val, .Other);
@ -382,7 +382,7 @@ pub const Function = struct {
switch (c_value) {
.constant => |inst| {
const mod = f.object.dg.module;
const ty = f.air.typeOf(inst);
const ty = f.typeOf(inst);
const val = f.air.value(inst, mod).?;
try f.object.dg.renderValue(w, ty, val, .Other);
try w.writeByte('.');
@ -396,7 +396,7 @@ pub const Function = struct {
switch (c_value) {
.constant => |inst| {
const mod = f.object.dg.module;
const ty = f.air.typeOf(inst);
const ty = f.typeOf(inst);
const val = f.air.value(inst, mod).?;
try w.writeByte('(');
try f.object.dg.renderValue(w, ty, val, .Other);
@ -486,6 +486,16 @@ pub const Function = struct {
f.object.dg.ctypes.deinit(gpa);
f.object.dg.fwd_decl.deinit();
}
fn typeOf(f: *Function, inst: Air.Inst.Ref) Type {
const mod = f.object.dg.module;
return f.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(f: *Function, inst: Air.Inst.Index) Type {
const mod = f.object.dg.module;
return f.air.typeOfIndex(inst, mod.intern_pool);
}
};
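Editor's note: the C backend grows the same pair of thin wrappers as the x86_64 backend above; only the path to the `Module` differs (`f.object.dg.module` here versus `self.bin_file.options.module.?` there). A minimal usage sketch, assuming a hypothetical `airExampleCast` handler:

fn airExampleCast(f: *Function, inst: Air.Inst.Index) !CValue {
    const ty_op = f.air.instructions.items(.data)[inst].ty_op;
    const inst_ty = f.typeOfIndex(inst); // result type, resolved via mod.intern_pool
    const operand_ty = f.typeOf(ty_op.operand); // operand type, same wrapper
    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});
    _ = operand_ty;
    _ = operand;
    return try f.allocLocal(inst, inst_ty);
}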
/// This data is available when outputting .c code for a `Module`.
@ -2802,17 +2812,19 @@ fn genBodyResolveState(f: *Function, inst: Air.Inst.Index, leading_deaths: []con
fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfMemory }!void {
const mod = f.object.dg.module;
const ip = &mod.intern_pool;
const air_tags = f.air.instructions.items(.tag);
for (body) |inst| {
if (f.liveness.isUnused(inst) and !f.air.mustLower(inst)) {
if (f.liveness.isUnused(inst) and !f.air.mustLower(inst, ip.*))
continue;
}
const result_value = switch (air_tags[inst]) {
// zig fmt: off
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
.interned => unreachable, // excluded from function bodies
.arg => try airArg(f, inst),
.trap => try airTrap(f.object.writer()),
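Editor's note: like `.constant` and `.const_ty`, the new `.interned` tag is rejected up front because comptime-known values are excluded from function bodies; they only reach this backend as operands, which the existing resolution path already handles. A minimal sketch of that path, assuming a hypothetical `writeComptimeOperand` helper and a comptime-known operand:

fn writeComptimeOperand(f: *Function, w: anytype, ref: Air.Inst.Ref) !void {
    const mod = f.object.dg.module;
    const ty = f.typeOf(ref); // type lookup goes through the InternPool-aware wrapper
    const val = f.air.value(ref, mod).?; // comptime-known value of the operand
    try f.object.dg.renderValue(w, ty, val, .Other);
}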
@ -2837,7 +2849,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
.div_trunc, .div_exact => try airBinOp(f, inst, "/", "div_trunc", .none),
.rem => blk: {
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const lhs_scalar_ty = f.air.typeOf(bin_op.lhs).scalarType(mod);
const lhs_scalar_ty = f.typeOf(bin_op.lhs).scalarType(mod);
// For binary operations @TypeOf(lhs)==@TypeOf(rhs),
// so we only check one.
break :blk if (lhs_scalar_ty.isInt(mod))
@ -3088,7 +3100,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: []const u8) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
@ -3107,7 +3119,7 @@ fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: [
fn airPtrElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
if (!inst_ty.hasRuntimeBitsIgnoreComptime(mod)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
@ -3136,8 +3148,8 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data;
const inst_ty = f.air.typeOfIndex(inst);
const ptr_ty = f.air.typeOf(bin_op.lhs);
const inst_ty = f.typeOfIndex(inst);
const ptr_ty = f.typeOf(bin_op.lhs);
const elem_ty = ptr_ty.childType();
const elem_has_bits = elem_ty.hasRuntimeBitsIgnoreComptime(mod);
@ -3169,7 +3181,7 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
fn airSliceElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
if (!inst_ty.hasRuntimeBitsIgnoreComptime(mod)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
@ -3198,8 +3210,8 @@ fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data;
const inst_ty = f.air.typeOfIndex(inst);
const slice_ty = f.air.typeOf(bin_op.lhs);
const inst_ty = f.typeOfIndex(inst);
const slice_ty = f.typeOf(bin_op.lhs);
const elem_ty = slice_ty.elemType2(mod);
const elem_has_bits = elem_ty.hasRuntimeBitsIgnoreComptime(mod);
@ -3226,7 +3238,7 @@ fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
if (!inst_ty.hasRuntimeBitsIgnoreComptime(mod)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
return .none;
@ -3251,7 +3263,7 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const elem_type = inst_ty.elemType();
if (!elem_type.isFnOrHasRuntimeBitsIgnoreComptime(mod)) return .{ .undef = inst_ty };
@ -3267,7 +3279,7 @@ fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const elem_ty = inst_ty.elemType();
if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime(mod)) return .{ .undef = inst_ty };
@ -3282,7 +3294,7 @@ fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
}
fn airArg(f: *Function, inst: Air.Inst.Index) !CValue {
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_cty = try f.typeToIndex(inst_ty, .parameter);
const i = f.next_arg_index;
@ -3309,7 +3321,7 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const ptr_ty = f.air.typeOf(ty_op.operand);
const ptr_ty = f.typeOf(ty_op.operand);
const ptr_scalar_ty = ptr_ty.scalarType(mod);
const ptr_info = ptr_scalar_ty.ptrInfo().data;
const src_ty = ptr_info.pointee_type;
@ -3399,7 +3411,7 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
const writer = f.object.writer();
const op_inst = Air.refToIndex(un_op);
const op_ty = f.air.typeOf(un_op);
const op_ty = f.typeOf(un_op);
const ret_ty = if (is_ptr) op_ty.childType() else op_ty;
var lowered_ret_buf: LowerFnRetTyBuffer = undefined;
const lowered_ret_ty = lowerFnRetTy(ret_ty, &lowered_ret_buf, mod);
@ -3453,9 +3465,9 @@ fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const scalar_ty = operand_ty.scalarType(mod);
const writer = f.object.writer();
@ -3478,13 +3490,13 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const dest_int_info = inst_scalar_ty.intInfo(mod);
const dest_bits = dest_int_info.bits;
const dest_c_bits = toCIntBits(dest_int_info.bits) orelse
return f.fail("TODO: C backend: implement integer types larger than 128 bits", .{});
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const scalar_ty = operand_ty.scalarType(mod);
const scalar_int_info = scalar_ty.intInfo(mod);
@ -3572,7 +3584,7 @@ fn airBoolToInt(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const local = try f.allocLocal(inst, inst_ty);
const a = try Assignment.start(f, writer, inst_ty);
try f.writeCValue(writer, local, .Other);
@ -3587,12 +3599,12 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
// *a = b;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = f.air.typeOf(bin_op.lhs);
const ptr_ty = f.typeOf(bin_op.lhs);
const ptr_scalar_ty = ptr_ty.scalarType(mod);
const ptr_info = ptr_scalar_ty.ptrInfo().data;
const ptr_val = try f.resolveInst(bin_op.lhs);
const src_ty = f.air.typeOf(bin_op.rhs);
const src_ty = f.typeOf(bin_op.rhs);
const val_is_undef = if (f.air.value(bin_op.rhs, mod)) |v| v.isUndefDeep() else false;
@ -3737,8 +3749,8 @@ fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info:
const rhs = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const operand_ty = f.air.typeOf(bin_op.lhs);
const inst_ty = f.typeOfIndex(inst);
const operand_ty = f.typeOf(bin_op.lhs);
const scalar_ty = operand_ty.scalarType(mod);
const w = f.object.writer();
@ -3769,14 +3781,14 @@ fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info:
fn airNot(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const scalar_ty = operand_ty.scalarType(mod);
if (scalar_ty.tag() != .bool) return try airUnBuiltinCall(f, inst, "not", .bits);
const op = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
@ -3802,7 +3814,7 @@ fn airBinOp(
) !CValue {
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const operand_ty = f.air.typeOf(bin_op.lhs);
const operand_ty = f.typeOf(bin_op.lhs);
const scalar_ty = operand_ty.scalarType(mod);
if ((scalar_ty.isInt(mod) and scalar_ty.bitSize(mod) > 64) or scalar_ty.isRuntimeFloat())
return try airBinBuiltinCall(f, inst, operation, info);
@ -3811,7 +3823,7 @@ fn airBinOp(
const rhs = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
@ -3839,7 +3851,7 @@ fn airCmpOp(
operator: std.math.CompareOperator,
) !CValue {
const mod = f.object.dg.module;
const lhs_ty = f.air.typeOf(data.lhs);
const lhs_ty = f.typeOf(data.lhs);
const scalar_ty = lhs_ty.scalarType(mod);
const scalar_bits = scalar_ty.bitSize(mod);
@ -3855,12 +3867,12 @@ fn airCmpOp(
if (scalar_ty.isRuntimeFloat())
return airCmpBuiltinCall(f, inst, data, operator, .operator, .none);
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const lhs = try f.resolveInst(data.lhs);
const rhs = try f.resolveInst(data.rhs);
try reap(f, inst, &.{ data.lhs, data.rhs });
const rhs_ty = f.air.typeOf(data.rhs);
const rhs_ty = f.typeOf(data.rhs);
const need_cast = lhs_ty.isSinglePointer() or rhs_ty.isSinglePointer();
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
@ -3891,7 +3903,7 @@ fn airEquality(
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const operand_ty = f.air.typeOf(bin_op.lhs);
const operand_ty = f.typeOf(bin_op.lhs);
const operand_bits = operand_ty.bitSize(mod);
if (operand_ty.isInt(mod) and operand_bits > 64)
return airCmpBuiltinCall(
@ -3910,7 +3922,7 @@ fn airEquality(
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const local = try f.allocLocal(inst, inst_ty);
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = ");
@ -3954,7 +3966,7 @@ fn airEquality(
fn airCmpLtErrorsLen(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
@ -3976,7 +3988,7 @@ fn airPtrAddSub(f: *Function, inst: Air.Inst.Index, operator: u8) !CValue {
const rhs = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const elem_ty = inst_scalar_ty.elemType2(mod);
@ -4019,7 +4031,7 @@ fn airMinMax(f: *Function, inst: Air.Inst.Index, operator: u8, operation: []cons
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
if (inst_scalar_ty.isInt(mod) and inst_scalar_ty.bitSize(mod) > 64)
@ -4065,7 +4077,7 @@ fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue {
const len = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const ptr_ty = inst_ty.slicePtrFieldType(&buf);
@ -4110,7 +4122,7 @@ fn airCall(
const resolved_args = try gpa.alloc(CValue, args.len);
defer gpa.free(resolved_args);
for (resolved_args, args) |*resolved_arg, arg| {
const arg_ty = f.air.typeOf(arg);
const arg_ty = f.typeOf(arg);
const arg_cty = try f.typeToIndex(arg_ty, .parameter);
if (f.indexToCType(arg_cty).tag() == .void) {
resolved_arg.* = .none;
@ -4141,7 +4153,7 @@ fn airCall(
for (args) |arg| try bt.feed(arg);
}
const callee_ty = f.air.typeOf(pl_op.operand);
const callee_ty = f.typeOf(pl_op.operand);
const fn_ty = switch (callee_ty.zigTypeTag(mod)) {
.Fn => callee_ty,
.Pointer => callee_ty.childType(),
@ -4279,7 +4291,7 @@ fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue {
f.next_block_index += 1;
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const result = if (inst_ty.tag() != .void and !f.liveness.isUnused(inst))
try f.allocLocal(inst, inst_ty)
else
@ -4302,7 +4314,7 @@ fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue {
try f.object.indent_writer.insertNewline();
// noreturn blocks have no `br` instructions reaching them, so we don't want a label
if (!f.air.typeOfIndex(inst).isNoReturn()) {
if (!f.typeOfIndex(inst).isNoReturn()) {
// a label must be followed by an expression, so include an empty one.
try writer.print("zig_block_{d}:;\n", .{block_id});
}
@ -4314,7 +4326,7 @@ fn airTry(f: *Function, inst: Air.Inst.Index) !CValue {
const pl_op = f.air.instructions.items(.data)[inst].pl_op;
const extra = f.air.extraData(Air.Try, pl_op.payload);
const body = f.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = f.air.typeOf(pl_op.operand);
const err_union_ty = f.typeOf(pl_op.operand);
return lowerTry(f, inst, pl_op.operand, body, err_union_ty, false);
}
@ -4322,7 +4334,7 @@ fn airTryPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.TryPtr, ty_pl.payload);
const body = f.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = f.air.typeOf(extra.data.ptr).childType();
const err_union_ty = f.typeOf(extra.data.ptr).childType();
return lowerTry(f, inst, extra.data.ptr, body, err_union_ty, true);
}
@ -4336,7 +4348,7 @@ fn lowerTry(
) !CValue {
const mod = f.object.dg.module;
const err_union = try f.resolveInst(operand);
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const liveness_condbr = f.liveness.getCondBr(inst);
const writer = f.object.writer();
const payload_ty = err_union_ty.errorUnionPayload();
@ -4404,7 +4416,7 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !CValue {
// If result is .none then the value of the block is unused.
if (result != .none) {
const operand_ty = f.air.typeOf(branch.operand);
const operand_ty = f.typeOf(branch.operand);
const operand = try f.resolveInst(branch.operand);
try reap(f, inst, &.{branch.operand});
@ -4421,10 +4433,10 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !CValue {
fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const dest_ty = f.air.typeOfIndex(inst);
const dest_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(ty_op.operand);
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const bitcasted = try bitcast(f, dest_ty, operand, operand_ty);
try reap(f, inst, &.{ty_op.operand});
@ -4684,7 +4696,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
const pl_op = f.air.instructions.items(.data)[inst].pl_op;
const condition = try f.resolveInst(pl_op.operand);
try reap(f, inst, &.{pl_op.operand});
const condition_ty = f.air.typeOf(pl_op.operand);
const condition_ty = f.typeOf(pl_op.operand);
const switch_br = f.air.extraData(Air.SwitchBr, pl_op.payload);
const writer = f.object.writer();
@ -4784,7 +4796,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const result = result: {
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const local = if (inst_ty.hasRuntimeBitsIgnoreComptime(mod)) local: {
const local = try f.allocLocal(inst, inst_ty);
if (f.wantSafety()) {
@ -4814,7 +4826,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const is_reg = constraint[1] == '{';
if (is_reg) {
const output_ty = if (output == .none) inst_ty else f.air.typeOf(output).childType();
const output_ty = if (output == .none) inst_ty else f.typeOf(output).childType();
try writer.writeAll("register ");
const alignment = 0;
const local_value = try f.allocLocalValue(output_ty, alignment);
@ -4847,7 +4859,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const is_reg = constraint[0] == '{';
const input_val = try f.resolveInst(input);
if (asmInputNeedsLocal(constraint, input_val)) {
const input_ty = f.air.typeOf(input);
const input_ty = f.typeOf(input);
if (is_reg) try writer.writeAll("register ");
const alignment = 0;
const local_value = try f.allocLocalValue(input_ty, alignment);
@ -5048,7 +5060,7 @@ fn airIsNull(
try f.writeCValue(writer, operand, .Other);
}
const operand_ty = f.air.typeOf(un_op);
const operand_ty = f.typeOf(un_op);
const optional_ty = if (is_ptr) operand_ty.childType() else operand_ty;
var payload_buf: Type.Payload.ElemType = undefined;
const payload_ty = optional_ty.optionalChild(&payload_buf);
@ -5083,7 +5095,7 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const opt_ty = f.air.typeOf(ty_op.operand);
const opt_ty = f.typeOf(ty_op.operand);
var buf: Type.Payload.ElemType = undefined;
const payload_ty = opt_ty.optionalChild(&buf);
@ -5092,7 +5104,7 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
@ -5119,9 +5131,9 @@ fn airOptionalPayloadPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const ptr_ty = f.air.typeOf(ty_op.operand);
const ptr_ty = f.typeOf(ty_op.operand);
const opt_ty = ptr_ty.childType();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
if (!inst_ty.childType().hasRuntimeBitsIgnoreComptime(mod)) {
return .{ .undef = inst_ty };
@ -5149,11 +5161,11 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const opt_ty = operand_ty.elemType();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
if (opt_ty.optionalReprIsPayload(mod)) {
if (f.liveness.isUnused(inst)) {
@ -5249,7 +5261,7 @@ fn airStructFieldPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const container_ptr_val = try f.resolveInst(extra.struct_operand);
try reap(f, inst, &.{extra.struct_operand});
const container_ptr_ty = f.air.typeOf(extra.struct_operand);
const container_ptr_ty = f.typeOf(extra.struct_operand);
return fieldPtr(f, inst, container_ptr_ty, container_ptr_val, extra.field_index);
}
@ -5258,7 +5270,7 @@ fn airStructFieldPtrIndex(f: *Function, inst: Air.Inst.Index, index: u8) !CValue
const container_ptr_val = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const container_ptr_ty = f.air.typeOf(ty_op.operand);
const container_ptr_ty = f.typeOf(ty_op.operand);
return fieldPtr(f, inst, container_ptr_ty, container_ptr_val, index);
}
@ -5267,10 +5279,10 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
const container_ptr_ty = f.air.typeOfIndex(inst);
const container_ptr_ty = f.typeOfIndex(inst);
const container_ty = container_ptr_ty.childType();
const field_ptr_ty = f.air.typeOf(extra.field_ptr);
const field_ptr_ty = f.typeOf(extra.field_ptr);
const field_ptr_val = try f.resolveInst(extra.field_ptr);
try reap(f, inst, &.{extra.field_ptr});
@ -5334,7 +5346,7 @@ fn fieldPtr(
) !CValue {
const mod = f.object.dg.module;
const container_ty = container_ptr_ty.elemType();
const field_ptr_ty = f.air.typeOfIndex(inst);
const field_ptr_ty = f.typeOfIndex(inst);
// Ensure complete type definition is visible before accessing fields.
_ = try f.typeToIndex(container_ty, .complete);
@ -5385,7 +5397,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.StructField, ty_pl.payload).data;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
if (!inst_ty.hasRuntimeBitsIgnoreComptime(mod)) {
try reap(f, inst, &.{extra.struct_operand});
return .none;
@ -5393,7 +5405,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const struct_byval = try f.resolveInst(extra.struct_operand);
try reap(f, inst, &.{extra.struct_operand});
const struct_ty = f.air.typeOf(extra.struct_operand);
const struct_ty = f.typeOf(extra.struct_operand);
const writer = f.object.writer();
// Ensure complete type definition is visible before accessing fields.
@ -5514,9 +5526,9 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(ty_op.operand);
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const operand_is_ptr = operand_ty.zigTypeTag(mod) == .Pointer;
@ -5553,10 +5565,10 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const error_union_ty = if (is_ptr) operand_ty.childType() else operand_ty;
const writer = f.object.writer();
@ -5589,9 +5601,9 @@ fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const repr_is_payload = inst_ty.optionalReprIsPayload(mod);
const payload_ty = f.air.typeOf(ty_op.operand);
const payload_ty = f.typeOf(ty_op.operand);
const payload = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
@ -5621,7 +5633,7 @@ fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const payload_ty = inst_ty.errorUnionPayload();
const repr_is_err = !payload_ty.hasRuntimeBitsIgnoreComptime(mod);
const err_ty = inst_ty.errorUnionSet();
@ -5661,7 +5673,7 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const operand = try f.resolveInst(ty_op.operand);
const error_union_ty = f.air.typeOf(ty_op.operand).childType();
const error_union_ty = f.typeOf(ty_op.operand).childType();
const error_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
@ -5684,7 +5696,7 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
// Then return the payload pointer (only if it is used)
if (f.liveness.isUnused(inst)) return .none;
const local = try f.allocLocal(inst, f.air.typeOfIndex(inst));
const local = try f.allocLocal(inst, f.typeOfIndex(inst));
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = &(");
try f.writeCValueDeref(writer, operand);
@ -5711,7 +5723,7 @@ fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const payload_ty = inst_ty.errorUnionPayload();
const payload = try f.resolveInst(ty_op.operand);
const repr_is_err = !payload_ty.hasRuntimeBitsIgnoreComptime(mod);
@ -5747,7 +5759,7 @@ fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const
const writer = f.object.writer();
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
const operand_ty = f.air.typeOf(un_op);
const operand_ty = f.typeOf(un_op);
const local = try f.allocLocal(inst, Type.bool);
const err_union_ty = if (is_ptr) operand_ty.childType() else operand_ty;
const payload_ty = err_union_ty.errorUnionPayload();
@ -5780,10 +5792,10 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
const array_ty = f.air.typeOf(ty_op.operand).childType();
const array_ty = f.typeOf(ty_op.operand).childType();
try f.writeCValueMember(writer, local, .{ .identifier = "ptr" });
try writer.writeAll(" = ");
@ -5812,10 +5824,10 @@ fn airFloatCast(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const target = f.object.dg.module.getTarget();
const operation = if (inst_ty.isRuntimeFloat() and operand_ty.isRuntimeFloat())
if (inst_ty.floatBits(target) < operand_ty.floatBits(target)) "trunc" else "extend"
@ -5855,9 +5867,9 @@ fn airPtrToInt(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
const operand = try f.resolveInst(un_op);
const operand_ty = f.air.typeOf(un_op);
const operand_ty = f.typeOf(un_op);
try reap(f, inst, &.{un_op});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
try f.writeCValue(writer, local, .Other);
@ -5885,9 +5897,9 @@ fn airUnBuiltinCall(
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const operand_ty = f.air.typeOf(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
const scalar_ty = operand_ty.scalarType(mod);
const inst_scalar_cty = try f.typeToCType(inst_scalar_ty, .complete);
@ -5927,7 +5939,7 @@ fn airBinBuiltinCall(
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const operand_ty = f.air.typeOf(bin_op.lhs);
const operand_ty = f.typeOf(bin_op.lhs);
const operand_cty = try f.typeToCType(operand_ty, .complete);
const is_big = operand_cty.tag() == .array;
@ -5935,7 +5947,7 @@ fn airBinBuiltinCall(
const rhs = try f.resolveInst(bin_op.rhs);
if (!is_big) try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const scalar_ty = operand_ty.scalarType(mod);
@ -5984,9 +5996,9 @@ fn airCmpBuiltinCall(
const rhs = try f.resolveInst(data.rhs);
try reap(f, inst, &.{ data.lhs, data.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const operand_ty = f.air.typeOf(data.lhs);
const operand_ty = f.typeOf(data.lhs);
const scalar_ty = operand_ty.scalarType(mod);
const inst_scalar_cty = try f.typeToCType(inst_scalar_ty, .complete);
@ -6032,11 +6044,11 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
const mod = f.object.dg.module;
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.Cmpxchg, ty_pl.payload).data;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const ptr = try f.resolveInst(extra.ptr);
const expected_value = try f.resolveInst(extra.expected_value);
const new_value = try f.resolveInst(extra.new_value);
const ptr_ty = f.air.typeOf(extra.ptr);
const ptr_ty = f.typeOf(extra.ptr);
const ty = ptr_ty.childType();
const writer = f.object.writer();
@ -6137,8 +6149,8 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const pl_op = f.air.instructions.items(.data)[inst].pl_op;
const extra = f.air.extraData(Air.AtomicRmw, pl_op.payload).data;
const inst_ty = f.air.typeOfIndex(inst);
const ptr_ty = f.air.typeOf(pl_op.operand);
const inst_ty = f.typeOfIndex(inst);
const ptr_ty = f.typeOf(pl_op.operand);
const ty = ptr_ty.childType();
const ptr = try f.resolveInst(pl_op.operand);
const operand = try f.resolveInst(extra.operand);
@ -6193,7 +6205,7 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue {
const atomic_load = f.air.instructions.items(.data)[inst].atomic_load;
const ptr = try f.resolveInst(atomic_load.ptr);
try reap(f, inst, &.{atomic_load.ptr});
const ptr_ty = f.air.typeOf(atomic_load.ptr);
const ptr_ty = f.typeOf(atomic_load.ptr);
const ty = ptr_ty.childType();
const repr_ty = if (ty.isRuntimeFloat())
@ -6201,7 +6213,7 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue {
else
ty;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
@ -6227,7 +6239,7 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue {
fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CValue {
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = f.air.typeOf(bin_op.lhs);
const ptr_ty = f.typeOf(bin_op.lhs);
const ty = ptr_ty.childType();
const ptr = try f.resolveInst(bin_op.lhs);
const element = try f.resolveInst(bin_op.rhs);
@ -6270,10 +6282,10 @@ fn writeSliceOrPtr(f: *Function, writer: anytype, ptr: CValue, ptr_ty: Type) !vo
fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const dest_ty = f.air.typeOf(bin_op.lhs);
const dest_ty = f.typeOf(bin_op.lhs);
const dest_slice = try f.resolveInst(bin_op.lhs);
const value = try f.resolveInst(bin_op.rhs);
const elem_ty = f.air.typeOf(bin_op.rhs);
const elem_ty = f.typeOf(bin_op.rhs);
const elem_abi_size = elem_ty.abiSize(mod);
const val_is_undef = if (f.air.value(bin_op.rhs, mod)) |val| val.isUndefDeep() else false;
const writer = f.object.writer();
@ -6393,8 +6405,8 @@ fn airMemcpy(f: *Function, inst: Air.Inst.Index) !CValue {
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const dest_ptr = try f.resolveInst(bin_op.lhs);
const src_ptr = try f.resolveInst(bin_op.rhs);
const dest_ty = f.air.typeOf(bin_op.lhs);
const src_ty = f.air.typeOf(bin_op.rhs);
const dest_ty = f.typeOf(bin_op.lhs);
const src_ty = f.typeOf(bin_op.rhs);
const writer = f.object.writer();
try writer.writeAll("memcpy(");
@ -6434,7 +6446,7 @@ fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
const new_tag = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const union_ty = f.air.typeOf(bin_op.lhs).childType();
const union_ty = f.typeOf(bin_op.lhs).childType();
const layout = union_ty.unionGetLayout(mod);
if (layout.tag_size == 0) return .none;
const tag_ty = union_ty.unionTagTypeSafety().?;
@ -6455,11 +6467,11 @@ fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const union_ty = f.air.typeOf(ty_op.operand);
const union_ty = f.typeOf(ty_op.operand);
const layout = union_ty.unionGetLayout(mod);
if (layout.tag_size == 0) return .none;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
const a = try Assignment.start(f, writer, inst_ty);
@ -6473,8 +6485,8 @@ fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
const inst_ty = f.air.typeOfIndex(inst);
const enum_ty = f.air.typeOf(un_op);
const inst_ty = f.typeOfIndex(inst);
const enum_ty = f.typeOf(un_op);
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
@ -6494,7 +6506,7 @@ fn airErrorName(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
const local = try f.allocLocal(inst, inst_ty);
@ -6513,7 +6525,7 @@ fn airSplat(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const writer = f.object.writer();
@ -6539,7 +6551,7 @@ fn airSelect(f: *Function, inst: Air.Inst.Index) !CValue {
const rhs = try f.resolveInst(extra.rhs);
try reap(f, inst, &.{ pl_op.operand, extra.lhs, extra.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
@ -6570,7 +6582,7 @@ fn airShuffle(f: *Function, inst: Air.Inst.Index) !CValue {
const lhs = try f.resolveInst(extra.a);
const rhs = try f.resolveInst(extra.b);
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
@ -6607,10 +6619,10 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
const reduce = f.air.instructions.items(.data)[inst].reduce;
const target = mod.getTarget();
const scalar_ty = f.air.typeOfIndex(inst);
const scalar_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(reduce.operand);
try reap(f, inst, &.{reduce.operand});
const operand_ty = f.air.typeOf(reduce.operand);
const operand_ty = f.typeOf(reduce.operand);
const writer = f.object.writer();
const use_operator = scalar_ty.bitSize(mod) <= 64;
@ -6762,7 +6774,7 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const len = @intCast(usize, inst_ty.arrayLen());
const elements = @ptrCast([]const Air.Inst.Ref, f.air.extra[ty_pl.payload..][0..len]);
const gpa = f.object.dg.gpa;
@ -6892,10 +6904,10 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.UnionInit, ty_pl.payload).data;
const union_ty = f.air.typeOfIndex(inst);
const union_ty = f.typeOfIndex(inst);
const union_obj = union_ty.cast(Type.Payload.Union).?.data;
const field_name = union_obj.fields.keys()[extra.field_index];
const payload_ty = f.air.typeOf(extra.init);
const payload_ty = f.typeOf(extra.init);
const payload = try f.resolveInst(extra.init);
try reap(f, inst, &.{extra.init});
@ -6965,7 +6977,7 @@ fn airWasmMemorySize(f: *Function, inst: Air.Inst.Index) !CValue {
const pl_op = f.air.instructions.items(.data)[inst].pl_op;
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const local = try f.allocLocal(inst, inst_ty);
try f.writeCValue(writer, local, .Other);
@ -6979,7 +6991,7 @@ fn airWasmMemoryGrow(f: *Function, inst: Air.Inst.Index) !CValue {
const pl_op = f.air.instructions.items(.data)[inst].pl_op;
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(pl_op.operand);
try reap(f, inst, &.{pl_op.operand});
const local = try f.allocLocal(inst, inst_ty);
@ -6999,7 +7011,7 @@ fn airFloatNeg(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
const operand_ty = f.air.typeOf(un_op);
const operand_ty = f.typeOf(un_op);
const scalar_ty = operand_ty.scalarType(mod);
const writer = f.object.writer();
@ -7025,7 +7037,7 @@ fn airUnFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CVal
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const writer = f.object.writer();
@ -7054,7 +7066,7 @@ fn airBinFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CVa
const rhs = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const writer = f.object.writer();
@ -7088,7 +7100,7 @@ fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue {
const addend = try f.resolveInst(pl_op.operand);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs, pl_op.operand });
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(mod);
const writer = f.object.writer();
@ -7114,7 +7126,7 @@ fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue {
}
fn airCVaStart(f: *Function, inst: Air.Inst.Index) !CValue {
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const fn_cty = try f.typeToCType(f.object.dg.decl.?.ty, .complete);
const param_len = fn_cty.castTag(.varargs_function).?.data.param_types.len;
@ -7133,7 +7145,7 @@ fn airCVaStart(f: *Function, inst: Air.Inst.Index) !CValue {
fn airCVaArg(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const va_list = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
@ -7164,7 +7176,7 @@ fn airCVaEnd(f: *Function, inst: Air.Inst.Index) !CValue {
fn airCVaCopy(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const inst_ty = f.air.typeOfIndex(inst);
const inst_ty = f.typeOfIndex(inst);
const va_list = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
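Every hunk in this C backend file above is the same mechanical substitution: `f.air.typeOf(...)` and `f.air.typeOfIndex(...)` become `f.typeOf(...)` and `f.typeOfIndex(...)`. The wrapper functions themselves fall outside the lines shown here; presumably they mirror the DeclGen helpers added near the end of the SPIR-V file below, threading `mod.intern_pool` into the now two-argument Air methods. A minimal sketch of what such wrappers look like, assuming the `Function` type and its `object.dg.module` field seen in the surrounding hunks:

fn typeOf(f: *Function, inst: Air.Inst.Ref) Type {
    // Thread the InternPool in once so call sites stay one-argument.
    const mod = f.object.dg.module;
    return f.air.typeOf(inst, mod.intern_pool);
}

fn typeOfIndex(f: *Function, inst: Air.Inst.Index) Type {
    const mod = f.object.dg.module;
    return f.air.typeOfIndex(inst, mod.intern_pool);
}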

File diff suppressed because it is too large

View File

@ -233,7 +233,7 @@ pub const DeclGen = struct {
fn resolve(self: *DeclGen, inst: Air.Inst.Ref) !IdRef {
const mod = self.module;
if (self.air.value(inst, mod)) |val| {
const ty = self.air.typeOf(inst);
const ty = self.typeOf(inst);
if (ty.zigTypeTag(mod) == .Fn) {
const fn_decl_index = switch (val.tag()) {
.extern_fn => val.castTag(.extern_fn).?.data.owner_decl,
@ -1720,10 +1720,11 @@ pub const DeclGen = struct {
}
fn genInst(self: *DeclGen, inst: Air.Inst.Index) !void {
const mod = self.module;
const ip = &mod.intern_pool;
// TODO: remove now-redundant isUnused calls from AIR handler functions
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) {
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*))
return;
}
const air_tags = self.air.instructions.items(.tag);
const maybe_result_id: ?IdRef = switch (air_tags[inst]) {
@ -1847,7 +1848,7 @@ pub const DeclGen = struct {
const lhs_id = try self.resolve(bin_op.lhs);
const rhs_id = try self.resolve(bin_op.rhs);
const result_id = self.spv.allocId();
const result_type_id = try self.resolveTypeId(self.air.typeOfIndex(inst));
const result_type_id = try self.resolveTypeId(self.typeOfIndex(inst));
try self.func.body.emit(self.spv.gpa, opcode, .{
.id_result_type = result_type_id,
.id_result = result_id,
@ -1862,7 +1863,7 @@ pub const DeclGen = struct {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const lhs_id = try self.resolve(bin_op.lhs);
const rhs_id = try self.resolve(bin_op.rhs);
const result_type_id = try self.resolveTypeId(self.air.typeOfIndex(inst));
const result_type_id = try self.resolveTypeId(self.typeOfIndex(inst));
// the shift and the base must be the same type in SPIR-V, but in Zig the shift is a smaller int.
const shift_id = self.spv.allocId();
@ -1907,15 +1908,15 @@ pub const DeclGen = struct {
if (self.liveness.isUnused(inst)) return null;
// LHS and RHS are guaranteed to have the same type, and AIR guarantees
// the result to be the same as the LHS and RHS, which matches SPIR-V.
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
var lhs_id = try self.resolve(bin_op.lhs);
var rhs_id = try self.resolve(bin_op.rhs);
const result_ty_ref = try self.resolveType(ty, .direct);
assert(self.air.typeOf(bin_op.lhs).eql(ty, self.module));
assert(self.air.typeOf(bin_op.rhs).eql(ty, self.module));
assert(self.typeOf(bin_op.lhs).eql(ty, self.module));
assert(self.typeOf(bin_op.rhs).eql(ty, self.module));
// Binary operations are generally applicable to both scalar and vector operations
// in SPIR-V, but int and float versions of operations require different opcodes.
@ -1971,8 +1972,8 @@ pub const DeclGen = struct {
const lhs = try self.resolve(extra.lhs);
const rhs = try self.resolve(extra.rhs);
const operand_ty = self.air.typeOf(extra.lhs);
const result_ty = self.air.typeOfIndex(inst);
const operand_ty = self.typeOf(extra.lhs);
const result_ty = self.typeOfIndex(inst);
const info = try self.arithmeticTypeInfo(operand_ty);
switch (info.class) {
@ -2064,14 +2065,14 @@ pub const DeclGen = struct {
fn airShuffle(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
const mod = self.module;
if (self.liveness.isUnused(inst)) return null;
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.Shuffle, ty_pl.payload).data;
const a = try self.resolve(extra.a);
const b = try self.resolve(extra.b);
const mask = self.air.values[extra.mask];
const mask_len = extra.mask_len;
const a_len = self.air.typeOf(extra.a).vectorLen();
const a_len = self.typeOf(extra.a).vectorLen();
const result_id = self.spv.allocId();
const result_type_id = try self.resolveTypeId(ty);
@ -2162,8 +2163,8 @@ pub const DeclGen = struct {
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const ptr_id = try self.resolve(bin_op.lhs);
const offset_id = try self.resolve(bin_op.rhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const result_ty = self.air.typeOfIndex(inst);
const ptr_ty = self.typeOf(bin_op.lhs);
const result_ty = self.typeOfIndex(inst);
return try self.ptrAdd(result_ty, ptr_ty, ptr_id, offset_id);
}
@ -2173,11 +2174,11 @@ pub const DeclGen = struct {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const ptr_id = try self.resolve(bin_op.lhs);
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const offset_id = try self.resolve(bin_op.rhs);
const offset_ty = self.air.typeOf(bin_op.rhs);
const offset_ty = self.typeOf(bin_op.rhs);
const offset_ty_ref = try self.resolveType(offset_ty, .direct);
const result_ty = self.air.typeOfIndex(inst);
const result_ty = self.typeOfIndex(inst);
const negative_offset_id = self.spv.allocId();
try self.func.body.emit(self.spv.gpa, .OpSNegate, .{
@ -2298,8 +2299,8 @@ pub const DeclGen = struct {
const lhs_id = try self.resolve(bin_op.lhs);
const rhs_id = try self.resolve(bin_op.rhs);
const bool_ty_id = try self.resolveTypeId(Type.bool);
const ty = self.air.typeOf(bin_op.lhs);
assert(ty.eql(self.air.typeOf(bin_op.rhs), self.module));
const ty = self.typeOf(bin_op.lhs);
assert(ty.eql(self.typeOf(bin_op.rhs), self.module));
return try self.cmp(op, bool_ty_id, ty, lhs_id, rhs_id);
}
@ -2337,8 +2338,8 @@ pub const DeclGen = struct {
if (self.liveness.isUnused(inst)) return null;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand_id = try self.resolve(ty_op.operand);
const operand_ty = self.air.typeOf(ty_op.operand);
const result_ty = self.air.typeOfIndex(inst);
const operand_ty = self.typeOf(ty_op.operand);
const result_ty = self.typeOfIndex(inst);
return try self.bitCast(result_ty, operand_ty, operand_id);
}
@ -2347,7 +2348,7 @@ pub const DeclGen = struct {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand_id = try self.resolve(ty_op.operand);
const dest_ty = self.air.typeOfIndex(inst);
const dest_ty = self.typeOfIndex(inst);
const dest_ty_id = try self.resolveTypeId(dest_ty);
const mod = self.module;
@ -2391,10 +2392,10 @@ pub const DeclGen = struct {
if (self.liveness.isUnused(inst)) return null;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand_ty = self.air.typeOf(ty_op.operand);
const operand_ty = self.typeOf(ty_op.operand);
const operand_id = try self.resolve(ty_op.operand);
const operand_info = try self.arithmeticTypeInfo(operand_ty);
const dest_ty = self.air.typeOfIndex(inst);
const dest_ty = self.typeOfIndex(inst);
const dest_ty_id = try self.resolveTypeId(dest_ty);
const result_id = self.spv.allocId();
@ -2418,7 +2419,7 @@ pub const DeclGen = struct {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand_id = try self.resolve(ty_op.operand);
const dest_ty = self.air.typeOfIndex(inst);
const dest_ty = self.typeOfIndex(inst);
const dest_info = try self.arithmeticTypeInfo(dest_ty);
const dest_ty_id = try self.resolveTypeId(dest_ty);
@ -2455,20 +2456,20 @@ pub const DeclGen = struct {
fn airSliceField(self: *DeclGen, inst: Air.Inst.Index, field: u32) !?IdRef {
if (self.liveness.isUnused(inst)) return null;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const field_ty = self.air.typeOfIndex(inst);
const field_ty = self.typeOfIndex(inst);
const operand_id = try self.resolve(ty_op.operand);
return try self.extractField(field_ty, operand_id, field);
}
fn airSliceElemPtr(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const slice_ty = self.air.typeOf(bin_op.lhs);
const slice_ty = self.typeOf(bin_op.lhs);
if (!slice_ty.isVolatilePtr() and self.liveness.isUnused(inst)) return null;
const slice_id = try self.resolve(bin_op.lhs);
const index_id = try self.resolve(bin_op.rhs);
const ptr_ty = self.air.typeOfIndex(inst);
const ptr_ty = self.typeOfIndex(inst);
const ptr_ty_ref = try self.resolveType(ptr_ty, .direct);
const slice_ptr = try self.extractField(ptr_ty, slice_id, 0);
@ -2477,7 +2478,7 @@ pub const DeclGen = struct {
fn airSliceElemVal(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const slice_ty = self.air.typeOf(bin_op.lhs);
const slice_ty = self.typeOf(bin_op.lhs);
if (!slice_ty.isVolatilePtr() and self.liveness.isUnused(inst)) return null;
const slice_id = try self.resolve(bin_op.lhs);
@ -2514,7 +2515,7 @@ pub const DeclGen = struct {
const mod = self.module;
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const elem_ty = ptr_ty.childType();
// TODO: Make this return a null ptr or something
if (!elem_ty.hasRuntimeBitsIgnoreComptime(mod)) return null;
@ -2526,7 +2527,7 @@ pub const DeclGen = struct {
fn airPtrElemVal(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const ptr_id = try self.resolve(bin_op.lhs);
const index_id = try self.resolve(bin_op.rhs);
@ -2544,7 +2545,7 @@ pub const DeclGen = struct {
fn airGetUnionTag(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const un_ty = self.air.typeOf(ty_op.operand);
const un_ty = self.typeOf(ty_op.operand);
const mod = self.module;
const layout = un_ty.unionGetLayout(mod);
@ -2565,7 +2566,7 @@ pub const DeclGen = struct {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const struct_field = self.air.extraData(Air.StructField, ty_pl.payload).data;
const struct_ty = self.air.typeOf(struct_field.struct_operand);
const struct_ty = self.typeOf(struct_field.struct_operand);
const object_id = try self.resolve(struct_field.struct_operand);
const field_index = struct_field.field_index;
const field_ty = struct_ty.structFieldType(field_index);
@ -2604,8 +2605,8 @@ pub const DeclGen = struct {
if (self.liveness.isUnused(inst)) return null;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const struct_ptr = try self.resolve(ty_op.operand);
const struct_ptr_ty = self.air.typeOf(ty_op.operand);
const result_ptr_ty = self.air.typeOfIndex(inst);
const struct_ptr_ty = self.typeOf(ty_op.operand);
const result_ptr_ty = self.typeOfIndex(inst);
return try self.structFieldPtr(result_ptr_ty, struct_ptr_ty, struct_ptr, field_index);
}
@ -2661,7 +2662,7 @@ pub const DeclGen = struct {
fn airAlloc(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
if (self.liveness.isUnused(inst)) return null;
const ptr_ty = self.air.typeOfIndex(inst);
const ptr_ty = self.typeOfIndex(inst);
assert(ptr_ty.ptrAddressSpace() == .generic);
const child_ty = ptr_ty.childType();
const child_ty_ref = try self.resolveType(child_ty, .indirect);
@ -2694,7 +2695,7 @@ pub const DeclGen = struct {
incoming_blocks.deinit(self.gpa);
}
const ty = self.air.typeOfIndex(inst);
const ty = self.typeOfIndex(inst);
const inst_datas = self.air.instructions.items(.data);
const extra = self.air.extraData(Air.Block, inst_datas[inst].ty_pl.payload);
const body = self.air.extra[extra.end..][0..extra.data.body_len];
@ -2727,7 +2728,7 @@ pub const DeclGen = struct {
fn airBr(self: *DeclGen, inst: Air.Inst.Index) !void {
const br = self.air.instructions.items(.data)[inst].br;
const block = self.blocks.get(br.block_inst).?;
const operand_ty = self.air.typeOf(br.operand);
const operand_ty = self.typeOf(br.operand);
const mod = self.module;
if (operand_ty.hasRuntimeBits(mod)) {
@ -2777,7 +2778,7 @@ pub const DeclGen = struct {
fn airLoad(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const ptr_ty = self.air.typeOf(ty_op.operand);
const ptr_ty = self.typeOf(ty_op.operand);
const operand = try self.resolve(ty_op.operand);
if (!ptr_ty.isVolatilePtr() and self.liveness.isUnused(inst)) return null;
@ -2787,7 +2788,7 @@ pub const DeclGen = struct {
fn airStore(self: *DeclGen, inst: Air.Inst.Index) !void {
const mod = self.module;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const ptr = try self.resolve(bin_op.lhs);
const value = try self.resolve(bin_op.rhs);
const ptr_ty_ref = try self.resolveType(ptr_ty, .direct);
@ -2819,7 +2820,7 @@ pub const DeclGen = struct {
fn airRet(self: *DeclGen, inst: Air.Inst.Index) !void {
const operand = self.air.instructions.items(.data)[inst].un_op;
const operand_ty = self.air.typeOf(operand);
const operand_ty = self.typeOf(operand);
const mod = self.module;
if (operand_ty.hasRuntimeBits(mod)) {
const operand_id = try self.resolve(operand);
@ -2832,7 +2833,7 @@ pub const DeclGen = struct {
fn airRetLoad(self: *DeclGen, inst: Air.Inst.Index) !void {
const mod = self.module;
const un_op = self.air.instructions.items(.data)[inst].un_op;
const ptr_ty = self.air.typeOf(un_op);
const ptr_ty = self.typeOf(un_op);
const ret_ty = ptr_ty.childType();
if (!ret_ty.hasRuntimeBitsIgnoreComptime(mod)) {
@ -2853,8 +2854,8 @@ pub const DeclGen = struct {
const extra = self.air.extraData(Air.Try, pl_op.payload);
const body = self.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = self.air.typeOf(pl_op.operand);
const payload_ty = self.air.typeOfIndex(inst);
const err_union_ty = self.typeOf(pl_op.operand);
const payload_ty = self.typeOfIndex(inst);
const err_ty_ref = try self.resolveType(Type.anyerror, .direct);
const bool_ty_ref = try self.resolveType(Type.bool, .direct);
@ -2911,7 +2912,7 @@ pub const DeclGen = struct {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand_id = try self.resolve(ty_op.operand);
const err_union_ty = self.air.typeOf(ty_op.operand);
const err_union_ty = self.typeOf(ty_op.operand);
const err_ty_ref = try self.resolveType(Type.anyerror, .direct);
if (err_union_ty.errorUnionSet().errorSetIsEmpty()) {
@ -2934,7 +2935,7 @@ pub const DeclGen = struct {
if (self.liveness.isUnused(inst)) return null;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const err_union_ty = self.air.typeOfIndex(inst);
const err_union_ty = self.typeOfIndex(inst);
const payload_ty = err_union_ty.errorUnionPayload();
const operand_id = try self.resolve(ty_op.operand);
const eu_layout = self.errorUnionLayout(payload_ty);
@ -2966,7 +2967,7 @@ pub const DeclGen = struct {
const mod = self.module;
const un_op = self.air.instructions.items(.data)[inst].un_op;
const operand_id = try self.resolve(un_op);
const optional_ty = self.air.typeOf(un_op);
const optional_ty = self.typeOf(un_op);
var buf: Type.Payload.ElemType = undefined;
const payload_ty = optional_ty.optionalChild(&buf);
@ -3030,8 +3031,8 @@ pub const DeclGen = struct {
const mod = self.module;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand_id = try self.resolve(ty_op.operand);
const optional_ty = self.air.typeOf(ty_op.operand);
const payload_ty = self.air.typeOfIndex(inst);
const optional_ty = self.typeOf(ty_op.operand);
const payload_ty = self.typeOfIndex(inst);
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) return null;
@ -3047,14 +3048,14 @@ pub const DeclGen = struct {
const mod = self.module;
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const payload_ty = self.air.typeOf(ty_op.operand);
const payload_ty = self.typeOf(ty_op.operand);
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
return try self.constBool(true, .direct);
}
const operand_id = try self.resolve(ty_op.operand);
const optional_ty = self.air.typeOfIndex(inst);
const optional_ty = self.typeOfIndex(inst);
if (optional_ty.optionalReprIsPayload(mod)) {
return operand_id;
}
@ -3068,7 +3069,7 @@ pub const DeclGen = struct {
const mod = self.module;
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const cond = try self.resolve(pl_op.operand);
const cond_ty = self.air.typeOf(pl_op.operand);
const cond_ty = self.typeOf(pl_op.operand);
const switch_br = self.air.extraData(Air.SwitchBr, pl_op.payload);
const cond_words: u32 = switch (cond_ty.zigTypeTag(mod)) {
@ -3317,7 +3318,7 @@ pub const DeclGen = struct {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const extra = self.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra.end..][0..extra.data.args_len]);
const callee_ty = self.air.typeOf(pl_op.operand);
const callee_ty = self.typeOf(pl_op.operand);
const zig_fn_ty = switch (callee_ty.zigTypeTag(mod)) {
.Fn => callee_ty,
.Pointer => return self.fail("cannot call function pointers", .{}),
@ -3339,7 +3340,7 @@ pub const DeclGen = struct {
// before starting to emit OpFunctionCall instructions. Hence the
// temporary params buffer.
const arg_id = try self.resolve(arg);
const arg_ty = self.air.typeOf(arg);
const arg_ty = self.typeOf(arg);
if (!arg_ty.hasRuntimeBitsIgnoreComptime(mod)) continue;
params[n_params] = arg_id;
@ -3363,4 +3364,14 @@ pub const DeclGen = struct {
return result_id;
}
fn typeOf(self: *DeclGen, inst: Air.Inst.Ref) Type {
const mod = self.module;
return self.air.typeOf(inst, mod.intern_pool);
}
fn typeOfIndex(self: *DeclGen, inst: Air.Inst.Index) Type {
const mod = self.module;
return self.air.typeOfIndex(inst, mod.intern_pool);
}
};
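One detail from the genInst hunk above: the new two-argument AIR methods take the `InternPool` by value, so a call site that holds a pointer dereferences it (`ip.*`), while the `typeOf`/`typeOfIndex` wrappers simply pass `mod.intern_pool`. A hypothetical call site combining both patterns, with field names as in DeclGen above:

const ip = &self.module.intern_pool;
if (self.liveness.isUnused(inst) and !self.air.mustLower(inst, ip.*)) return;
// The wrapper expands to self.air.typeOfIndex(inst, self.module.intern_pool).
const result_type_id = try self.resolveTypeId(self.typeOfIndex(inst));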

View File

@ -306,6 +306,7 @@ const Writer = struct {
.struct_field_ptr => try w.writeStructField(s, inst),
.struct_field_val => try w.writeStructField(s, inst),
.constant => try w.writeConstant(s, inst),
.interned => try w.writeInterned(s, inst),
.assembly => try w.writeAssembly(s, inst),
.dbg_stmt => try w.writeDbgStmt(s, inst),
@ -515,7 +516,7 @@ const Writer = struct {
const pl_op = w.air.instructions.items(.data)[inst].pl_op;
const extra = w.air.extraData(Air.Bin, pl_op.payload).data;
const elem_ty = w.air.typeOfIndex(inst).childType();
const elem_ty = w.typeOfIndex(inst).childType();
try w.writeType(s, elem_ty);
try s.writeAll(", ");
try w.writeOperand(s, inst, 0, pl_op.operand);
@ -614,6 +615,14 @@ const Writer = struct {
try s.print(", {}", .{val.fmtValue(ty, w.module)});
}
fn writeInterned(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const mod = w.module;
const ip_index = w.air.instructions.items(.data)[inst].interned;
const ty = ip_index.toType();
try w.writeType(s, ty);
try s.print(", {}", .{ip_index.toValue().fmtValue(ty, mod)});
}
fn writeAssembly(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const ty_pl = w.air.instructions.items(.data)[inst].ty_pl;
const extra = w.air.extraData(Air.Asm, ty_pl.payload);
@ -622,7 +631,7 @@ const Writer = struct {
var extra_i: usize = extra.end;
var op_index: usize = 0;
const ret_ty = w.air.typeOfIndex(inst);
const ret_ty = w.typeOfIndex(inst);
try w.writeType(s, ret_ty);
if (is_volatile) {
@ -985,4 +994,9 @@ const Writer = struct {
try s.print("%{d}", .{inst});
if (dies) try s.writeByte('!');
}
fn typeOfIndex(w: *Writer, inst: Air.Inst.Index) Type {
const mod = w.module;
return w.air.typeOfIndex(inst, mod.intern_pool);
}
};
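The printer handles the new tag by turning the stored index back into a type/value pair, as `writeInterned` above shows. Any other AIR consumer with an exhaustive switch over `Air.Inst.Tag` needs a similar arm; a rough sketch of the minimal shape, reusing the accessors from `writeInterned` (the body is backend-specific and only hinted at here):

.interned => {
    const ip_index = air.instructions.items(.data)[inst].interned;
    const ty = ip_index.toType();
    const val = ip_index.toValue();
    // ...lower the comptime-known value `val` of type `ty`...
},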

View File

@ -2827,21 +2827,35 @@ pub const Type = struct {
};
}
/// TODO add enums with no fields here
pub fn isNoReturn(ty: Type) bool {
switch (ty.tag()) {
.noreturn => return true,
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
return names.len == 0;
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
return names.len == 0;
},
switch (@enumToInt(ty.ip_index)) {
@enumToInt(InternPool.Index.first_type)...@enumToInt(InternPool.Index.noreturn_type) - 1 => return false,
@enumToInt(InternPool.Index.noreturn_type) => return true,
@enumToInt(InternPool.Index.noreturn_type) + 1...@enumToInt(InternPool.Index.last_type) => return false,
@enumToInt(InternPool.Index.first_value)...@enumToInt(InternPool.Index.last_value) => unreachable,
@enumToInt(InternPool.Index.generic_poison) => unreachable,
// TODO add empty error sets here
// TODO add enums with no fields here
else => return false,
@enumToInt(InternPool.Index.none) => switch (ty.tag()) {
.noreturn => return true,
.error_set => {
const err_set_obj = ty.castTag(.error_set).?.data;
const names = err_set_obj.names.keys();
return names.len == 0;
},
.error_set_merged => {
const name_map = ty.castTag(.error_set_merged).?.data;
const names = name_map.keys();
return names.len == 0;
},
else => return false,
},
}
}
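The new shape of `isNoReturn` splits into two regimes: interned types are classified purely by their `InternPool.Index` (only `noreturn_type` is no-return so far, with empty error sets and fieldless enums left as TODOs), while legacy types (`ip_index == .none`) keep the old tag-based check. For a valid type index the whole range switch collapses to a single comparison; a sketch of that predicate, assuming type indexes sit contiguously between `first_type` and `last_type` and that value indexes never reach this function:

fn internedIsNoReturn(index: InternPool.Index) bool {
    // Mirrors the range switch above: every type index other than
    // noreturn_type returns false; value indexes are unreachable here.
    return index == .noreturn_type;
}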