Air: store interned values in Air.Inst.Ref

Previously, interned values were represented as AIR instructions using
the `interned` tag. Now, the AIR ref directly encodes the InternPool
index. The encoding works as follows:
* If the ref matches one of the static values, it corresponds to the same InternPool index.
* Otherwise, if the MSB is 0, the ref corresponds to an InternPool index.
* Otherwise, if the MSB is 1, the ref corresponds to an AIR instruction index (after removing the MSB).

Note that since most static InternPool indices are low values with the MSB
clear (the exceptions being `.none` and `.var_args_param_type`), the first
rule is almost a no-op: it only changes behavior for those two refs.
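
As an illustration, here is a minimal self-contained sketch of the tag-bit
scheme (the helper names are hypothetical and the two special-cased refs are
ignored; this is not the compiler's actual code):

    const std = @import("std");

    // A ref is a plain u32: MSB clear means InternPool index, MSB set means
    // AIR instruction index stored in the low 31 bits.
    fn encodeInstIndex(inst: u32) u32 {
        std.debug.assert(inst >> 31 == 0); // instruction index must fit in 31 bits
        return (1 << 31) | inst;
    }

    fn decodeRef(ref: u32) union(enum) { interned: u32, inst: u32 } {
        return if (ref >> 31 == 0)
            .{ .interned = ref } // MSB 0: an InternPool index
        else
            .{ .inst = ref & std.math.maxInt(u31) }; // MSB 1: strip the tag bit
    }

    test "tag bit round trip" {
        try std.testing.expectEqual(@as(u32, 42), decodeRef(encodeInstIndex(42)).inst);
        try std.testing.expectEqual(@as(u32, 7), decodeRef(7).interned);
    }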
mlugg 2023-06-24 22:21:43 +01:00 committed by Andrew Kelley
parent dae516dbdf
commit ff37ccd298
13 changed files with 206 additions and 326 deletions


@ -438,9 +438,6 @@ pub const Inst = struct {
/// was executed on the operand.
/// Uses the `ty_pl` field. Payload is `TryPtr`.
try_ptr,
/// A comptime-known value via an index into the InternPool.
/// Uses the `interned` field.
interned,
/// Notes the beginning of a source code statement and marks the line and column.
/// Result type is always void.
/// Uses the `dbg_stmt` field.
@ -879,6 +876,12 @@ pub const Inst = struct {
/// The position of an AIR instruction within the `Air` instructions array.
pub const Index = u32;
/// Either a reference to a value stored in the InternPool, or a reference to an AIR instruction.
/// The most-significant bit of the value is a tag bit. This bit is 1 if the value represents an
/// instruction index and 0 if it represents an InternPool index.
///
/// The hardcoded refs `none` and `var_args_param_type` are exceptions to this rule: they have
/// their tag bit set but refer to the InternPool.
pub const Ref = enum(u32) {
u0_type = @intFromEnum(InternPool.Index.u0_type),
i0_type = @intFromEnum(InternPool.Index.i0_type),
@ -979,7 +982,6 @@ pub const Inst = struct {
pub const Data = union {
no_op: void,
un_op: Ref,
interned: InternPool.Index,
bin_op: struct {
lhs: Ref,
@ -1216,11 +1218,11 @@ pub fn getMainBody(air: Air) []const Air.Inst.Index {
}
pub fn typeOf(air: *const Air, inst: Air.Inst.Ref, ip: *const InternPool) Type {
const ref_int = @intFromEnum(inst);
if (ref_int < InternPool.static_keys.len) {
return InternPool.static_keys[ref_int].typeOf().toType();
if (refToInterned(inst)) |ip_index| {
return ip.typeOf(ip_index).toType();
} else {
return air.typeOfIndex(refToIndex(inst).?, ip);
}
return air.typeOfIndex(ref_int - ref_start_index, ip);
}
pub fn typeOfIndex(air: *const Air, inst: Air.Inst.Index, ip: *const InternPool) Type {
@ -1342,8 +1344,6 @@ pub fn typeOfIndex(air: *const Air, inst: Air.Inst.Index, ip: *const InternPool)
.try_ptr,
=> return air.getRefType(datas[inst].ty_pl.ty),
.interned => return ip.typeOf(datas[inst].interned).toType(),
.not,
.bitcast,
.load,
@ -1479,18 +1479,8 @@ pub fn typeOfIndex(air: *const Air, inst: Air.Inst.Index, ip: *const InternPool)
}
pub fn getRefType(air: Air, ref: Air.Inst.Ref) Type {
const ref_int = @intFromEnum(ref);
if (ref_int < ref_start_index) {
const ip_index = @as(InternPool.Index, @enumFromInt(ref_int));
return ip_index.toType();
}
const inst_index = ref_int - ref_start_index;
const air_tags = air.instructions.items(.tag);
const air_datas = air.instructions.items(.data);
return switch (air_tags[inst_index]) {
.interned => air_datas[inst_index].interned.toType(),
else => unreachable,
};
_ = air; // TODO: remove this parameter
return refToInterned(ref).?.toType();
}
/// Returns the requested data, as well as the new index which is at the start of the
@ -1521,40 +1511,56 @@ pub fn deinit(air: *Air, gpa: std.mem.Allocator) void {
air.* = undefined;
}
pub const ref_start_index: u32 = InternPool.static_len;
pub fn refToInternedAllowNone(ref: Inst.Ref) ?InternPool.Index {
return switch (ref) {
.var_args_param_type => .var_args_param_type,
.none => .none,
else => if (@intFromEnum(ref) >> 31 == 0) {
return @as(InternPool.Index, @enumFromInt(@intFromEnum(ref)));
} else null,
};
}
pub fn refToInterned(ref: Inst.Ref) ?InternPool.Index {
assert(ref != .none);
return refToInternedAllowNone(ref);
}
pub fn internedToRef(ip_index: InternPool.Index) Inst.Ref {
assert(@intFromEnum(ip_index) >> 31 == 0);
return switch (ip_index) {
.var_args_param_type => .var_args_param_type,
.none => .none,
else => @enumFromInt(@as(u31, @intCast(@intFromEnum(ip_index)))),
};
}
pub fn refToIndexAllowNone(ref: Inst.Ref) ?Inst.Index {
return switch (ref) {
.var_args_param_type, .none => null,
else => if (@intFromEnum(ref) >> 31 != 0) {
return @as(u31, @truncate(@intFromEnum(ref)));
} else null,
};
}
pub fn refToIndex(ref: Inst.Ref) ?Inst.Index {
assert(ref != .none);
return refToIndexAllowNone(ref);
}
pub fn indexToRef(inst: Inst.Index) Inst.Ref {
return @as(Inst.Ref, @enumFromInt(ref_start_index + inst));
}
pub fn refToIndex(inst: Inst.Ref) ?Inst.Index {
assert(inst != .none);
const ref_int = @intFromEnum(inst);
if (ref_int >= ref_start_index) {
return ref_int - ref_start_index;
} else {
return null;
}
}
pub fn refToIndexAllowNone(inst: Inst.Ref) ?Inst.Index {
if (inst == .none) return null;
return refToIndex(inst);
}
pub fn indexToRef(inst: Inst.Index) Inst.Ref {
assert(inst >> 31 == 0);
return @enumFromInt((1 << 31) | inst);
}
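
The invariants these helpers are meant to uphold can be summarized in a
hypothetical test (assuming `std` is in scope; the values chosen are
illustrative only):

    test "ref helpers round trip" {
        // An interned value survives the trip to a ref and back.
        const ip_index: InternPool.Index = .u8_type;
        try std.testing.expectEqual(ip_index, refToInterned(internedToRef(ip_index)).?);

        // So does an instruction index, via the MSB-tagged encoding.
        const inst: Inst.Index = 5;
        try std.testing.expectEqual(inst, refToIndex(indexToRef(inst)).?);

        // The two encodings never overlap: an instruction ref is not interned.
        try std.testing.expect(refToInterned(indexToRef(inst)) == null);
    }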
/// Returns `null` if runtime-known.
pub fn value(air: Air, inst: Inst.Ref, mod: *Module) !?Value {
const ref_int = @intFromEnum(inst);
if (ref_int < ref_start_index) {
const ip_index = @as(InternPool.Index, @enumFromInt(ref_int));
if (refToInterned(inst)) |ip_index| {
return ip_index.toValue();
}
const inst_index = @as(Air.Inst.Index, @intCast(ref_int - ref_start_index));
const air_datas = air.instructions.items(.data);
switch (air.instructions.items(.tag)[inst_index]) {
.interned => return air_datas[inst_index].interned.toValue(),
else => return air.typeOfIndex(inst_index, &mod.intern_pool).onePossibleValue(mod),
}
const index = refToIndex(inst).?;
return air.typeOfIndex(index, &mod.intern_pool).onePossibleValue(mod);
}
pub fn nullTerminatedString(air: Air, index: usize) [:0]const u8 {
@ -1709,7 +1715,6 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index, ip: *const InternPool) bool {
.cmp_neq_optimized,
.cmp_vector,
.cmp_vector_optimized,
.interned,
.is_null,
.is_non_null,
.is_null_ptr,


@ -324,7 +324,6 @@ pub fn categorizeOperand(
.inferred_alloc,
.inferred_alloc_comptime,
.ret_ptr,
.interned,
.trap,
.breakpoint,
.dbg_stmt,
@ -981,7 +980,7 @@ fn analyzeInst(
.work_group_id,
=> return analyzeOperands(a, pass, data, inst, .{ .none, .none, .none }),
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.trap,
.unreach,
@ -1264,7 +1263,6 @@ fn analyzeOperands(
operands: [bpi - 1]Air.Inst.Ref,
) Allocator.Error!void {
const gpa = a.gpa;
const inst_tags = a.air.instructions.items(.tag);
const ip = a.intern_pool;
switch (pass) {
@ -1273,10 +1271,6 @@ fn analyzeOperands(
for (operands) |op_ref| {
const operand = Air.refToIndexAllowNone(op_ref) orelse continue;
// Don't compute any liveness for constants
if (inst_tags[operand] == .interned) continue;
_ = try data.live_set.put(gpa, operand, {});
}
},
@ -1307,9 +1301,6 @@ fn analyzeOperands(
const op_ref = operands[i];
const operand = Air.refToIndexAllowNone(op_ref) orelse continue;
// Don't compute any liveness for constants
if (inst_tags[operand] == .interned) continue;
const mask = @as(Bpi, 1) << @as(OperandInt, @intCast(i));
if ((try data.live_set.fetchPut(gpa, operand, {})) == null) {
@ -1837,10 +1828,6 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
const operand = Air.refToIndex(op_ref) orelse return;
// Don't compute any liveness for constants
const inst_tags = big.a.air.instructions.items(.tag);
if (inst_tags[operand] == .interned) return;
// If our result is unused and the instruction doesn't need to be lowered, backends will
// skip the lowering of this instruction, so we don't want to record uses of operands.
// That way, we can mark as many instructions as possible unused.


@ -44,7 +44,6 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void {
.inferred_alloc,
.inferred_alloc_comptime,
.ret_ptr,
.interned,
.breakpoint,
.dbg_stmt,
.dbg_inline_begin,
@ -559,10 +558,6 @@ fn verifyOperand(self: *Verify, inst: Air.Inst.Index, op_ref: Air.Inst.Ref, dies
assert(!dies);
return;
};
if (self.air.instructions.items(.tag)[operand] == .interned) {
assert(!dies);
return;
}
if (dies) {
if (!self.live.remove(operand)) return invalid("%{}: dead operand %{} reused and killed again", .{ inst, operand });
} else {
@ -583,7 +578,6 @@ fn verifyInstOperands(
}
fn verifyInst(self: *Verify, inst: Air.Inst.Index) Error!void {
if (self.air.instructions.items(.tag)[inst] == .interned) return;
if (self.liveness.isUnused(inst)) {
assert(!self.live.contains(inst));
} else {


@ -2068,28 +2068,26 @@ fn resolveMaybeUndefValAllowVariablesMaybeRuntime(
) CompileError!?Value {
assert(inst != .none);
// First section of indexes correspond to a set number of constant values.
const int = @intFromEnum(inst);
if (int < InternPool.static_len) {
return @as(InternPool.Index, @enumFromInt(int)).toValue();
if (@intFromEnum(inst) < InternPool.static_len) {
return @as(InternPool.Index, @enumFromInt(@intFromEnum(inst))).toValue();
}
const i = int - InternPool.static_len;
const air_tags = sema.air_instructions.items(.tag);
if (try sema.typeHasOnePossibleValue(sema.typeOf(inst))) |opv| {
if (air_tags[i] == .interned) {
const interned = sema.air_instructions.items(.data)[i].interned;
const val = interned.toValue();
if (Air.refToInterned(inst)) |ip_index| {
const val = ip_index.toValue();
if (val.getVariable(sema.mod) != null) return val;
}
return opv;
}
const air_datas = sema.air_instructions.items(.data);
const val = switch (air_tags[i]) {
.inferred_alloc => unreachable,
.inferred_alloc_comptime => unreachable,
.interned => air_datas[i].interned.toValue(),
else => return null,
const ip_index = Air.refToInterned(inst) orelse {
switch (air_tags[Air.refToIndex(inst).?]) {
.inferred_alloc => unreachable,
.inferred_alloc_comptime => unreachable,
else => return null,
}
};
const val = ip_index.toValue();
if (val.isRuntimeValue(sema.mod)) make_runtime.* = true;
if (val.isPtrToThreadLocal(sema.mod)) make_runtime.* = true;
return val;
@ -3868,18 +3866,23 @@ fn zirResolveInferredAlloc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Com
},
});
if (std.debug.runtime_safety) {
// The inferred_alloc_comptime should never be referenced again
sema.air_instructions.set(ptr_inst, .{ .tag = undefined, .data = undefined });
}
try sema.maybeQueueFuncBodyAnalysis(decl_index);
// Change it to an interned.
sema.air_instructions.set(ptr_inst, .{
.tag = .interned,
.data = .{ .interned = try mod.intern(.{ .ptr = .{
.ty = final_ptr_ty.toIntern(),
.addr = if (!iac.is_const) .{ .mut_decl = .{
.decl = decl_index,
.runtime_index = block.runtime_index,
} } else .{ .decl = decl_index },
} }) },
});
const interned = try mod.intern(.{ .ptr = .{
.ty = final_ptr_ty.toIntern(),
.addr = if (!iac.is_const) .{ .mut_decl = .{
.decl = decl_index,
.runtime_index = block.runtime_index,
} } else .{ .decl = decl_index },
} });
// Remap the ZIR operand to the resolved pointer value
sema.inst_map.putAssumeCapacity(Zir.refToIndex(inst_data.operand).?, Air.internedToRef(interned));
},
.inferred_alloc => {
const ia1 = sema.air_instructions.items(.data)[ptr_inst].inferred_alloc;
@ -3966,17 +3969,22 @@ fn zirResolveInferredAlloc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Com
};
try mod.declareDeclDependency(sema.owner_decl_index, new_decl_index);
// Even though we reuse the constant instruction, we still remove it from the
// block so that codegen does not see it.
// Remove the instruction from the block so that codegen does not see it.
block.instructions.shrinkRetainingCapacity(search_index);
try sema.maybeQueueFuncBodyAnalysis(new_decl_index);
sema.air_instructions.set(ptr_inst, .{
.tag = .interned,
.data = .{ .interned = try mod.intern(.{ .ptr = .{
.ty = final_ptr_ty.toIntern(),
.addr = .{ .decl = new_decl_index },
} }) },
});
if (std.debug.runtime_safety) {
// The inferred_alloc should never be referenced again
sema.air_instructions.set(ptr_inst, .{ .tag = undefined, .data = undefined });
}
const interned = try mod.intern(.{ .ptr = .{
.ty = final_ptr_ty.toIntern(),
.addr = .{ .decl = new_decl_index },
} });
// Remap the ZIR operand to the resolved pointer value
sema.inst_map.putAssumeCapacity(Zir.refToIndex(inst_data.operand).?, Air.internedToRef(interned));
// Unless the block is comptime, `alloc_inferred` always produces
// a runtime constant. The final inferred type needs to be
@ -4404,7 +4412,6 @@ fn validateUnionInit(
const air_tags = sema.air_instructions.items(.tag);
const air_datas = sema.air_instructions.items(.data);
const field_ptr_air_ref = sema.inst_map.get(field_ptr).?;
const field_ptr_air_inst = Air.refToIndex(field_ptr_air_ref).?;
// Our task here is to determine if the union is comptime-known. In such case,
// we erase the runtime AIR instructions for initializing the union, and replace
@ -4434,7 +4441,7 @@ fn validateUnionInit(
var make_runtime = false;
while (block_index > 0) : (block_index -= 1) {
const store_inst = block.instructions.items[block_index];
if (store_inst == field_ptr_air_inst) break;
if (Air.indexToRef(store_inst) == field_ptr_air_ref) break;
switch (air_tags[store_inst]) {
.store, .store_safe => {},
else => continue,
@ -4453,7 +4460,7 @@ fn validateUnionInit(
if (air_tags[block_inst] != .dbg_stmt) break;
}
if (block_index > 0 and
field_ptr_air_inst == block.instructions.items[block_index - 1])
field_ptr_air_ref == Air.indexToRef(block.instructions.items[block_index - 1]))
{
first_block_index = @min(first_block_index, block_index - 1);
} else {
@ -4622,7 +4629,6 @@ fn validateStructInit(
}
const field_ptr_air_ref = sema.inst_map.get(field_ptr).?;
const field_ptr_air_inst = Air.refToIndex(field_ptr_air_ref).?;
//std.debug.print("validateStructInit (field_ptr_air_inst=%{d}):\n", .{
// field_ptr_air_inst,
@ -4652,7 +4658,7 @@ fn validateStructInit(
var block_index = block.instructions.items.len - 1;
while (block_index > 0) : (block_index -= 1) {
const store_inst = block.instructions.items[block_index];
if (store_inst == field_ptr_air_inst) {
if (Air.indexToRef(store_inst) == field_ptr_air_ref) {
struct_is_comptime = false;
continue :field;
}
@ -4675,7 +4681,7 @@ fn validateStructInit(
if (air_tags[block_inst] != .dbg_stmt) break;
}
if (block_index > 0 and
field_ptr_air_inst == block.instructions.items[block_index - 1])
field_ptr_air_ref == Air.indexToRef(block.instructions.items[block_index - 1]))
{
first_block_index = @min(first_block_index, block_index - 1);
} else {
@ -4865,7 +4871,6 @@ fn zirValidateArrayInit(
}
const elem_ptr_air_ref = sema.inst_map.get(elem_ptr).?;
const elem_ptr_air_inst = Air.refToIndex(elem_ptr_air_ref).?;
// We expect to see something like this in the current block AIR:
// %a = elem_ptr(...)
@ -4890,7 +4895,7 @@ fn zirValidateArrayInit(
var block_index = block.instructions.items.len - 1;
while (block_index > 0) : (block_index -= 1) {
const store_inst = block.instructions.items[block_index];
if (store_inst == elem_ptr_air_inst) {
if (Air.indexToRef(store_inst) == elem_ptr_air_ref) {
array_is_comptime = false;
continue :outer;
}
@ -4913,7 +4918,7 @@ fn zirValidateArrayInit(
if (air_tags[block_inst] != .dbg_stmt) break;
}
if (block_index > 0 and
elem_ptr_air_inst == block.instructions.items[block_index - 1])
elem_ptr_air_ref == Air.indexToRef(block.instructions.items[block_index - 1]))
{
first_block_index = @min(first_block_index, block_index - 1);
} else {
@ -5785,8 +5790,7 @@ fn analyzeBlockBody(
sema.air_instructions.items(.data)[br].br.operand = coerced_operand;
continue;
}
assert(coerce_block.instructions.items[coerce_block.instructions.items.len - 1] ==
Air.refToIndex(coerced_operand).?);
assert(Air.indexToRef(coerce_block.instructions.items[coerce_block.instructions.items.len - 1]) == coerced_operand);
// Convert the br instruction to a block instruction that has the coercion
// and then a new br inside that returns the coerced instruction.
@ -30397,8 +30401,8 @@ fn analyzeDeclVal(
}
const decl_ref = try sema.analyzeDeclRefInner(decl_index, false);
const result = try sema.analyzeLoad(block, src, decl_ref, src);
if (Air.refToIndex(result)) |index| {
if (sema.air_instructions.items(.tag)[index] == .interned and !block.is_typeof) {
if (Air.refToInterned(result) != null) {
if (!block.is_typeof) {
try sema.decl_val_table.put(sema.gpa, decl_index, result);
}
}
@ -30720,7 +30724,7 @@ fn analyzeIsNonErrComptimeOnly(
}
} else if (operand == .undef) {
return sema.addConstUndef(Type.bool);
} else {
} else if (@intFromEnum(operand) < InternPool.static_len) {
// None of the ref tags can be errors.
return Air.Inst.Ref.bool_true;
}
@ -35494,14 +35498,10 @@ pub fn getTmpAir(sema: Sema) Air {
};
}
// TODO: make this non-fallible or remove it entirely
pub fn addType(sema: *Sema, ty: Type) !Air.Inst.Ref {
if (@intFromEnum(ty.toIntern()) < Air.ref_start_index)
return @as(Air.Inst.Ref, @enumFromInt(@intFromEnum(ty.toIntern())));
try sema.air_instructions.append(sema.gpa, .{
.tag = .interned,
.data = .{ .interned = ty.toIntern() },
});
return Air.indexToRef(@as(u32, @intCast(sema.air_instructions.len - 1)));
_ = sema;
return Air.internedToRef(ty.toIntern());
}
fn addIntUnsigned(sema: *Sema, ty: Type, int: u64) CompileError!Air.Inst.Ref {
@ -35513,14 +35513,10 @@ fn addConstUndef(sema: *Sema, ty: Type) CompileError!Air.Inst.Ref {
return sema.addConstant((try sema.mod.intern(.{ .undef = ty.toIntern() })).toValue());
}
pub fn addConstant(sema: *Sema, val: Value) SemaError!Air.Inst.Ref {
if (@intFromEnum(val.toIntern()) < Air.ref_start_index)
return @as(Air.Inst.Ref, @enumFromInt(@intFromEnum(val.toIntern())));
try sema.air_instructions.append(sema.gpa, .{
.tag = .interned,
.data = .{ .interned = val.toIntern() },
});
return Air.indexToRef(@as(u32, @intCast(sema.air_instructions.len - 1)));
// TODO: make this non-fallible or remove it entirely
pub fn addConstant(sema: *Sema, val: Value) !Air.Inst.Ref {
_ = sema;
return Air.internedToRef(val.toIntern());
}
pub fn addExtra(sema: *Sema, extra: anytype) Allocator.Error!u32 {


@ -845,7 +845,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.ptr_elem_val => try self.airPtrElemVal(inst),
.ptr_elem_ptr => try self.airPtrElemPtr(inst),
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -920,7 +920,6 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
/// Asserts there is already capacity to insert into top branch inst_table.
fn processDeath(self: *Self, inst: Air.Inst.Index) void {
assert(self.air.instructions.items(.tag)[inst] != .interned);
// When editing this function, note that the logic must synchronize with `reuseOperand`.
const prev_value = self.getResolvedInstValue(inst);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@ -953,9 +952,7 @@ fn finishAir(self: *Self, inst: Air.Inst.Index, result: MCValue, operands: [Live
const dies = @as(u1, @truncate(tomb_bits)) != 0;
tomb_bits >>= 1;
if (!dies) continue;
const op_int = @intFromEnum(op);
if (op_int < Air.ref_start_index) continue;
const op_index = @as(Air.Inst.Index, @intCast(op_int - Air.ref_start_index));
const op_index = Air.refToIndex(op) orelse continue;
self.processDeath(op_index);
}
const is_used = @as(u1, @truncate(tomb_bits)) == 0;
@ -4696,9 +4693,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
// that death now instead of later as this has an effect on
// whether it needs to be spilled in the branches
if (self.liveness.operandDies(inst, 0)) {
const op_int = @intFromEnum(pl_op.operand);
if (op_int >= Air.ref_start_index) {
const op_index = @as(Air.Inst.Index, @intCast(op_int - Air.ref_start_index));
if (Air.refToIndex(pl_op.operand)) |op_index| {
self.processDeath(op_index);
}
}
@ -6149,22 +6144,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
.val = (try self.air.value(inst, mod)).?,
});
switch (self.air.instructions.items(.tag)[inst_index]) {
.interned => {
// Constants have static lifetimes, so they are always memoized in the outer most table.
const branch = &self.branch_stack.items[0];
const gop = try branch.inst_table.getOrPut(self.gpa, inst_index);
if (!gop.found_existing) {
const interned = self.air.instructions.items(.data)[inst_index].interned;
gop.value_ptr.* = try self.genTypedValue(.{
.ty = inst_ty,
.val = interned.toValue(),
});
}
return gop.value_ptr.*;
},
else => return self.getResolvedInstValue(inst_index),
}
return self.getResolvedInstValue(inst_index);
}
fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {


@ -829,7 +829,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.ptr_elem_val => try self.airPtrElemVal(inst),
.ptr_elem_ptr => try self.airPtrElemPtr(inst),
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -904,7 +904,6 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
/// Asserts there is already capacity to insert into top branch inst_table.
fn processDeath(self: *Self, inst: Air.Inst.Index) void {
assert(self.air.instructions.items(.tag)[inst] != .interned);
// When editing this function, note that the logic must synchronize with `reuseOperand`.
const prev_value = self.getResolvedInstValue(inst);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@ -939,9 +938,7 @@ fn finishAir(self: *Self, inst: Air.Inst.Index, result: MCValue, operands: [Live
const dies = @as(u1, @truncate(tomb_bits)) != 0;
tomb_bits >>= 1;
if (!dies) continue;
const op_int = @intFromEnum(op);
if (op_int < Air.ref_start_index) continue;
const op_index = @as(Air.Inst.Index, @intCast(op_int - Air.ref_start_index));
const op_index = Air.refToIndex(op) orelse continue;
self.processDeath(op_index);
}
const is_used = @as(u1, @truncate(tomb_bits)) == 0;
@ -4651,9 +4648,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
// that death now instead of later as this has an effect on
// whether it needs to be spilled in the branches
if (self.liveness.operandDies(inst, 0)) {
const op_int = @intFromEnum(pl_op.operand);
if (op_int >= Air.ref_start_index) {
const op_index = @as(Air.Inst.Index, @intCast(op_int - Air.ref_start_index));
if (Air.refToIndex(pl_op.operand)) |op_index| {
self.processDeath(op_index);
}
}
@ -6102,22 +6097,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
.val = (try self.air.value(inst, mod)).?,
});
switch (self.air.instructions.items(.tag)[inst_index]) {
.interned => {
// Constants have static lifetimes, so they are always memoized in the outer most table.
const branch = &self.branch_stack.items[0];
const gop = try branch.inst_table.getOrPut(self.gpa, inst_index);
if (!gop.found_existing) {
const interned = self.air.instructions.items(.data)[inst_index].interned;
gop.value_ptr.* = try self.genTypedValue(.{
.ty = inst_ty,
.val = interned.toValue(),
});
}
return gop.value_ptr.*;
},
else => return self.getResolvedInstValue(inst_index),
}
return self.getResolvedInstValue(inst_index);
}
fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {


@ -664,7 +664,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.ptr_elem_val => try self.airPtrElemVal(inst),
.ptr_elem_ptr => try self.airPtrElemPtr(inst),
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -731,7 +731,6 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
/// Asserts there is already capacity to insert into top branch inst_table.
fn processDeath(self: *Self, inst: Air.Inst.Index) void {
assert(self.air.instructions.items(.tag)[inst] != .interned);
// When editing this function, note that the logic must synchronize with `reuseOperand`.
const prev_value = self.getResolvedInstValue(inst);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@ -757,9 +756,7 @@ fn finishAir(self: *Self, inst: Air.Inst.Index, result: MCValue, operands: [Live
const dies = @as(u1, @truncate(tomb_bits)) != 0;
tomb_bits >>= 1;
if (!dies) continue;
const op_int = @intFromEnum(op);
if (op_int < Air.ref_start_index) continue;
const op_index = @as(Air.Inst.Index, @intCast(op_int - Air.ref_start_index));
const op_index = Air.refToIndex(op) orelse continue;
self.processDeath(op_index);
}
const is_used = @as(u1, @truncate(tomb_bits)) == 0;
@ -2556,22 +2553,7 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
.val = (try self.air.value(inst, mod)).?,
});
switch (self.air.instructions.items(.tag)[inst_index]) {
.interned => {
// Constants have static lifetimes, so they are always memoized in the outer most table.
const branch = &self.branch_stack.items[0];
const gop = try branch.inst_table.getOrPut(self.gpa, inst_index);
if (!gop.found_existing) {
const interned = self.air.instructions.items(.data)[inst_index].interned;
gop.value_ptr.* = try self.genTypedValue(.{
.ty = inst_ty,
.val = interned.toValue(),
});
}
return gop.value_ptr.*;
},
else => return self.getResolvedInstValue(inst_index),
}
return self.getResolvedInstValue(inst_index);
}
fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue {


@ -677,7 +677,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.ptr_elem_val => try self.airPtrElemVal(inst),
.ptr_elem_ptr => try self.airPtrElemPtr(inst),
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.unreach => self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -1515,9 +1515,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
// that death now instead of later as this has an effect on
// whether it needs to be spilled in the branches
if (self.liveness.operandDies(inst, 0)) {
const op_int = @intFromEnum(pl_op.operand);
if (op_int >= Air.ref_start_index) {
const op_index = @as(Air.Inst.Index, @intCast(op_int - Air.ref_start_index));
if (Air.refToIndex(pl_op.operand)) |op_index| {
self.processDeath(op_index);
}
}
@ -3570,9 +3568,7 @@ fn finishAir(self: *Self, inst: Air.Inst.Index, result: MCValue, operands: [Live
const dies = @as(u1, @truncate(tomb_bits)) != 0;
tomb_bits >>= 1;
if (!dies) continue;
const op_int = @intFromEnum(op);
if (op_int < Air.ref_start_index) continue;
const op_index = @as(Air.Inst.Index, @intCast(op_int - Air.ref_start_index));
const op_index = Air.refToIndex(op) orelse continue;
self.processDeath(op_index);
}
const is_used = @as(u1, @truncate(tomb_bits)) == 0;
@ -4422,7 +4418,6 @@ fn performReloc(self: *Self, inst: Mir.Inst.Index) !void {
/// Asserts there is already capacity to insert into top branch inst_table.
fn processDeath(self: *Self, inst: Air.Inst.Index) void {
assert(self.air.instructions.items(.tag)[inst] != .interned);
// When editing this function, note that the logic must synchronize with `reuseOperand`.
const prev_value = self.getResolvedInstValue(inst);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@ -4550,22 +4545,7 @@ fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!MCValue {
if (!ty.hasRuntimeBitsIgnoreComptime(mod)) return .none;
if (Air.refToIndex(ref)) |inst| {
switch (self.air.instructions.items(.tag)[inst]) {
.interned => {
// Constants have static lifetimes, so they are always memoized in the outer most table.
const branch = &self.branch_stack.items[0];
const gop = try branch.inst_table.getOrPut(self.gpa, inst);
if (!gop.found_existing) {
const interned = self.air.instructions.items(.data)[inst].interned;
gop.value_ptr.* = try self.genTypedValue(.{
.ty = ty,
.val = interned.toValue(),
});
}
return gop.value_ptr.*;
},
else => return self.getResolvedInstValue(inst),
}
return self.getResolvedInstValue(inst);
}
return self.genTypedValue(.{


@ -854,9 +854,9 @@ const BigTomb = struct {
lbt: Liveness.BigTomb,
fn feed(bt: *BigTomb, op_ref: Air.Inst.Ref) void {
_ = Air.refToIndex(op_ref) orelse return; // constants do not have to be freed regardless
const dies = bt.lbt.feed();
if (!dies) return;
// This will be a nop for interned constants.
processDeath(bt.gen, op_ref);
}
@ -882,8 +882,7 @@ fn iterateBigTomb(func: *CodeGen, inst: Air.Inst.Index, operand_count: usize) !B
}
fn processDeath(func: *CodeGen, ref: Air.Inst.Ref) void {
const inst = Air.refToIndex(ref) orelse return;
assert(func.air.instructions.items(.tag)[inst] != .interned);
if (Air.refToIndex(ref) == null) return;
// Branches are currently only allowed to free locals allocated
// within their own branch.
// TODO: Upon branch consolidation free any locals if needed.
@ -1832,7 +1831,7 @@ fn buildPointerOffset(func: *CodeGen, ptr_value: WValue, offset: u64, action: en
fn genInst(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const air_tags = func.air.instructions.items(.tag);
return switch (air_tags[inst]) {
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.add => func.airBinOp(inst, .add),
.add_sat => func.airSatBinOp(inst, .add),


@ -81,7 +81,7 @@ end_di_column: u32,
/// which is a relative jump, based on the address following the reloc.
exitlude_jump_relocs: std.ArrayListUnmanaged(Mir.Inst.Index) = .{},
const_tracking: InstTrackingMap = .{},
const_tracking: ConstTrackingMap = .{},
inst_tracking: InstTrackingMap = .{},
// Key is the block instruction
@ -403,6 +403,7 @@ pub const MCValue = union(enum) {
};
const InstTrackingMap = std.AutoArrayHashMapUnmanaged(Air.Inst.Index, InstTracking);
const ConstTrackingMap = std.AutoArrayHashMapUnmanaged(InternPool.Index, InstTracking);
const InstTracking = struct {
long: MCValue,
short: MCValue,
@ -1927,7 +1928,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.ptr_elem_val => try self.airPtrElemVal(inst),
.ptr_elem_ptr => try self.airPtrElemPtr(inst),
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.unreach => if (self.wantSafety()) try self.airTrap() else self.finishAirBookkeeping(),
.optional_payload => try self.airOptionalPayload(inst),
@ -2099,7 +2100,6 @@ fn feed(self: *Self, bt: *Liveness.BigTomb, operand: Air.Inst.Ref) void {
/// Asserts there is already capacity to insert into top branch inst_table.
fn processDeath(self: *Self, inst: Air.Inst.Index) void {
assert(self.air.instructions.items(.tag)[inst] != .interned);
self.inst_tracking.getPtr(inst).?.die(self, inst);
}
@ -2871,13 +2871,6 @@ fn activeIntBits(self: *Self, dst_air: Air.Inst.Ref) u16 {
const dst_info = dst_ty.intInfo(mod);
if (Air.refToIndex(dst_air)) |inst| {
switch (air_tag[inst]) {
.interned => {
const src_val = air_data[inst].interned.toValue();
var space: Value.BigIntSpace = undefined;
const src_int = src_val.toBigInt(&space, mod);
return @as(u16, @intCast(src_int.bitCountTwosComp())) +
@intFromBool(src_int.positive and dst_info.signedness == .signed);
},
.intcast => {
const src_ty = self.typeOf(air_data[inst].ty_op.operand);
const src_info = src_ty.intInfo(mod);
@ -2894,6 +2887,11 @@ fn activeIntBits(self: *Self, dst_air: Air.Inst.Ref) u16 {
},
else => {},
}
} else if (Air.refToInterned(dst_air)) |ip_index| {
var space: Value.BigIntSpace = undefined;
const src_int = ip_index.toValue().toBigInt(&space, mod);
return @as(u16, @intCast(src_int.bitCountTwosComp())) +
@intFromBool(src_int.positive and dst_info.signedness == .signed);
}
return dst_info.bits;
}
@ -11635,32 +11633,26 @@ fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!MCValue {
// If the type has no codegen bits, no need to store it.
if (!ty.hasRuntimeBitsIgnoreComptime(mod)) return .none;
if (Air.refToIndex(ref)) |inst| {
const mcv = switch (self.air.instructions.items(.tag)[inst]) {
.interned => tracking: {
const gop = try self.const_tracking.getOrPut(self.gpa, inst);
if (!gop.found_existing) gop.value_ptr.* = InstTracking.init(try self.genTypedValue(.{
.ty = ty,
.val = self.air.instructions.items(.data)[inst].interned.toValue(),
}));
break :tracking gop.value_ptr;
},
else => self.inst_tracking.getPtr(inst).?,
}.short;
switch (mcv) {
.none, .unreach, .dead => unreachable,
else => return mcv,
}
}
const mcv = if (Air.refToIndex(ref)) |inst| mcv: {
break :mcv self.inst_tracking.getPtr(inst).?.short;
} else mcv: {
const ip_index = Air.refToInterned(ref).?;
const gop = try self.const_tracking.getOrPut(self.gpa, ip_index);
if (!gop.found_existing) gop.value_ptr.* = InstTracking.init(try self.genTypedValue(.{
.ty = ty,
.val = ip_index.toValue(),
}));
break :mcv gop.value_ptr.short;
};
return self.genTypedValue(.{ .ty = ty, .val = (try self.air.value(ref, mod)).? });
switch (mcv) {
.none, .unreach, .dead => unreachable,
else => return mcv,
}
}
fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) *InstTracking {
const tracking = switch (self.air.instructions.items(.tag)[inst]) {
.interned => &self.const_tracking,
else => &self.inst_tracking,
}.getPtr(inst).?;
const tracking = self.inst_tracking.getPtr(inst).?;
return switch (tracking.short) {
.none, .unreach, .dead => unreachable,
else => tracking,
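
Net effect: constants are memoized once per interned value rather than once
per AIR instruction. A condensed sketch of the lookup pattern above (the
helper name is hypothetical; assumes the surrounding `Self` context):

    fn resolveConst(self: *Self, ip_index: InternPool.Index, ty: Type) !MCValue {
        // Constants have static lifetimes, so they are cached per InternPool
        // index in `const_tracking` instead of per instruction.
        const gop = try self.const_tracking.getOrPut(self.gpa, ip_index);
        if (!gop.found_existing) gop.value_ptr.* = InstTracking.init(
            try self.genTypedValue(.{ .ty = ty, .val = ip_index.toValue() }),
        );
        return gop.value_ptr.short;
    }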


@ -53,7 +53,7 @@ const BlockData = struct {
result: CValue,
};
pub const CValueMap = std.AutoHashMap(Air.Inst.Index, CValue);
pub const CValueMap = std.AutoHashMap(Air.Inst.Ref, CValue);
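
Keying the map by `Air.Inst.Ref` instead of `Air.Inst.Index` lets instruction
results and interned constants share one cache. A hypothetical interaction
(illustrative values only; exact `CValue` payloads follow the code below):

    test "value_map keyed by Ref" {
        var map = CValueMap.init(std.testing.allocator);
        defer map.deinit();
        // A runtime instruction result is cached under its instruction ref...
        try map.put(Air.indexToRef(3), .{ .local = 0 });
        // ...while an interned constant is cached under its interned ref,
        // with no local allocated for it.
        const const_ref = Air.internedToRef(.bool_true);
        try map.put(const_ref, .{ .constant = const_ref });
    }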
pub const LazyFnKey = union(enum) {
tag_name: Decl.Index,
@ -282,31 +282,29 @@ pub const Function = struct {
allocs: std.AutoArrayHashMapUnmanaged(LocalIndex, bool) = .{},
fn resolveInst(f: *Function, ref: Air.Inst.Ref) !CValue {
if (Air.refToIndex(ref)) |inst| {
const gop = try f.value_map.getOrPut(inst);
if (gop.found_existing) return gop.value_ptr.*;
const gop = try f.value_map.getOrPut(ref);
if (gop.found_existing) return gop.value_ptr.*;
const mod = f.object.dg.module;
const val = (try f.air.value(ref, mod)).?;
const ty = f.typeOf(ref);
const mod = f.object.dg.module;
const val = (try f.air.value(ref, mod)).?;
const ty = f.typeOf(ref);
const result: CValue = if (lowersToArray(ty, mod)) result: {
const writer = f.object.code_header.writer();
const alignment = 0;
const decl_c_value = try f.allocLocalValue(ty, alignment);
const gpa = f.object.dg.gpa;
try f.allocs.put(gpa, decl_c_value.new_local, false);
try writer.writeAll("static ");
try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, Const, alignment, .complete);
try writer.writeAll(" = ");
try f.object.dg.renderValue(writer, ty, val, .StaticInitializer);
try writer.writeAll(";\n ");
break :result decl_c_value;
} else .{ .constant = ref };
const result: CValue = if (lowersToArray(ty, mod)) result: {
const writer = f.object.code_header.writer();
const alignment = 0;
const decl_c_value = try f.allocLocalValue(ty, alignment);
const gpa = f.object.dg.gpa;
try f.allocs.put(gpa, decl_c_value.new_local, false);
try writer.writeAll("static ");
try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, Const, alignment, .complete);
try writer.writeAll(" = ");
try f.object.dg.renderValue(writer, ty, val, .StaticInitializer);
try writer.writeAll(";\n ");
break :result decl_c_value;
} else .{ .constant = ref };
gop.value_ptr.* = result;
return result;
} else return .{ .constant = ref };
gop.value_ptr.* = result;
return result;
}
fn wantSafety(f: *Function) bool {
@ -2823,7 +2821,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
const result_value = switch (air_tags[inst]) {
// zig fmt: off
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.arg => try airArg(f, inst),
@ -3091,7 +3089,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
if (result_value == .new_local) {
log.debug("map %{d} to t{d}", .{ inst, result_value.new_local });
}
try f.value_map.putNoClobber(inst, switch (result_value) {
try f.value_map.putNoClobber(Air.indexToRef(inst), switch (result_value) {
.none => continue,
.new_local => |i| .{ .local = i },
else => result_value,
@ -7439,7 +7437,7 @@ fn formatIntLiteral(
} else data.val.toBigInt(&int_buf, mod);
assert(int.fitsInTwosComp(data.int_info.signedness, data.int_info.bits));
const c_bits = @as(usize, @intCast(data.cty.byteSize(data.dg.ctypes.set, target) * 8));
const c_bits: usize = @intCast(data.cty.byteSize(data.dg.ctypes.set, target) * 8);
var one_limbs: [BigInt.calcLimbLen(1)]BigIntLimb = undefined;
const one = BigInt.Mutable.init(&one_limbs, 1).toConst();
@ -7745,8 +7743,7 @@ fn reap(f: *Function, inst: Air.Inst.Index, operands: []const Air.Inst.Ref) !voi
fn die(f: *Function, inst: Air.Inst.Index, ref: Air.Inst.Ref) !void {
const ref_inst = Air.refToIndex(ref) orelse return;
assert(f.air.instructions.items(.tag)[ref_inst] != .interned);
const c_value = (f.value_map.fetchRemove(ref_inst) orelse return).value;
const c_value = (f.value_map.fetchRemove(ref) orelse return).value;
const local_index = switch (c_value) {
.local, .new_local => |l| l,
else => return,


@ -4557,7 +4557,7 @@ pub const FuncGen = struct {
.vector_store_elem => try self.airVectorStoreElem(inst),
.inferred_alloc, .inferred_alloc_comptime, .interned => unreachable,
.inferred_alloc, .inferred_alloc_comptime => unreachable,
.unreach => self.airUnreach(inst),
.dbg_stmt => self.airDbgStmt(inst),
@ -5762,19 +5762,22 @@ pub const FuncGen = struct {
return self.loadByRef(elem_ptr, elem_ty, elem_ty.abiAlignment(mod), false);
} else {
const lhs_index = Air.refToIndex(bin_op.lhs).?;
const elem_llvm_ty = try o.lowerType(elem_ty);
if (self.air.instructions.items(.tag)[lhs_index] == .load) {
const load_data = self.air.instructions.items(.data)[lhs_index];
const load_ptr = load_data.ty_op.operand;
const load_ptr_tag = self.air.instructions.items(.tag)[Air.refToIndex(load_ptr).?];
switch (load_ptr_tag) {
.struct_field_ptr, .struct_field_ptr_index_0, .struct_field_ptr_index_1, .struct_field_ptr_index_2, .struct_field_ptr_index_3 => {
const load_ptr_inst = try self.resolveInst(load_ptr);
const gep = self.builder.buildInBoundsGEP(array_llvm_ty, load_ptr_inst, &indices, indices.len, "");
return self.builder.buildLoad(elem_llvm_ty, gep, "");
},
else => {},
if (Air.refToIndex(bin_op.lhs)) |lhs_index| {
if (self.air.instructions.items(.tag)[lhs_index] == .load) {
const load_data = self.air.instructions.items(.data)[lhs_index];
const load_ptr = load_data.ty_op.operand;
if (Air.refToIndex(load_ptr)) |load_ptr_index| {
const load_ptr_tag = self.air.instructions.items(.tag)[load_ptr_index];
switch (load_ptr_tag) {
.struct_field_ptr, .struct_field_ptr_index_0, .struct_field_ptr_index_1, .struct_field_ptr_index_2, .struct_field_ptr_index_3 => {
const load_ptr_inst = try self.resolveInst(load_ptr);
const gep = self.builder.buildInBoundsGEP(array_llvm_ty, load_ptr_inst, &indices, indices.len, "");
return self.builder.buildLoad(elem_llvm_ty, gep, "");
},
else => {},
}
}
}
}
const elem_ptr = self.builder.buildInBoundsGEP(array_llvm_ty, array_llvm_val, &indices, indices.len, "");


@ -49,8 +49,6 @@ pub fn write(stream: anytype, module: *Module, air: Air, liveness: ?Liveness) vo
.indent = 2,
.skip_body = false,
};
writer.writeAllConstants(stream) catch return;
stream.writeByte('\n') catch return;
writer.writeBody(stream, air.getMainBody()) catch return;
}
@ -88,15 +86,6 @@ const Writer = struct {
indent: usize,
skip_body: bool,
fn writeAllConstants(w: *Writer, s: anytype) @TypeOf(s).Error!void {
for (w.air.instructions.items(.tag), 0..) |tag, i| {
if (tag != .interned) continue;
const inst = @as(Air.Inst.Index, @intCast(i));
try w.writeInst(s, inst);
try s.writeByte('\n');
}
}
fn writeBody(w: *Writer, s: anytype, body: []const Air.Inst.Index) @TypeOf(s).Error!void {
for (body) |inst| {
try w.writeInst(s, inst);
@ -299,7 +288,6 @@ const Writer = struct {
.struct_field_val => try w.writeStructField(s, inst),
.inferred_alloc => @panic("TODO"),
.inferred_alloc_comptime => @panic("TODO"),
.interned => try w.writeInterned(s, inst),
.assembly => try w.writeAssembly(s, inst),
.dbg_stmt => try w.writeDbgStmt(s, inst),
@ -596,14 +584,6 @@ const Writer = struct {
try s.print(", {d}", .{extra.field_index});
}
fn writeInterned(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const mod = w.module;
const ip_index = w.air.instructions.items(.data)[inst].interned;
const ty = mod.intern_pool.indexToKey(ip_index).typeOf().toType();
try w.writeType(s, ty);
try s.print(", {}", .{ip_index.toValue().fmtValue(ty, mod)});
}
fn writeAssembly(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const ty_pl = w.air.instructions.items(.data)[inst].ty_pl;
const extra = w.air.extraData(Air.Asm, ty_pl.payload);
@ -956,13 +936,18 @@ const Writer = struct {
operand: Air.Inst.Ref,
dies: bool,
) @TypeOf(s).Error!void {
const i = @intFromEnum(operand);
if (i < InternPool.static_len) {
if (@intFromEnum(operand) < InternPool.static_len) {
return s.print("@{}", .{operand});
} else if (Air.refToInterned(operand)) |ip_index| {
const mod = w.module;
const ty = mod.intern_pool.indexToKey(ip_index).typeOf().toType();
try s.print("<{}, {}>", .{
ty.fmt(mod),
ip_index.toValue().fmtValue(ty, mod),
});
} else {
return w.writeInstIndex(s, Air.refToIndex(operand).?, dies);
}
return w.writeInstIndex(s, i - InternPool.static_len, dies);
}
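
With constants no longer printed as a separate block of `interned`
instructions, an interned operand renders inline as `<type, value>` next to
instruction operands. A dump line might look like this (hypothetical output;
the exact rendering comes from `fmtValue` and `writeInstIndex`):

    %5 = add(%2!, <u32, 1>)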
fn writeInstIndex(