From 884d957b6c291961536c10401f60264da26cba30 Mon Sep 17 00:00:00 2001 From: mlugg Date: Tue, 26 Mar 2024 00:05:08 +0000 Subject: [PATCH] compiler: eliminate legacy Value representation Good riddance! Most of these changes are trivial. There's a fix for a minor bug this exposed in `Value.readFromPackedMemory`, but aside from that, it's all just things like changing `intern` calls to `toIntern`. --- src/Module.zig | 11 +- src/Sema.zig | 116 ++---- src/TypedValue.zig | 619 ++++++++++++---------------- src/Value.zig | 779 ++++++++---------------------------- src/arch/wasm/CodeGen.zig | 2 +- src/arch/x86_64/CodeGen.zig | 2 +- src/codegen.zig | 2 +- src/codegen/c.zig | 4 +- src/codegen/llvm.zig | 2 +- src/type.zig | 2 +- 10 files changed, 479 insertions(+), 1060 deletions(-) diff --git a/src/Module.zig b/src/Module.zig index a11194c103..4391472fa5 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -497,13 +497,6 @@ pub const Decl = struct { }; } - pub fn internValue(decl: *Decl, zcu: *Zcu) Allocator.Error!InternPool.Index { - assert(decl.has_tv); - const ip_index = try decl.val.intern(decl.typeOf(zcu), zcu); - decl.val = Value.fromInterned(ip_index); - return ip_index; - } - pub fn isFunction(decl: Decl, zcu: *const Zcu) !bool { const tv = try decl.typedValue(zcu); return tv.ty.zigTypeTag(zcu) == .Fn; @@ -3763,7 +3756,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !SemaDeclResult { }, } - decl.val = Value.fromInterned((try decl_tv.val.intern(decl_tv.ty, mod))); + decl.val = decl_tv.val; // Function linksection, align, and addrspace were already set by Sema if (!is_func) { decl.alignment = blk: { @@ -5624,8 +5617,6 @@ pub fn markDeclAlive(mod: *Module, decl: *Decl) Allocator.Error!void { if (decl.alive) return; decl.alive = true; - _ = try decl.internValue(mod); - // This is the first time we are marking this Decl alive. We must // therefore recurse into its value and mark any Decl it references // as also alive, so that any Decl referenced does not get garbage collected. diff --git a/src/Sema.zig b/src/Sema.zig index 7db1462787..0d387faacf 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -7835,7 +7835,7 @@ fn analyzeCall( if (is_comptime_call) { const result_val = try sema.resolveConstValue(block, .unneeded, result, undefined); - const result_interned = try result_val.intern2(sema.fn_ret_ty, mod); + const result_interned = result_val.toIntern(); // Transform ad-hoc inferred error set types into concrete error sets. const result_transformed = try sema.resolveAdHocInferredErrorSet(block, call_src, result_interned); @@ -7856,8 +7856,7 @@ fn analyzeCall( } if (try sema.resolveValue(result)) |result_val| { - const result_interned = try result_val.intern2(sema.fn_ret_ty, mod); - const result_transformed = try sema.resolveAdHocInferredErrorSet(block, call_src, result_interned); + const result_transformed = try sema.resolveAdHocInferredErrorSet(block, call_src, result_val.toIntern()); break :res2 Air.internedToRef(result_transformed); } @@ -8042,7 +8041,7 @@ fn analyzeInlineCallArg( // when the hash function is called. const resolved_arg_val = try ics.caller().resolveLazyValue(arg_val); should_memoize.* = should_memoize.* and !resolved_arg_val.canMutateComptimeVarState(mod); - memoized_arg_values[arg_i.*] = try resolved_arg_val.intern(Type.fromInterned(param_ty), mod); + memoized_arg_values[arg_i.*] = resolved_arg_val.toIntern(); } else { ics.callee().inst_map.putAssumeCapacityNoClobber(inst, casted_arg); } @@ -8081,7 +8080,7 @@ fn analyzeInlineCallArg( // when the hash function is called. 
const resolved_arg_val = try ics.caller().resolveLazyValue(arg_val); should_memoize.* = should_memoize.* and !resolved_arg_val.canMutateComptimeVarState(mod); - memoized_arg_values[arg_i.*] = try resolved_arg_val.intern(ics.caller().typeOf(uncasted_arg), mod); + memoized_arg_values[arg_i.*] = resolved_arg_val.toIntern(); } else { if (zir_tags[@intFromEnum(inst)] == .param_anytype_comptime) { _ = try ics.caller().resolveConstValue(arg_block, arg_src, uncasted_arg, .{ @@ -14270,7 +14269,7 @@ fn zirBitNot(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. const elems = try sema.arena.alloc(InternPool.Index, vec_len); for (elems, 0..) |*elem, i| { const elem_val = try val.elemValue(mod, i); - elem.* = try (try elem_val.bitwiseNot(scalar_type, sema.arena, mod)).intern(scalar_type, mod); + elem.* = (try elem_val.bitwiseNot(scalar_type, sema.arena, mod)).toIntern(); } return Air.internedToRef((try mod.intern(.{ .aggregate = .{ .ty = operand_type.toIntern(), @@ -14521,7 +14520,7 @@ fn zirArrayCat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai } }; const coerced_elem_val_inst = try sema.coerce(block, resolved_elem_ty, elem_val_inst, operand_src); const coerced_elem_val = try sema.resolveConstValue(block, operand_src, coerced_elem_val_inst, undefined); - element_vals[elem_i] = try coerced_elem_val.intern(resolved_elem_ty, mod); + element_vals[elem_i] = coerced_elem_val.toIntern(); } while (elem_i < result_len) : (elem_i += 1) { const rhs_elem_i = elem_i - lhs_len; @@ -14534,7 +14533,7 @@ fn zirArrayCat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai } }; const coerced_elem_val_inst = try sema.coerce(block, resolved_elem_ty, elem_val_inst, operand_src); const coerced_elem_val = try sema.resolveConstValue(block, operand_src, coerced_elem_val_inst, undefined); - element_vals[elem_i] = try coerced_elem_val.intern(resolved_elem_ty, mod); + element_vals[elem_i] = coerced_elem_val.toIntern(); } return sema.addConstantMaybeRef(try mod.intern(.{ .aggregate = .{ .ty = result_ty.toIntern(), @@ -15813,7 +15812,7 @@ fn intRem( for (result_data, 0..) 
|*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try sema.intRemScalar(lhs_elem, rhs_elem, scalar_ty)).intern(scalar_ty, mod); + scalar.* = (try sema.intRemScalar(lhs_elem, rhs_elem, scalar_ty)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -17753,7 +17752,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const info = ty.intInfo(mod); const field_values = .{ // signedness: Signedness, - try (try mod.enumValueFieldIndex(signedness_ty, @intFromEnum(info.signedness))).intern(signedness_ty, mod), + (try mod.enumValueFieldIndex(signedness_ty, @intFromEnum(info.signedness))).toIntern(), // bits: u16, (try mod.intValue(Type.u16, info.bits)).toIntern(), }; @@ -17823,7 +17822,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const field_values = .{ // size: Size, - try (try mod.enumValueFieldIndex(ptr_size_ty, @intFromEnum(info.flags.size))).intern(ptr_size_ty, mod), + (try mod.enumValueFieldIndex(ptr_size_ty, @intFromEnum(info.flags.size))).toIntern(), // is_const: bool, Value.makeBool(info.flags.is_const).toIntern(), // is_volatile: bool, @@ -17831,7 +17830,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai // alignment: comptime_int, alignment.toIntern(), // address_space: AddressSpace - try (try mod.enumValueFieldIndex(addrspace_ty, @intFromEnum(info.flags.address_space))).intern(addrspace_ty, mod), + (try mod.enumValueFieldIndex(addrspace_ty, @intFromEnum(info.flags.address_space))).toIntern(), // child: type, info.child, // is_allowzero: bool, @@ -19975,8 +19974,8 @@ fn unionInit( const tag_val = try mod.enumValueFieldIndex(tag_ty, field_index); return Air.internedToRef((try mod.intern(.{ .un = .{ .ty = union_ty.toIntern(), - .tag = try tag_val.intern(tag_ty, mod), - .val = try init_val.intern(field_ty, mod), + .tag = tag_val.toIntern(), + .val = init_val.toIntern(), } }))); } @@ -20099,8 +20098,8 @@ fn zirStructInit( if (try sema.resolveValue(init_inst)) |val| { const struct_val = Value.fromInterned((try mod.intern(.{ .un = .{ .ty = resolved_ty.toIntern(), - .tag = try tag_val.intern(tag_ty, mod), - .val = try val.intern(field_ty, mod), + .tag = tag_val.toIntern(), + .val = val.toIntern(), } }))); const final_val_inst = try sema.coerce(block, result_ty, Air.internedToRef(struct_val.toIntern()), src); const final_val = (try sema.resolveValue(final_val_inst)).?; @@ -20400,7 +20399,7 @@ fn structInitAnon( return sema.failWithOwnedErrorMsg(block, msg); } if (try sema.resolveValue(init)) |init_val| { - field_val.* = try init_val.intern(Type.fromInterned(field_ty.*), mod); + field_val.* = init_val.toIntern(); } else { field_val.* = .none; runtime_index = @intCast(i_usize); @@ -20577,13 +20576,9 @@ fn zirArrayInit( const runtime_index = opt_runtime_index orelse { const elem_vals = try sema.arena.alloc(InternPool.Index, resolved_args.len); - for (elem_vals, resolved_args, 0..) |*val, arg, i| { - const elem_ty = if (is_tuple) - array_ty.structFieldType(i, mod) - else - array_ty.elemType2(mod); + for (elem_vals, resolved_args) |*val, arg| { // We checked that all args are comptime above. 
- val.* = try ((sema.resolveValue(arg) catch unreachable).?).intern(elem_ty, mod); + val.* = (sema.resolveValue(arg) catch unreachable).?.toIntern(); } const arr_val = try mod.intern(.{ .aggregate = .{ .ty = array_ty.toIntern(), @@ -20998,7 +20993,7 @@ fn maybeConstantUnaryMath( const elems = try sema.arena.alloc(InternPool.Index, vec_len); for (elems, 0..) |*elem, i| { const elem_val = try val.elemValue(sema.mod, i); - elem.* = try (try eval(elem_val, scalar_ty, sema.arena, sema.mod)).intern(scalar_ty, mod); + elem.* = (try eval(elem_val, scalar_ty, sema.arena, sema.mod)).toIntern(); } return Air.internedToRef((try mod.intern(.{ .aggregate = .{ .ty = result_ty.toIntern(), @@ -23216,7 +23211,8 @@ fn zirTruncate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const elems = try sema.arena.alloc(InternPool.Index, operand_ty.vectorLen(mod)); for (elems, 0..) |*elem, i| { const elem_val = try val.elemValue(mod, i); - elem.* = try (try elem_val.intTrunc(operand_scalar_ty, sema.arena, dest_info.signedness, dest_info.bits, mod)).intern(dest_scalar_ty, mod); + const uncoerced_elem = try elem_val.intTrunc(operand_scalar_ty, sema.arena, dest_info.signedness, dest_info.bits, mod); + elem.* = (try mod.getCoerced(uncoerced_elem, dest_scalar_ty)).toIntern(); } return Air.internedToRef((try mod.intern(.{ .aggregate = .{ .ty = dest_ty.toIntern(), @@ -23330,7 +23326,7 @@ fn zirByteSwap(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const elems = try sema.arena.alloc(InternPool.Index, vec_len); for (elems, 0..) |*elem, i| { const elem_val = try val.elemValue(mod, i); - elem.* = try (try elem_val.byteSwap(scalar_ty, mod, sema.arena)).intern(scalar_ty, mod); + elem.* = (try elem_val.byteSwap(scalar_ty, mod, sema.arena)).toIntern(); } return Air.internedToRef((try mod.intern(.{ .aggregate = .{ .ty = operand_ty.toIntern(), @@ -23378,7 +23374,7 @@ fn zirBitReverse(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError! const elems = try sema.arena.alloc(InternPool.Index, vec_len); for (elems, 0..) |*elem, i| { const elem_val = try val.elemValue(mod, i); - elem.* = try (try elem_val.bitReverse(scalar_ty, mod, sema.arena)).intern(scalar_ty, mod); + elem.* = (try elem_val.bitReverse(scalar_ty, mod, sema.arena)).toIntern(); } return Air.internedToRef((try mod.intern(.{ .aggregate = .{ .ty = operand_ty.toIntern(), @@ -24311,7 +24307,7 @@ fn analyzeShuffle( } const int = mask_elem_val.toSignedInt(mod); const unsigned: u32 = @intCast(if (int >= 0) int else ~int); - values[i] = try (try (if (int >= 0) a_val else b_val).elemValue(mod, unsigned)).intern(elem_ty, mod); + values[i] = (try (if (int >= 0) a_val else b_val).elemValue(mod, unsigned)).toIntern(); } return Air.internedToRef((try mod.intern(.{ .aggregate = .{ .ty = res_ty.toIntern(), @@ -24417,7 +24413,7 @@ fn zirSelect(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C for (elems, 0..) 
|*elem, i| { const pred_elem_val = try pred_val.elemValue(mod, i); const should_choose_a = pred_elem_val.toBool(); - elem.* = try (try (if (should_choose_a) a_val else b_val).elemValue(mod, i)).intern(elem_ty, mod); + elem.* = (try (if (should_choose_a) a_val else b_val).elemValue(mod, i)).toIntern(); } return Air.internedToRef((try mod.intern(.{ .aggregate = .{ @@ -27789,7 +27785,7 @@ fn structFieldPtrByIndex( const val = try mod.intern(.{ .ptr = .{ .ty = ptr_field_ty.toIntern(), .addr = .{ .field = .{ - .base = try struct_ptr_val.intern(struct_ptr_ty, mod), + .base = struct_ptr_val.toIntern(), .index = field_index, } }, } }); @@ -31611,7 +31607,7 @@ fn coerceCompatiblePtrs( } // The comptime Value representation is compatible with both types. return Air.internedToRef( - (try mod.getCoerced(Value.fromInterned((try val.intern(inst_ty, mod))), dest_ty)).toIntern(), + (try mod.getCoerced(val, dest_ty)).toIntern(), ); } try sema.requireRuntimeBlock(block, inst_src, null); @@ -31948,7 +31944,7 @@ fn coerceArrayLike( ref.* = coerced; if (runtime_src == null) { if (try sema.resolveValue(coerced)) |elem_val| { - val.* = try elem_val.intern(dest_elem_ty, mod); + val.* = elem_val.toIntern(); } else { runtime_src = elem_src; } @@ -32013,7 +32009,7 @@ fn coerceTupleToArray( ref.* = coerced; if (runtime_src == null) { if (try sema.resolveValue(coerced)) |elem_val| { - val.* = try elem_val.intern(dest_elem_ty, mod); + val.* = elem_val.toIntern(); } else { runtime_src = elem_src; } @@ -33250,10 +33246,7 @@ fn analyzeSlice( }; if (!new_ptr_val.isUndef(mod)) { - return Air.internedToRef((try mod.getCoerced( - Value.fromInterned((try new_ptr_val.intern(new_ptr_ty, mod))), - return_ty, - )).toIntern()); + return Air.internedToRef((try mod.getCoerced(new_ptr_val, return_ty)).toIntern()); } // Special case: @as([]i32, undefined)[x..x] @@ -33765,7 +33758,7 @@ fn wrapErrorUnionPayload( if (try sema.resolveValue(coerced)) |val| { return Air.internedToRef((try mod.intern(.{ .error_union = .{ .ty = dest_ty.toIntern(), - .val = .{ .payload = try val.intern(dest_payload_ty, mod) }, + .val = .{ .payload = val.toIntern() }, } }))); } try sema.requireRuntimeBlock(block, inst_src, null); @@ -36941,15 +36934,14 @@ fn semaStructFieldInits( }); }; - const field_init = try default_val.intern(field_ty, mod); - if (Value.fromInterned(field_init).canMutateComptimeVarState(mod)) { + if (Value.fromInterned(default_val.toIntern()).canMutateComptimeVarState(mod)) { const init_src = mod.fieldSrcLoc(decl_index, .{ .index = field_i, .range = .value, }).lazy; return sema.fail(&block_scope, init_src, "field default value contains reference to comptime-mutable memory", .{}); } - struct_type.field_inits.get(ip)[field_i] = field_init; + struct_type.field_inits.get(ip)[field_i] = default_val.toIntern(); } } } @@ -37756,7 +37748,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value { return sema.failWithOwnedErrorMsg(null, msg); } if (try sema.typeHasOnePossibleValue(field_ty)) |field_opv| { - field_val.* = try field_opv.intern(field_ty, mod); + field_val.* = field_opv.toIntern(); } else return null; } @@ -38310,7 +38302,7 @@ fn intAddInner(sema: *Sema, lhs: Value, rhs: Value, ty: Type, overflow_idx: *usi }, else => |e| return e, }; - scalar.* = try val.intern(scalar_ty, mod); + scalar.* = val.toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -38400,7 +38392,7 @@ fn intSubInner(sema: *Sema, lhs: Value, rhs: Value, ty: Type, overflow_idx: *usi }, else => |e| return e, 
}; - scalar.* = try val.intern(scalar_ty, mod); + scalar.* = val.toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -38470,8 +38462,8 @@ fn intSubWithOverflow( const lhs_elem = try lhs.elemValue(sema.mod, i); const rhs_elem = try rhs.elemValue(sema.mod, i); const of_math_result = try sema.intSubWithOverflowScalar(lhs_elem, rhs_elem, scalar_ty); - of.* = try of_math_result.overflow_bit.intern(Type.u1, mod); - scalar.* = try of_math_result.wrapped_result.intern(scalar_ty, mod); + of.* = of_math_result.overflow_bit.toIntern(); + scalar.* = of_math_result.wrapped_result.toIntern(); } return Value.OverflowArithmeticResult{ .overflow_bit = Value.fromInterned((try mod.intern(.{ .aggregate = .{ @@ -38528,10 +38520,9 @@ fn intFromFloat( if (float_ty.zigTypeTag(mod) == .Vector) { const elem_ty = float_ty.scalarType(mod); const result_data = try sema.arena.alloc(InternPool.Index, float_ty.vectorLen(mod)); - const scalar_ty = int_ty.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(sema.mod, i); - scalar.* = try (try sema.intFromFloatScalar(block, src, elem_val, elem_ty, int_ty.scalarType(mod), mode)).intern(scalar_ty, mod); + scalar.* = (try sema.intFromFloatScalar(block, src, elem_val, elem_ty, int_ty.scalarType(mod), mode)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = int_ty.toIntern(), @@ -38724,8 +38715,8 @@ fn intAddWithOverflow( const lhs_elem = try lhs.elemValue(sema.mod, i); const rhs_elem = try rhs.elemValue(sema.mod, i); const of_math_result = try sema.intAddWithOverflowScalar(lhs_elem, rhs_elem, scalar_ty); - of.* = try of_math_result.overflow_bit.intern(Type.u1, mod); - scalar.* = try of_math_result.wrapped_result.intern(scalar_ty, mod); + of.* = of_math_result.overflow_bit.toIntern(); + scalar.* = of_math_result.wrapped_result.toIntern(); } return Value.OverflowArithmeticResult{ .overflow_bit = Value.fromInterned((try mod.intern(.{ .aggregate = .{ @@ -38835,7 +38826,7 @@ fn compareVector( const lhs_elem = try lhs.elemValue(sema.mod, i); const rhs_elem = try rhs.elemValue(sema.mod, i); const res_bool = try sema.compareScalar(lhs_elem, op, rhs_elem, ty.scalarType(mod)); - scalar.* = try Value.makeBool(res_bool).intern(Type.bool, mod); + scalar.* = Value.makeBool(res_bool).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = (try mod.vectorType(.{ .len = ty.vectorLen(mod), .child = .bool_type })).toIntern(), @@ -39014,29 +39005,6 @@ fn validateRuntimeValue(sema: *Sema, block: *Block, val_src: LazySrcLoc, val: Ai /// Returns true if any value contained in `val` is undefined. 
fn anyUndef(sema: *Sema, val: Value) !bool { const mod = sema.mod; - if (val.ip_index == .none) return switch (val.tag()) { - .eu_payload => try sema.anyUndef(val.castTag(.eu_payload).?.data), - .opt_payload => try sema.anyUndef(val.castTag(.opt_payload).?.data), - .repeated => try sema.anyUndef(val.castTag(.repeated).?.data), - .slice => { - const slice = val.castTag(.slice).?.data; - for (0..@intCast(slice.len.toUnsignedInt(mod))) |idx| { - if (try sema.anyUndef((try slice.ptr.maybeElemValueFull(sema, mod, idx)).?)) return true; - } - return false; - }, - .bytes => false, - .aggregate => for (val.castTag(.aggregate).?.data) |elem| { - if (try sema.anyUndef(elem)) break true; - } else false, - .@"union" => { - const un = val.castTag(.@"union").?.data; - if (un.tag) |t| { - if (try sema.anyUndef(t)) return true; - } - return sema.anyUndef(un.val); - }, - }; return switch (val.toIntern()) { .undef => true, else => switch (mod.intern_pool.indexToKey(val.toIntern())) { diff --git a/src/TypedValue.zig b/src/TypedValue.zig index 5d9c062417..4679ce20ee 100644 --- a/src/TypedValue.zig +++ b/src/TypedValue.zig @@ -77,386 +77,275 @@ pub fn print( var val = tv.val; var ty = tv.ty; const ip = &mod.intern_pool; - while (true) switch (val.ip_index) { - .none => switch (val.tag()) { - .aggregate => return printAggregate(ty, val, writer, level, mod), - .@"union" => { - if (level == 0) { - return writer.writeAll(".{ ... }"); + while (true) switch (ip.indexToKey(val.toIntern())) { + .int_type, + .ptr_type, + .array_type, + .vector_type, + .opt_type, + .anyframe_type, + .error_union_type, + .simple_type, + .struct_type, + .anon_struct_type, + .union_type, + .opaque_type, + .enum_type, + .func_type, + .error_set_type, + .inferred_error_set_type, + => return Type.print(val.toType(), writer, mod), + .undef => return writer.writeAll("undefined"), + .simple_value => |simple_value| switch (simple_value) { + .void => return writer.writeAll("{}"), + .empty_struct => return printAggregate(ty, val, writer, level, mod), + .generic_poison => return writer.writeAll("(generic poison)"), + else => return writer.writeAll(@tagName(simple_value)), + }, + .variable => return writer.writeAll("(variable)"), + .extern_func => |extern_func| return writer.print("(extern function '{}')", .{ + mod.declPtr(extern_func.decl).name.fmt(ip), + }), + .func => |func| return writer.print("(function '{}')", .{ + mod.declPtr(func.owner_decl).name.fmt(ip), + }), + .int => |int| switch (int.storage) { + inline .u64, .i64, .big_int => |x| return writer.print("{}", .{x}), + .lazy_align => |lazy_ty| return writer.print("{d}", .{ + Type.fromInterned(lazy_ty).abiAlignment(mod), + }), + .lazy_size => |lazy_ty| return writer.print("{d}", .{ + Type.fromInterned(lazy_ty).abiSize(mod), + }), + }, + .err => |err| return writer.print("error.{}", .{ + err.name.fmt(ip), + }), + .error_union => |error_union| switch (error_union.val) { + .err_name => |err_name| return writer.print("error.{}", .{ + err_name.fmt(ip), + }), + .payload => |payload| { + val = Value.fromInterned(payload); + ty = ty.errorUnionPayload(mod); + }, + }, + .enum_literal => |enum_literal| return writer.print(".{}", .{ + enum_literal.fmt(ip), + }), + .enum_tag => |enum_tag| { + if (level == 0) { + return writer.writeAll("(enum)"); + } + const enum_type = ip.loadEnumType(ty.toIntern()); + if (enum_type.tagValueIndex(ip, val.toIntern())) |tag_index| { + try writer.print(".{i}", .{enum_type.names.get(ip)[tag_index].fmt(ip)}); + return; + } + try writer.writeAll("@enumFromInt("); + try print(.{ 
+ .ty = Type.fromInterned(ip.typeOf(enum_tag.int)), + .val = Value.fromInterned(enum_tag.int), + }, writer, level - 1, mod); + try writer.writeAll(")"); + return; + }, + .empty_enum_value => return writer.writeAll("(empty enum value)"), + .float => |float| switch (float.storage) { + inline else => |x| return writer.print("{d}", .{@as(f64, @floatCast(x))}), + }, + .slice => |slice| { + const ptr_ty = switch (ip.indexToKey(slice.ptr)) { + .ptr => |ptr| ty: { + if (ptr.addr == .int) return print(.{ + .ty = Type.fromInterned(ptr.ty), + .val = Value.fromInterned(slice.ptr), + }, writer, level - 1, mod); + break :ty ip.indexToKey(ptr.ty).ptr_type; + }, + .undef => |ptr_ty| ip.indexToKey(ptr_ty).ptr_type, + else => unreachable, + }; + if (level == 0) { + return writer.writeAll(".{ ... }"); + } + const elem_ty = Type.fromInterned(ptr_ty.child); + const len = Value.fromInterned(slice.len).toUnsignedInt(mod); + if (elem_ty.eql(Type.u8, mod)) str: { + const max_len = @min(len, max_string_len); + var buf: [max_string_len]u8 = undefined; + for (buf[0..max_len], 0..) |*c, i| { + const maybe_elem = try val.maybeElemValue(mod, i); + const elem = maybe_elem orelse return writer.writeAll(".{ (reinterpreted data) }"); + if (elem.isUndef(mod)) break :str; + c.* = @as(u8, @intCast(elem.toUnsignedInt(mod))); } - const payload = val.castTag(.@"union").?.data; - try writer.writeAll(".{ "); - - if (payload.tag) |tag| { + const truncated = if (len > max_string_len) " (truncated)" else ""; + return writer.print("\"{}{s}\"", .{ std.zig.fmtEscapes(buf[0..max_len]), truncated }); + } + try writer.writeAll(".{ "); + const max_len = @min(len, max_aggregate_items); + for (0..max_len) |i| { + if (i != 0) try writer.writeAll(", "); + const maybe_elem = try val.maybeElemValue(mod, i); + const elem = maybe_elem orelse return writer.writeAll("(reinterpreted data) }"); + try print(.{ + .ty = elem_ty, + .val = elem, + }, writer, level - 1, mod); + } + if (len > max_aggregate_items) { + try writer.writeAll(", ..."); + } + return writer.writeAll(" }"); + }, + .ptr => |ptr| { + switch (ptr.addr) { + .decl => |decl_index| { + const decl = mod.declPtr(decl_index); + if (level == 0) return writer.print("(decl '{}')", .{decl.name.fmt(ip)}); + return print(.{ + .ty = decl.typeOf(mod), + .val = decl.val, + }, writer, level - 1, mod); + }, + .anon_decl => |anon_decl| { + const decl_val = anon_decl.val; + if (level == 0) return writer.print("(anon decl '{d}')", .{ + @intFromEnum(decl_val), + }); + return print(.{ + .ty = Type.fromInterned(ip.typeOf(decl_val)), + .val = Value.fromInterned(decl_val), + }, writer, level - 1, mod); + }, + .comptime_alloc => { + // TODO: we need a Sema to print this! 
+ return writer.writeAll("(comptime alloc)"); + }, + .comptime_field => |field_val_ip| { + return print(.{ + .ty = Type.fromInterned(ip.typeOf(field_val_ip)), + .val = Value.fromInterned(field_val_ip), + }, writer, level - 1, mod); + }, + .int => |int_ip| { + try writer.writeAll("@ptrFromInt("); try print(.{ - .ty = Type.fromInterned(ip.loadUnionType(ty.toIntern()).enum_tag_ty), - .val = tag, + .ty = Type.usize, + .val = Value.fromInterned(int_ip), + }, writer, level - 1, mod); + try writer.writeByte(')'); + }, + .eu_payload => |eu_ip| { + try writer.writeAll("(payload of "); + try print(.{ + .ty = Type.fromInterned(ip.typeOf(eu_ip)), + .val = Value.fromInterned(eu_ip), + }, writer, level - 1, mod); + try writer.writeAll(")"); + }, + .opt_payload => |opt_ip| { + try print(.{ + .ty = Type.fromInterned(ip.typeOf(opt_ip)), + .val = Value.fromInterned(opt_ip), + }, writer, level - 1, mod); + try writer.writeAll(".?"); + }, + .elem => |elem| { + if (level == 0) { + try writer.writeAll("(...)"); + } else { + try print(.{ + .ty = Type.fromInterned(ip.typeOf(elem.base)), + .val = Value.fromInterned(elem.base), + }, writer, level - 1, mod); + } + try writer.print("[{}]", .{elem.index}); + }, + .field => |field| { + const ptr_container_ty = Type.fromInterned(ip.typeOf(field.base)); + if (level == 0) { + try writer.writeAll("(...)"); + } else { + try print(.{ + .ty = ptr_container_ty, + .val = Value.fromInterned(field.base), + }, writer, level - 1, mod); + } + + const container_ty = ptr_container_ty.childType(mod); + switch (container_ty.zigTypeTag(mod)) { + .Struct => { + if (container_ty.structFieldName(@intCast(field.index), mod).unwrap()) |field_name| { + try writer.print(".{i}", .{field_name.fmt(ip)}); + } else { + try writer.print("[{d}]", .{field.index}); + } + }, + .Union => { + const field_name = mod.typeToUnion(container_ty).?.loadTagType(ip).names.get(ip)[@intCast(field.index)]; + try writer.print(".{i}", .{field_name.fmt(ip)}); + }, + .Pointer => { + std.debug.assert(container_ty.isSlice(mod)); + try writer.writeAll(switch (field.index) { + Value.slice_ptr_index => ".ptr", + Value.slice_len_index => ".len", + else => unreachable, + }); + }, + else => unreachable, + } + }, + } + return; + }, + .opt => |opt| switch (opt.val) { + .none => return writer.writeAll("null"), + else => |payload| { + val = Value.fromInterned(payload); + ty = ty.optionalChild(mod); + }, + }, + .aggregate => |aggregate| switch (aggregate.storage) { + .bytes => |bytes| { + // Strip the 0 sentinel off of strings before printing + const zero_sent = blk: { + const sent = ty.sentinel(mod) orelse break :blk false; + break :blk sent.eql(Value.zero_u8, Type.u8, mod); + }; + const str = if (zero_sent) bytes[0 .. 
bytes.len - 1] else bytes; + return writer.print("\"{}\"", .{std.zig.fmtEscapes(str)}); + }, + .elems, .repeated_elem => return printAggregate(ty, val, writer, level, mod), + }, + .un => |un| { + try writer.writeAll(".{ "); + if (level > 0) { + if (un.tag != .none) { + try print(.{ + .ty = ty.unionTagTypeHypothetical(mod), + .val = Value.fromInterned(un.tag), }, writer, level - 1, mod); try writer.writeAll(" = "); - const field_ty = ty.unionFieldType(tag, mod).?; + const field_ty = ty.unionFieldType(Value.fromInterned(un.tag), mod).?; try print(.{ .ty = field_ty, - .val = payload.val, + .val = Value.fromInterned(un.val), }, writer, level - 1, mod); } else { try writer.writeAll("(unknown tag) = "); const backing_ty = try ty.unionBackingType(mod); try print(.{ .ty = backing_ty, - .val = payload.val, + .val = Value.fromInterned(un.val), }, writer, level - 1, mod); } - - return writer.writeAll(" }"); - }, - .bytes => return writer.print("\"{}\"", .{std.zig.fmtEscapes(val.castTag(.bytes).?.data)}), - .repeated => { - if (level == 0) { - return writer.writeAll(".{ ... }"); - } - var i: u32 = 0; - try writer.writeAll(".{ "); - const elem_tv = TypedValue{ - .ty = ty.elemType2(mod), - .val = val.castTag(.repeated).?.data, - }; - const len = ty.arrayLen(mod); - const max_len = @min(len, max_aggregate_items); - while (i < max_len) : (i += 1) { - if (i != 0) try writer.writeAll(", "); - try print(elem_tv, writer, level - 1, mod); - } - if (len > max_aggregate_items) { - try writer.writeAll(", ..."); - } - return writer.writeAll(" }"); - }, - .slice => { - if (level == 0) { - return writer.writeAll(".{ ... }"); - } - const payload = val.castTag(.slice).?.data; - const elem_ty = ty.elemType2(mod); - const len = payload.len.toUnsignedInt(mod); - - if (elem_ty.eql(Type.u8, mod)) str: { - const max_len: usize = @min(len, max_string_len); - var buf: [max_string_len]u8 = undefined; - - var i: u32 = 0; - while (i < max_len) : (i += 1) { - const maybe_elem_val = payload.ptr.maybeElemValue(mod, i) catch |err| switch (err) { - error.OutOfMemory => @panic("OOM"), // TODO: eliminate this panic - }; - const elem_val = maybe_elem_val orelse return writer.writeAll(".{ (reinterpreted data) }"); - if (elem_val.isUndef(mod)) break :str; - buf[i] = std.math.cast(u8, elem_val.toUnsignedInt(mod)) orelse break :str; - } - - // TODO would be nice if this had a bit of unicode awareness. 
- const truncated = if (len > max_string_len) " (truncated)" else ""; - return writer.print("\"{}{s}\"", .{ std.zig.fmtEscapes(buf[0..max_len]), truncated }); - } - - try writer.writeAll(".{ "); - - const max_len = @min(len, max_aggregate_items); - var i: u32 = 0; - while (i < max_len) : (i += 1) { - if (i != 0) try writer.writeAll(", "); - const maybe_elem_val = payload.ptr.maybeElemValue(mod, i) catch |err| switch (err) { - error.OutOfMemory => @panic("OOM"), // TODO: eliminate this panic - }; - const elem_val = maybe_elem_val orelse return writer.writeAll("(reinterpreted data) }"); - try print(.{ - .ty = elem_ty, - .val = elem_val, - }, writer, level - 1, mod); - } - if (len > max_aggregate_items) { - try writer.writeAll(", ..."); - } - return writer.writeAll(" }"); - }, - .eu_payload => { - val = val.castTag(.eu_payload).?.data; - ty = ty.errorUnionPayload(mod); - }, - .opt_payload => { - val = val.castTag(.opt_payload).?.data; - ty = ty.optionalChild(mod); - }, - }, - else => switch (ip.indexToKey(val.toIntern())) { - .int_type, - .ptr_type, - .array_type, - .vector_type, - .opt_type, - .anyframe_type, - .error_union_type, - .simple_type, - .struct_type, - .anon_struct_type, - .union_type, - .opaque_type, - .enum_type, - .func_type, - .error_set_type, - .inferred_error_set_type, - => return Type.print(val.toType(), writer, mod), - .undef => return writer.writeAll("undefined"), - .simple_value => |simple_value| switch (simple_value) { - .void => return writer.writeAll("{}"), - .empty_struct => return printAggregate(ty, val, writer, level, mod), - .generic_poison => return writer.writeAll("(generic poison)"), - else => return writer.writeAll(@tagName(simple_value)), - }, - .variable => return writer.writeAll("(variable)"), - .extern_func => |extern_func| return writer.print("(extern function '{}')", .{ - mod.declPtr(extern_func.decl).name.fmt(ip), - }), - .func => |func| return writer.print("(function '{}')", .{ - mod.declPtr(func.owner_decl).name.fmt(ip), - }), - .int => |int| switch (int.storage) { - inline .u64, .i64, .big_int => |x| return writer.print("{}", .{x}), - .lazy_align => |lazy_ty| return writer.print("{d}", .{ - Type.fromInterned(lazy_ty).abiAlignment(mod), - }), - .lazy_size => |lazy_ty| return writer.print("{d}", .{ - Type.fromInterned(lazy_ty).abiSize(mod), - }), - }, - .err => |err| return writer.print("error.{}", .{ - err.name.fmt(ip), - }), - .error_union => |error_union| switch (error_union.val) { - .err_name => |err_name| return writer.print("error.{}", .{ - err_name.fmt(ip), - }), - .payload => |payload| { - val = Value.fromInterned(payload); - ty = ty.errorUnionPayload(mod); - }, - }, - .enum_literal => |enum_literal| return writer.print(".{}", .{ - enum_literal.fmt(ip), - }), - .enum_tag => |enum_tag| { - if (level == 0) { - return writer.writeAll("(enum)"); - } - const enum_type = ip.loadEnumType(ty.toIntern()); - if (enum_type.tagValueIndex(ip, val.toIntern())) |tag_index| { - try writer.print(".{i}", .{enum_type.names.get(ip)[tag_index].fmt(ip)}); - return; - } - try writer.writeAll("@enumFromInt("); - try print(.{ - .ty = Type.fromInterned(ip.typeOf(enum_tag.int)), - .val = Value.fromInterned(enum_tag.int), - }, writer, level - 1, mod); - try writer.writeAll(")"); - return; - }, - .empty_enum_value => return writer.writeAll("(empty enum value)"), - .float => |float| switch (float.storage) { - inline else => |x| return writer.print("{d}", .{@as(f64, @floatCast(x))}), - }, - .slice => |slice| { - const ptr_ty = switch (ip.indexToKey(slice.ptr)) { - .ptr => 
|ptr| ty: { - if (ptr.addr == .int) return print(.{ - .ty = Type.fromInterned(ptr.ty), - .val = Value.fromInterned(slice.ptr), - }, writer, level - 1, mod); - break :ty ip.indexToKey(ptr.ty).ptr_type; - }, - .undef => |ptr_ty| ip.indexToKey(ptr_ty).ptr_type, - else => unreachable, - }; - if (level == 0) { - return writer.writeAll(".{ ... }"); - } - const elem_ty = Type.fromInterned(ptr_ty.child); - const len = Value.fromInterned(slice.len).toUnsignedInt(mod); - if (elem_ty.eql(Type.u8, mod)) str: { - const max_len = @min(len, max_string_len); - var buf: [max_string_len]u8 = undefined; - for (buf[0..max_len], 0..) |*c, i| { - const maybe_elem = try val.maybeElemValue(mod, i); - const elem = maybe_elem orelse return writer.writeAll(".{ (reinterpreted data) }"); - if (elem.isUndef(mod)) break :str; - c.* = @as(u8, @intCast(elem.toUnsignedInt(mod))); - } - const truncated = if (len > max_string_len) " (truncated)" else ""; - return writer.print("\"{}{s}\"", .{ std.zig.fmtEscapes(buf[0..max_len]), truncated }); - } - try writer.writeAll(".{ "); - const max_len = @min(len, max_aggregate_items); - for (0..max_len) |i| { - if (i != 0) try writer.writeAll(", "); - const maybe_elem = try val.maybeElemValue(mod, i); - const elem = maybe_elem orelse return writer.writeAll("(reinterpreted data) }"); - try print(.{ - .ty = elem_ty, - .val = elem, - }, writer, level - 1, mod); - } - if (len > max_aggregate_items) { - try writer.writeAll(", ..."); - } - return writer.writeAll(" }"); - }, - .ptr => |ptr| { - switch (ptr.addr) { - .decl => |decl_index| { - const decl = mod.declPtr(decl_index); - if (level == 0) return writer.print("(decl '{}')", .{decl.name.fmt(ip)}); - return print(.{ - .ty = decl.typeOf(mod), - .val = decl.val, - }, writer, level - 1, mod); - }, - .anon_decl => |anon_decl| { - const decl_val = anon_decl.val; - if (level == 0) return writer.print("(anon decl '{d}')", .{ - @intFromEnum(decl_val), - }); - return print(.{ - .ty = Type.fromInterned(ip.typeOf(decl_val)), - .val = Value.fromInterned(decl_val), - }, writer, level - 1, mod); - }, - .comptime_alloc => { - // TODO: we need a Sema to print this! 
- return writer.writeAll("(comptime alloc)"); - }, - .comptime_field => |field_val_ip| { - return print(.{ - .ty = Type.fromInterned(ip.typeOf(field_val_ip)), - .val = Value.fromInterned(field_val_ip), - }, writer, level - 1, mod); - }, - .int => |int_ip| { - try writer.writeAll("@ptrFromInt("); - try print(.{ - .ty = Type.usize, - .val = Value.fromInterned(int_ip), - }, writer, level - 1, mod); - try writer.writeByte(')'); - }, - .eu_payload => |eu_ip| { - try writer.writeAll("(payload of "); - try print(.{ - .ty = Type.fromInterned(ip.typeOf(eu_ip)), - .val = Value.fromInterned(eu_ip), - }, writer, level - 1, mod); - try writer.writeAll(")"); - }, - .opt_payload => |opt_ip| { - try print(.{ - .ty = Type.fromInterned(ip.typeOf(opt_ip)), - .val = Value.fromInterned(opt_ip), - }, writer, level - 1, mod); - try writer.writeAll(".?"); - }, - .elem => |elem| { - if (level == 0) { - try writer.writeAll("(...)"); - } else { - try print(.{ - .ty = Type.fromInterned(ip.typeOf(elem.base)), - .val = Value.fromInterned(elem.base), - }, writer, level - 1, mod); - } - try writer.print("[{}]", .{elem.index}); - }, - .field => |field| { - const ptr_container_ty = Type.fromInterned(ip.typeOf(field.base)); - if (level == 0) { - try writer.writeAll("(...)"); - } else { - try print(.{ - .ty = ptr_container_ty, - .val = Value.fromInterned(field.base), - }, writer, level - 1, mod); - } - - const container_ty = ptr_container_ty.childType(mod); - switch (container_ty.zigTypeTag(mod)) { - .Struct => { - if (container_ty.structFieldName(@intCast(field.index), mod).unwrap()) |field_name| { - try writer.print(".{i}", .{field_name.fmt(ip)}); - } else { - try writer.print("[{d}]", .{field.index}); - } - }, - .Union => { - const field_name = mod.typeToUnion(container_ty).?.loadTagType(ip).names.get(ip)[@intCast(field.index)]; - try writer.print(".{i}", .{field_name.fmt(ip)}); - }, - .Pointer => { - std.debug.assert(container_ty.isSlice(mod)); - try writer.writeAll(switch (field.index) { - Value.slice_ptr_index => ".ptr", - Value.slice_len_index => ".len", - else => unreachable, - }); - }, - else => unreachable, - } - }, - } - return; - }, - .opt => |opt| switch (opt.val) { - .none => return writer.writeAll("null"), - else => |payload| { - val = Value.fromInterned(payload); - ty = ty.optionalChild(mod); - }, - }, - .aggregate => |aggregate| switch (aggregate.storage) { - .bytes => |bytes| { - // Strip the 0 sentinel off of strings before printing - const zero_sent = blk: { - const sent = ty.sentinel(mod) orelse break :blk false; - break :blk sent.eql(Value.zero_u8, Type.u8, mod); - }; - const str = if (zero_sent) bytes[0 .. 
bytes.len - 1] else bytes; - return writer.print("\"{}\"", .{std.zig.fmtEscapes(str)}); - }, - .elems, .repeated_elem => return printAggregate(ty, val, writer, level, mod), - }, - .un => |un| { - try writer.writeAll(".{ "); - if (level > 0) { - if (un.tag != .none) { - try print(.{ - .ty = ty.unionTagTypeHypothetical(mod), - .val = Value.fromInterned(un.tag), - }, writer, level - 1, mod); - try writer.writeAll(" = "); - const field_ty = ty.unionFieldType(Value.fromInterned(un.tag), mod).?; - try print(.{ - .ty = field_ty, - .val = Value.fromInterned(un.val), - }, writer, level - 1, mod); - } else { - try writer.writeAll("(unknown tag) = "); - const backing_ty = try ty.unionBackingType(mod); - try print(.{ - .ty = backing_ty, - .val = Value.fromInterned(un.val), - }, writer, level - 1, mod); - } - } else try writer.writeAll("..."); - return writer.writeAll(" }"); - }, - .memoized_call => unreachable, + } else try writer.writeAll("..."); + return writer.writeAll(" }"); }, + .memoized_call => unreachable, }; } diff --git a/src/Value.zig b/src/Value.zig index af24d68d7c..040961fa38 100644 --- a/src/Value.zig +++ b/src/Value.zig @@ -13,124 +13,8 @@ const Sema = @import("Sema.zig"); const InternPool = @import("InternPool.zig"); const Value = @This(); -/// We are migrating towards using this for every Value object. However, many -/// values are still represented the legacy way. This is indicated by using -/// InternPool.Index.none. ip_index: InternPool.Index, -/// This is the raw data, with no bookkeeping, no memory awareness, -/// no de-duplication, and no type system awareness. -/// This union takes advantage of the fact that the first page of memory -/// is unmapped, giving us 4096 possible enum tags that have no payload. -legacy: extern union { - ptr_otherwise: *Payload, -}, - -// Keep in sync with tools/stage2_pretty_printers_common.py -pub const Tag = enum(usize) { - // The first section of this enum are tags that require no payload. - // After this, the tag requires a payload. - - /// When the type is error union: - /// * If the tag is `.@"error"`, the error union is an error. - /// * If the tag is `.eu_payload`, the error union is a payload. - /// * A nested error such as `anyerror!(anyerror!T)` in which the the outer error union - /// is non-error, but the inner error union is an error, is represented as - /// a tag of `.eu_payload`, with a sub-tag of `.@"error"`. - eu_payload, - /// When the type is optional: - /// * If the tag is `.null_value`, the optional is null. - /// * If the tag is `.opt_payload`, the optional is a payload. - /// * A nested optional such as `??T` in which the the outer optional - /// is non-null, but the inner optional is null, is represented as - /// a tag of `.opt_payload`, with a sub-tag of `.null_value`. - opt_payload, - /// Pointer and length as sub `Value` objects. - slice, - /// A slice of u8 whose memory is managed externally. - bytes, - /// This value is repeated some number of times. The amount of times to repeat - /// is stored externally. - repeated, - /// An instance of a struct, array, or vector. - /// Each element/field stored as a `Value`. - /// In the case of sentinel-terminated arrays, the sentinel value *is* stored, - /// so the slice length will be one more than the type's array length. - aggregate, - /// An instance of a union. 
- @"union", - - pub fn Type(comptime t: Tag) type { - return switch (t) { - .eu_payload, - .opt_payload, - .repeated, - => Payload.SubValue, - .slice => Payload.Slice, - .bytes => Payload.Bytes, - .aggregate => Payload.Aggregate, - .@"union" => Payload.Union, - }; - } - - pub fn create(comptime t: Tag, ally: Allocator, data: Data(t)) error{OutOfMemory}!Value { - const ptr = try ally.create(t.Type()); - ptr.* = .{ - .base = .{ .tag = t }, - .data = data, - }; - return Value{ - .ip_index = .none, - .legacy = .{ .ptr_otherwise = &ptr.base }, - }; - } - - pub fn Data(comptime t: Tag) type { - return std.meta.fieldInfo(t.Type(), .data).type; - } -}; - -pub fn initPayload(payload: *Payload) Value { - return Value{ - .ip_index = .none, - .legacy = .{ .ptr_otherwise = payload }, - }; -} - -pub fn tag(self: Value) Tag { - assert(self.ip_index == .none); - return self.legacy.ptr_otherwise.tag; -} - -/// Prefer `castTag` to this. -pub fn cast(self: Value, comptime T: type) ?*T { - if (self.ip_index != .none) { - return null; - } - if (@hasField(T, "base_tag")) { - return self.castTag(T.base_tag); - } - inline for (@typeInfo(Tag).Enum.fields) |field| { - const t = @as(Tag, @enumFromInt(field.value)); - if (self.legacy.ptr_otherwise.tag == t) { - if (T == t.Type()) { - return @fieldParentPtr(T, "base", self.legacy.ptr_otherwise); - } - return null; - } - } - unreachable; -} - -pub fn castTag(self: Value, comptime t: Tag) ?*t.Type() { - if (self.ip_index != .none) return null; - - if (self.legacy.ptr_otherwise.tag == t) - return @fieldParentPtr(t.Type(), "base", self.legacy.ptr_otherwise); - - return null; -} - pub fn format(val: Value, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { _ = val; _ = fmt; @@ -148,33 +32,7 @@ pub fn dump( out_stream: anytype, ) !void { comptime assert(fmt.len == 0); - if (start_val.ip_index != .none) { - try out_stream.print("(interned: {})", .{start_val.toIntern()}); - return; - } - var val = start_val; - while (true) switch (val.tag()) { - .aggregate => { - return out_stream.writeAll("(aggregate)"); - }, - .@"union" => { - return out_stream.writeAll("(union value)"); - }, - .bytes => return out_stream.print("\"{}\"", .{std.zig.fmtEscapes(val.castTag(.bytes).?.data)}), - .repeated => { - try out_stream.writeAll("(repeated) "); - val = val.castTag(.repeated).?.data; - }, - .eu_payload => { - try out_stream.writeAll("(eu_payload) "); - val = val.castTag(.repeated).?.data; - }, - .opt_payload => { - try out_stream.writeAll("(opt_payload) "); - val = val.castTag(.repeated).?.data; - }, - .slice => return out_stream.writeAll("(slice)"), - }; + try out_stream.print("(interned: {})", .{start_val.toIntern()}); } pub fn fmtDebug(val: Value) std.fmt.Formatter(dump) { @@ -252,162 +110,9 @@ fn arrayToIpString(val: Value, len_u64: u64, mod: *Module) !InternPool.NullTermi return ip.getOrPutTrailingString(gpa, len); } -pub fn intern2(val: Value, ty: Type, mod: *Module) Allocator.Error!InternPool.Index { - if (val.ip_index != .none) return val.ip_index; - return intern(val, ty, mod); -} - -pub fn intern(val: Value, ty: Type, mod: *Module) Allocator.Error!InternPool.Index { - if (val.ip_index != .none) return (try mod.getCoerced(val, ty)).toIntern(); - const ip = &mod.intern_pool; - switch (val.tag()) { - .eu_payload => { - const pl = val.castTag(.eu_payload).?.data; - return mod.intern(.{ .error_union = .{ - .ty = ty.toIntern(), - .val = .{ .payload = try pl.intern(ty.errorUnionPayload(mod), mod) }, - } }); - }, - .opt_payload => { - const pl = 
val.castTag(.opt_payload).?.data; - return mod.intern(.{ .opt = .{ - .ty = ty.toIntern(), - .val = try pl.intern(ty.optionalChild(mod), mod), - } }); - }, - .slice => { - const pl = val.castTag(.slice).?.data; - return mod.intern(.{ .slice = .{ - .ty = ty.toIntern(), - .len = try pl.len.intern(Type.usize, mod), - .ptr = try pl.ptr.intern(ty.slicePtrFieldType(mod), mod), - } }); - }, - .bytes => { - const pl = val.castTag(.bytes).?.data; - return mod.intern(.{ .aggregate = .{ - .ty = ty.toIntern(), - .storage = .{ .bytes = pl }, - } }); - }, - .repeated => { - const pl = val.castTag(.repeated).?.data; - return mod.intern(.{ .aggregate = .{ - .ty = ty.toIntern(), - .storage = .{ .repeated_elem = try pl.intern(ty.childType(mod), mod) }, - } }); - }, - .aggregate => { - const len = @as(usize, @intCast(ty.arrayLen(mod))); - const old_elems = val.castTag(.aggregate).?.data[0..len]; - const new_elems = try mod.gpa.alloc(InternPool.Index, old_elems.len); - defer mod.gpa.free(new_elems); - const ty_key = ip.indexToKey(ty.toIntern()); - for (new_elems, old_elems, 0..) |*new_elem, old_elem, field_i| - new_elem.* = try old_elem.intern(switch (ty_key) { - .struct_type => ty.structFieldType(field_i, mod), - .anon_struct_type => |info| Type.fromInterned(info.types.get(ip)[field_i]), - inline .array_type, .vector_type => |info| Type.fromInterned(info.child), - else => unreachable, - }, mod); - return mod.intern(.{ .aggregate = .{ - .ty = ty.toIntern(), - .storage = .{ .elems = new_elems }, - } }); - }, - .@"union" => { - const pl = val.castTag(.@"union").?.data; - if (pl.tag) |pl_tag| { - return mod.intern(.{ .un = .{ - .ty = ty.toIntern(), - .tag = try pl_tag.intern(ty.unionTagTypeHypothetical(mod), mod), - .val = try pl.val.intern(ty.unionFieldType(pl_tag, mod).?, mod), - } }); - } else { - return mod.intern(.{ .un = .{ - .ty = ty.toIntern(), - .tag = .none, - .val = try pl.val.intern(try ty.unionBackingType(mod), mod), - } }); - } - }, - } -} - -pub fn unintern(val: Value, arena: Allocator, mod: *Module) Allocator.Error!Value { - return if (val.ip_index == .none) val else switch (mod.intern_pool.indexToKey(val.toIntern())) { - .int_type, - .ptr_type, - .array_type, - .vector_type, - .opt_type, - .anyframe_type, - .error_union_type, - .simple_type, - .struct_type, - .anon_struct_type, - .union_type, - .opaque_type, - .enum_type, - .func_type, - .error_set_type, - .inferred_error_set_type, - - .undef, - .simple_value, - .variable, - .extern_func, - .func, - .int, - .err, - .enum_literal, - .enum_tag, - .empty_enum_value, - .float, - .ptr, - => val, - - .error_union => |error_union| switch (error_union.val) { - .err_name => val, - .payload => |payload| Tag.eu_payload.create(arena, Value.fromInterned(payload)), - }, - - .slice => |slice| Tag.slice.create(arena, .{ - .ptr = Value.fromInterned(slice.ptr), - .len = Value.fromInterned(slice.len), - }), - - .opt => |opt| switch (opt.val) { - .none => val, - else => |payload| Tag.opt_payload.create(arena, Value.fromInterned(payload)), - }, - - .aggregate => |aggregate| switch (aggregate.storage) { - .bytes => |bytes| Tag.bytes.create(arena, try arena.dupe(u8, bytes)), - .elems => |old_elems| { - const new_elems = try arena.alloc(Value, old_elems.len); - for (new_elems, old_elems) |*new_elem, old_elem| new_elem.* = Value.fromInterned(old_elem); - return Tag.aggregate.create(arena, new_elems); - }, - .repeated_elem => |elem| Tag.repeated.create(arena, Value.fromInterned(elem)), - }, - - .un => |un| Tag.@"union".create(arena, .{ - // toValue asserts that the value 
cannot be .none which is valid on unions. - .tag = if (un.tag == .none) null else Value.fromInterned(un.tag), - .val = Value.fromInterned(un.val), - }), - - .memoized_call => unreachable, - }; -} - pub fn fromInterned(i: InternPool.Index) Value { assert(i != .none); - return .{ - .ip_index = i, - .legacy = undefined, - }; + return .{ .ip_index = i }; } pub fn toIntern(val: Value) InternPool.Index { @@ -492,24 +197,24 @@ pub fn isFuncBody(val: Value, mod: *Module) bool { } pub fn getFunction(val: Value, mod: *Module) ?InternPool.Key.Func { - return if (val.ip_index != .none) switch (mod.intern_pool.indexToKey(val.toIntern())) { + return switch (mod.intern_pool.indexToKey(val.toIntern())) { .func => |x| x, else => null, - } else null; + }; } pub fn getExternFunc(val: Value, mod: *Module) ?InternPool.Key.ExternFunc { - return if (val.ip_index != .none) switch (mod.intern_pool.indexToKey(val.toIntern())) { + return switch (mod.intern_pool.indexToKey(val.toIntern())) { .extern_func => |extern_func| extern_func, else => null, - } else null; + }; } pub fn getVariable(val: Value, mod: *Module) ?InternPool.Key.Variable { - return if (val.ip_index != .none) switch (mod.intern_pool.indexToKey(val.toIntern())) { + return switch (mod.intern_pool.indexToKey(val.toIntern())) { .variable => |variable| variable, else => null, - } else null; + }; } /// If the value fits in a u64, return it, otherwise null. @@ -677,25 +382,14 @@ pub fn writeToMemory(val: Value, ty: Type, mod: *Module, buffer: []u8) error{ .auto => return error.IllDefinedMemoryLayout, .@"extern" => for (0..struct_type.field_types.len) |i| { const off: usize = @intCast(ty.structFieldOffset(i, mod)); - const field_val = switch (val.ip_index) { - .none => switch (val.tag()) { - .bytes => { - buffer[off] = val.castTag(.bytes).?.data[i]; - continue; - }, - .aggregate => val.castTag(.aggregate).?.data[i], - .repeated => val.castTag(.repeated).?.data, - else => unreachable, + const field_val = Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) { + .bytes => |bytes| { + buffer[off] = bytes[i]; + continue; }, - else => Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) { - .bytes => |bytes| { - buffer[off] = bytes[i]; - continue; - }, - .elems => |elems| elems[i], - .repeated_elem => |elem| elem, - }), - }; + .elems => |elems| elems[i], + .repeated_elem => |elem| elem, + }); const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]); try writeToMemory(field_val, field_ty, mod, buffer[off..]); }, @@ -842,19 +536,11 @@ pub fn writeToPackedMemory( assert(struct_type.layout == .@"packed"); var bits: u16 = 0; for (0..struct_type.field_types.len) |i| { - const field_val = switch (val.ip_index) { - .none => switch (val.tag()) { - .bytes => unreachable, - .aggregate => val.castTag(.aggregate).?.data[i], - .repeated => val.castTag(.repeated).?.data, - else => unreachable, - }, - else => Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) { - .bytes => unreachable, - .elems => |elems| elems[i], - .repeated_elem => |elem| elem, - }), - }; + const field_val = Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) { + .bytes => unreachable, + .elems => |elems| elems[i], + .repeated_elem => |elem| elem, + }); const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]); const field_bits: u16 = @intCast(field_ty.bitSize(mod)); try field_val.writeToPackedMemory(field_ty, mod, buffer, bit_offset + bits); @@ -972,7 +658,7 @@ pub fn readFromMemory( const 
elems = try arena.alloc(InternPool.Index, @as(usize, @intCast(ty.arrayLen(mod)))); var offset: usize = 0; for (elems) |*elem| { - elem.* = try (try readFromMemory(elem_ty, mod, buffer[offset..], arena)).intern(elem_ty, mod); + elem.* = (try readFromMemory(elem_ty, mod, buffer[offset..], arena)).toIntern(); offset += @as(usize, @intCast(elem_size)); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ @@ -997,7 +683,7 @@ pub fn readFromMemory( const field_ty = Type.fromInterned(field_types.get(ip)[i]); const off: usize = @intCast(ty.structFieldOffset(i, mod)); const sz: usize = @intCast(field_ty.abiSize(mod)); - field_val.* = try (try readFromMemory(field_ty, mod, buffer[off..(off + sz)], arena)).intern(field_ty, mod); + field_val.* = (try readFromMemory(field_ty, mod, buffer[off..(off + sz)], arena)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -1027,7 +713,7 @@ pub fn readFromMemory( .@"extern" => { const union_size = ty.abiSize(mod); const array_ty = try mod.arrayType(.{ .len = union_size, .child = .u8_type }); - const val = try (try readFromMemory(array_ty, mod, buffer, arena)).intern(array_ty, mod); + const val = (try readFromMemory(array_ty, mod, buffer, arena)).toIntern(); return Value.fromInterned((try mod.intern(.{ .un = .{ .ty = ty.toIntern(), .tag = .none, @@ -1094,7 +780,7 @@ pub fn readFromPackedMemory( return Value.true; } }, - .Int, .Enum => |ty_tag| { + .Int => { if (buffer.len == 0) return mod.intValue(ty, 0); const int_info = ty.intInfo(mod); const bits = int_info.bits; @@ -1102,21 +788,10 @@ pub fn readFromPackedMemory( // Fast path for integers <= u64 if (bits <= 64) { - const int_ty = switch (ty_tag) { - .Int => ty, - .Enum => ty.intTagType(mod), - else => unreachable, - }; - return mod.getCoerced(switch (int_info.signedness) { - .signed => return mod.intValue( - int_ty, - std.mem.readVarPackedInt(i64, buffer, bit_offset, bits, endian, .signed), - ), - .unsigned => return mod.intValue( - int_ty, - std.mem.readVarPackedInt(u64, buffer, bit_offset, bits, endian, .unsigned), - ), - }, ty); + return mod.intValue( + ty, + std.mem.readVarPackedInt(i64, buffer, bit_offset, bits, endian, int_info.signedness), + ); } // Slow path, we have to construct a big-int @@ -1129,6 +804,11 @@ pub fn readFromPackedMemory( bigint.readPackedTwosComplement(buffer, bit_offset, bits, endian, int_info.signedness); return mod.intValue_big(ty, bigint.toConst()); }, + .Enum => { + const int_ty = ty.intTagType(mod); + const int_val = try Value.readFromPackedMemory(int_ty, mod, buffer, bit_offset, arena); + return mod.getCoerced(int_val, ty); + }, .Float => return Value.fromInterned((try mod.intern(.{ .float = .{ .ty = ty.toIntern(), .storage = switch (ty.floatBits(target)) { @@ -1149,7 +829,7 @@ pub fn readFromPackedMemory( for (elems, 0..) |_, i| { // On big-endian systems, LLVM reverses the element order of vectors by default const tgt_elem_i = if (endian == .big) elems.len - i - 1 else i; - elems[tgt_elem_i] = try (try readFromPackedMemory(elem_ty, mod, buffer, bit_offset + bits, arena)).intern(elem_ty, mod); + elems[tgt_elem_i] = (try readFromPackedMemory(elem_ty, mod, buffer, bit_offset + bits, arena)).toIntern(); bits += elem_bit_size; } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ @@ -1166,7 +846,7 @@ pub fn readFromPackedMemory( for (field_vals, 0..) 
|*field_val, i| { const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]); const field_bits: u16 = @intCast(field_ty.bitSize(mod)); - field_val.* = try (try readFromPackedMemory(field_ty, mod, buffer, bit_offset + bits, arena)).intern(field_ty, mod); + field_val.* = (try readFromPackedMemory(field_ty, mod, buffer, bit_offset + bits, arena)).toIntern(); bits += field_bits; } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ @@ -1611,51 +1291,42 @@ pub fn maybeElemValue(val: Value, mod: *Module, index: usize) Allocator.Error!?V } pub fn maybeElemValueFull(val: Value, sema: ?*Sema, mod: *Module, index: usize) Allocator.Error!?Value { - return switch (val.ip_index) { - .none => switch (val.tag()) { - .bytes => try mod.intValue(Type.u8, val.castTag(.bytes).?.data[index]), - .repeated => val.castTag(.repeated).?.data, - .aggregate => val.castTag(.aggregate).?.data[index], - .slice => val.castTag(.slice).?.data.ptr.maybeElemValueFull(sema, mod, index), - else => null, + return switch (mod.intern_pool.indexToKey(val.toIntern())) { + .undef => |ty| Value.fromInterned((try mod.intern(.{ + .undef = Type.fromInterned(ty).elemType2(mod).toIntern(), + }))), + .slice => |slice| return Value.fromInterned(slice.ptr).maybeElemValueFull(sema, mod, index), + .ptr => |ptr| switch (ptr.addr) { + .decl => |decl| mod.declPtr(decl).val.maybeElemValueFull(sema, mod, index), + .anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).maybeElemValueFull(sema, mod, index), + .comptime_alloc => |idx| if (sema) |s| Value.fromInterned( + try s.getComptimeAlloc(idx).val.intern(mod, s.arena), + ).maybeElemValueFull(sema, mod, index) else null, + .int, .eu_payload => null, + .opt_payload => |base| Value.fromInterned(base).maybeElemValueFull(sema, mod, index), + .comptime_field => |field_val| Value.fromInterned(field_val).maybeElemValueFull(sema, mod, index), + .elem => |elem| Value.fromInterned(elem.base).maybeElemValueFull(sema, mod, index + @as(usize, @intCast(elem.index))), + .field => |field| if (Value.fromInterned(field.base).pointerDecl(mod)) |decl_index| { + const base_decl = mod.declPtr(decl_index); + const field_val = try base_decl.val.fieldValue(mod, @as(usize, @intCast(field.index))); + return field_val.maybeElemValueFull(sema, mod, index); + } else null, }, - else => switch (mod.intern_pool.indexToKey(val.toIntern())) { - .undef => |ty| Value.fromInterned((try mod.intern(.{ - .undef = Type.fromInterned(ty).elemType2(mod).toIntern(), - }))), - .slice => |slice| return Value.fromInterned(slice.ptr).maybeElemValueFull(sema, mod, index), - .ptr => |ptr| switch (ptr.addr) { - .decl => |decl| mod.declPtr(decl).val.maybeElemValueFull(sema, mod, index), - .anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).maybeElemValueFull(sema, mod, index), - .comptime_alloc => |idx| if (sema) |s| Value.fromInterned( - try s.getComptimeAlloc(idx).val.intern(mod, s.arena), - ).maybeElemValueFull(sema, mod, index) else null, - .int, .eu_payload => null, - .opt_payload => |base| Value.fromInterned(base).maybeElemValueFull(sema, mod, index), - .comptime_field => |field_val| Value.fromInterned(field_val).maybeElemValueFull(sema, mod, index), - .elem => |elem| Value.fromInterned(elem.base).maybeElemValueFull(sema, mod, index + @as(usize, @intCast(elem.index))), - .field => |field| if (Value.fromInterned(field.base).pointerDecl(mod)) |decl_index| { - const base_decl = mod.declPtr(decl_index); - const field_val = try base_decl.val.fieldValue(mod, @as(usize, @intCast(field.index))); - return 
field_val.maybeElemValueFull(sema, mod, index); - } else null, - }, - .opt => |opt| Value.fromInterned(opt.val).maybeElemValueFull(sema, mod, index), - .aggregate => |aggregate| { - const len = mod.intern_pool.aggregateTypeLen(aggregate.ty); - if (index < len) return Value.fromInterned(switch (aggregate.storage) { - .bytes => |bytes| try mod.intern(.{ .int = .{ - .ty = .u8_type, - .storage = .{ .u64 = bytes[index] }, - } }), - .elems => |elems| elems[index], - .repeated_elem => |elem| elem, - }); - assert(index == len); - return Value.fromInterned(mod.intern_pool.indexToKey(aggregate.ty).array_type.sentinel); - }, - else => null, + .opt => |opt| Value.fromInterned(opt.val).maybeElemValueFull(sema, mod, index), + .aggregate => |aggregate| { + const len = mod.intern_pool.aggregateTypeLen(aggregate.ty); + if (index < len) return Value.fromInterned(switch (aggregate.storage) { + .bytes => |bytes| try mod.intern(.{ .int = .{ + .ty = .u8_type, + .storage = .{ .u64 = bytes[index] }, + } }), + .elems => |elems| elems[index], + .repeated_elem => |elem| elem, + }); + assert(index == len); + return Value.fromInterned(mod.intern_pool.indexToKey(aggregate.ty).array_type.sentinel); }, + else => null, }; } @@ -1688,85 +1359,61 @@ pub fn sliceArray( ) error{OutOfMemory}!Value { // TODO: write something like getCoercedInts to avoid needing to dupe const mod = sema.mod; - return switch (val.ip_index) { - .none => switch (val.tag()) { - .slice => val.castTag(.slice).?.data.ptr.sliceArray(sema, start, end), - .bytes => Tag.bytes.create(sema.arena, val.castTag(.bytes).?.data[start..end]), - .repeated => val, - .aggregate => Tag.aggregate.create(sema.arena, val.castTag(.aggregate).?.data[start..end]), + return switch (mod.intern_pool.indexToKey(val.toIntern())) { + .ptr => |ptr| switch (ptr.addr) { + .decl => |decl| try mod.declPtr(decl).val.sliceArray(sema, start, end), + .comptime_alloc => |idx| try Value.fromInterned( + try sema.getComptimeAlloc(idx).val.intern(mod, sema.arena), + ).sliceArray(sema, start, end), + .comptime_field => |comptime_field| Value.fromInterned(comptime_field) + .sliceArray(sema, start, end), + .elem => |elem| Value.fromInterned(elem.base) + .sliceArray(sema, start + @as(usize, @intCast(elem.index)), end + @as(usize, @intCast(elem.index))), else => unreachable, }, - else => switch (mod.intern_pool.indexToKey(val.toIntern())) { - .ptr => |ptr| switch (ptr.addr) { - .decl => |decl| try mod.declPtr(decl).val.sliceArray(sema, start, end), - .comptime_alloc => |idx| try Value.fromInterned( - try sema.getComptimeAlloc(idx).val.intern(mod, sema.arena), - ).sliceArray(sema, start, end), - .comptime_field => |comptime_field| Value.fromInterned(comptime_field) - .sliceArray(sema, start, end), - .elem => |elem| Value.fromInterned(elem.base) - .sliceArray(sema, start + @as(usize, @intCast(elem.index)), end + @as(usize, @intCast(elem.index))), + .aggregate => |aggregate| Value.fromInterned((try mod.intern(.{ .aggregate = .{ + .ty = switch (mod.intern_pool.indexToKey(mod.intern_pool.typeOf(val.toIntern()))) { + .array_type => |array_type| try mod.arrayType(.{ + .len = @as(u32, @intCast(end - start)), + .child = array_type.child, + .sentinel = if (end == array_type.len) array_type.sentinel else .none, + }), + .vector_type => |vector_type| try mod.vectorType(.{ + .len = @as(u32, @intCast(end - start)), + .child = vector_type.child, + }), else => unreachable, + }.toIntern(), + .storage = switch (aggregate.storage) { + .bytes => .{ .bytes = try sema.arena.dupe(u8, 
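With the legacy representation gone, `maybeElemValueFull` (and the helpers rewritten below it) no longer needs the `val.ip_index == .none` prelude that dispatched on a heap-allocated payload tag: every `Value` is an `InternPool` index, so a single `indexToKey` switch covers all cases. A sketch of the shape these helpers now share, using a hypothetical predicate purely for illustration:

    fn isAggregate(val: Value, mod: *Module) bool {
        // No `.none`/legacy-payload branch any more: dispatch directly on the interned key.
        return switch (mod.intern_pool.indexToKey(val.toIntern())) {
            .aggregate => true,
            else => false,
        };
    }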
mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.bytes[start..end]) }, + .elems => .{ .elems = try sema.arena.dupe(InternPool.Index, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.elems[start..end]) }, + .repeated_elem => |elem| .{ .repeated_elem = elem }, }, - .aggregate => |aggregate| Value.fromInterned((try mod.intern(.{ .aggregate = .{ - .ty = switch (mod.intern_pool.indexToKey(mod.intern_pool.typeOf(val.toIntern()))) { - .array_type => |array_type| try mod.arrayType(.{ - .len = @as(u32, @intCast(end - start)), - .child = array_type.child, - .sentinel = if (end == array_type.len) array_type.sentinel else .none, - }), - .vector_type => |vector_type| try mod.vectorType(.{ - .len = @as(u32, @intCast(end - start)), - .child = vector_type.child, - }), - else => unreachable, - }.toIntern(), - .storage = switch (aggregate.storage) { - .bytes => .{ .bytes = try sema.arena.dupe(u8, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.bytes[start..end]) }, - .elems => .{ .elems = try sema.arena.dupe(InternPool.Index, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.elems[start..end]) }, - .repeated_elem => |elem| .{ .repeated_elem = elem }, - }, - } }))), - else => unreachable, - }, + } }))), + else => unreachable, }; } pub fn fieldValue(val: Value, mod: *Module, index: usize) !Value { - return switch (val.ip_index) { - .none => switch (val.tag()) { - .aggregate => { - const field_values = val.castTag(.aggregate).?.data; - return field_values[index]; - }, - .@"union" => { - const payload = val.castTag(.@"union").?.data; - // TODO assert the tag is correct - return payload.val; - }, - else => unreachable, - }, - else => switch (mod.intern_pool.indexToKey(val.toIntern())) { - .undef => |ty| Value.fromInterned((try mod.intern(.{ - .undef = Type.fromInterned(ty).structFieldType(index, mod).toIntern(), - }))), - .aggregate => |aggregate| Value.fromInterned(switch (aggregate.storage) { - .bytes => |bytes| try mod.intern(.{ .int = .{ - .ty = .u8_type, - .storage = .{ .u64 = bytes[index] }, - } }), - .elems => |elems| elems[index], - .repeated_elem => |elem| elem, - }), - // TODO assert the tag is correct - .un => |un| Value.fromInterned(un.val), - else => unreachable, - }, + return switch (mod.intern_pool.indexToKey(val.toIntern())) { + .undef => |ty| Value.fromInterned((try mod.intern(.{ + .undef = Type.fromInterned(ty).structFieldType(index, mod).toIntern(), + }))), + .aggregate => |aggregate| Value.fromInterned(switch (aggregate.storage) { + .bytes => |bytes| try mod.intern(.{ .int = .{ + .ty = .u8_type, + .storage = .{ .u64 = bytes[index] }, + } }), + .elems => |elems| elems[index], + .repeated_elem => |elem| elem, + }), + // TODO assert the tag is correct + .un => |un| Value.fromInterned(un.val), + else => unreachable, }; } pub fn unionTag(val: Value, mod: *Module) ?Value { - if (val.ip_index == .none) return val.castTag(.@"union").?.data.tag; return switch (mod.intern_pool.indexToKey(val.toIntern())) { .undef, .enum_tag => val, .un => |un| if (un.tag != .none) Value.fromInterned(un.tag) else return null, @@ -1775,7 +1422,6 @@ pub fn unionTag(val: Value, mod: *Module) ?Value { } pub fn unionValue(val: Value, mod: *Module) Value { - if (val.ip_index == .none) return val.castTag(.@"union").?.data.val; return switch (mod.intern_pool.indexToKey(val.toIntern())) { .un => |un| Value.fromInterned(un.val), else => unreachable, @@ -1821,7 +1467,7 @@ pub fn elemPtr( } pub fn isUndef(val: Value, mod: *Module) bool { - return val.ip_index != .none and 
mod.intern_pool.isUndef(val.toIntern()); + return mod.intern_pool.isUndef(val.toIntern()); } /// TODO: check for cases such as array that is not marked undef but all the element @@ -1915,7 +1561,7 @@ pub fn floatFromIntAdvanced(val: Value, arena: Allocator, int_ty: Type, float_ty const scalar_ty = float_ty.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try floatFromIntScalar(elem_val, scalar_ty, mod, opt_sema)).intern(scalar_ty, mod); + scalar.* = (try floatFromIntScalar(elem_val, scalar_ty, mod, opt_sema)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_ty.toIntern(), @@ -1993,7 +1639,7 @@ pub fn intAddSat( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try intAddSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try intAddSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2043,7 +1689,7 @@ pub fn intSubSat( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try intSubSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try intSubSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2095,8 +1741,8 @@ pub fn intMulWithOverflow( const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); const of_math_result = try intMulWithOverflowScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod); - of.* = try of_math_result.overflow_bit.intern(Type.u1, mod); - scalar.* = try of_math_result.wrapped_result.intern(scalar_ty, mod); + of.* = of_math_result.overflow_bit.toIntern(); + scalar.* = of_math_result.wrapped_result.toIntern(); } return OverflowArithmeticResult{ .overflow_bit = Value.fromInterned((try mod.intern(.{ .aggregate = .{ @@ -2161,7 +1807,7 @@ pub fn numberMulWrap( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try numberMulWrapScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try numberMulWrapScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2207,7 +1853,7 @@ pub fn intMulSat( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try intMulSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try intMulSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2283,7 +1929,7 @@ pub fn bitwiseNot(val: Value, ty: Type, arena: Allocator, mod: *Module) !Value { const scalar_ty = ty.scalarType(mod); for (result_data, 0..) 
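Each of these element-wise vector helpers gets the same mechanical rewrite: the per-element result used to be re-interned via `try (...).intern(scalar_ty, mod)` and now just exposes its existing index with `.toIntern()`. A condensed sketch of the shared loop shape, assuming `ty` is a vector type, `arena` is the usual allocator, and `op` stands in for any of the scalar helpers (`bitwiseNotScalar`, `intAddSatScalar`, `floatAddScalar`, and so on; their exact parameter lists vary):

    const result_data = try arena.alloc(InternPool.Index, ty.vectorLen(mod));
    for (result_data, 0..) |*scalar, i| {
        const elem_val = try val.elemValue(mod, i);
        // The scalar helper already returns an interned Value; no re-interning step.
        scalar.* = (try op(elem_val, scalar_ty, mod)).toIntern();
    }
    return Value.fromInterned(try mod.intern(.{ .aggregate = .{
        .ty = ty.toIntern(),
        .storage = .{ .elems = result_data },
    } }));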
|*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try bitwiseNotScalar(elem_val, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try bitwiseNotScalar(elem_val, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2326,7 +1972,7 @@ pub fn bitwiseAnd(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: * for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try bitwiseAndScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod); + scalar.* = (try bitwiseAndScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2365,7 +2011,7 @@ pub fn bitwiseNand(lhs: Value, rhs: Value, ty: Type, arena: Allocator, mod: *Mod for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try bitwiseNandScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try bitwiseNandScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2393,7 +2039,7 @@ pub fn bitwiseOr(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *M for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try bitwiseOrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod); + scalar.* = (try bitwiseOrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2431,7 +2077,7 @@ pub fn bitwiseXor(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: * for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try bitwiseXorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod); + scalar.* = (try bitwiseXorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2497,7 +2143,7 @@ fn intDivInner(lhs: Value, rhs: Value, ty: Type, overflow_idx: *usize, allocator }, else => |e| return e, }; - scalar.* = try val.intern(scalar_ty, mod); + scalar.* = val.toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2545,7 +2191,7 @@ pub fn intDivFloor(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try intDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod); + scalar.* = (try intDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2587,7 +2233,7 @@ pub fn intMod(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Modu for (result_data, 0..) 
|*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try intModScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod); + scalar.* = (try intModScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2624,7 +2270,6 @@ pub fn intModScalar(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: /// Returns true if the value is a floating point type and is NaN. Returns false otherwise. pub fn isNan(val: Value, mod: *const Module) bool { - if (val.ip_index == .none) return false; return switch (mod.intern_pool.indexToKey(val.toIntern())) { .float => |float| switch (float.storage) { inline else => |x| std.math.isNan(x), @@ -2635,7 +2280,6 @@ pub fn isNan(val: Value, mod: *const Module) bool { /// Returns true if the value is a floating point type and is infinite. Returns false otherwise. pub fn isInf(val: Value, mod: *const Module) bool { - if (val.ip_index == .none) return false; return switch (mod.intern_pool.indexToKey(val.toIntern())) { .float => |float| switch (float.storage) { inline else => |x| std.math.isInf(x), @@ -2645,7 +2289,6 @@ pub fn isInf(val: Value, mod: *const Module) bool { } pub fn isNegativeInf(val: Value, mod: *const Module) bool { - if (val.ip_index == .none) return false; return switch (mod.intern_pool.indexToKey(val.toIntern())) { .float => |float| switch (float.storage) { inline else => |x| std.math.isNegativeInf(x), @@ -2661,7 +2304,7 @@ pub fn floatRem(lhs: Value, rhs: Value, float_type: Type, arena: Allocator, mod: for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatRemScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatRemScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -2694,7 +2337,7 @@ pub fn floatMod(lhs: Value, rhs: Value, float_type: Type, arena: Allocator, mod: for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatModScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatModScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -2755,7 +2398,7 @@ fn intMulInner(lhs: Value, rhs: Value, ty: Type, overflow_idx: *usize, allocator }, else => |e| return e, }; - scalar.* = try val.intern(scalar_ty, mod); + scalar.* = val.toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2797,7 +2440,7 @@ pub fn intTrunc(val: Value, ty: Type, allocator: Allocator, signedness: std.buil const scalar_ty = ty.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, bits, mod)).intern(scalar_ty, mod); + scalar.* = (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, bits, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2822,7 +2465,7 @@ pub fn intTruncBitsAsValue( for (result_data, 0..) 
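`isNan`, `isInf`, and `isNegativeInf` lose their `if (val.ip_index == .none) return false;` guards for the same reason: a non-interned value can no longer exist, and the `else => false` arm of the `indexToKey` switch already handles every non-float input. For example (a sketch, assuming `mod: *Module` and some interned non-float value `int_val`):

    // A non-float value simply falls through to the `else => false` arm.
    std.debug.assert(!int_val.isNan(mod));
    std.debug.assert(!int_val.isInf(mod));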
|*scalar, i| { const elem_val = try val.elemValue(mod, i); const bits_elem = try bits.elemValue(mod, i); - scalar.* = try (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, @as(u16, @intCast(bits_elem.toUnsignedInt(mod))), mod)).intern(scalar_ty, mod); + scalar.* = (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, @as(u16, @intCast(bits_elem.toUnsignedInt(mod))), mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2862,7 +2505,7 @@ pub fn shl(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try shlScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod); + scalar.* = (try shlScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -2912,8 +2555,8 @@ pub fn shlWithOverflow( const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); const of_math_result = try shlWithOverflowScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod); - of.* = try of_math_result.overflow_bit.intern(Type.u1, mod); - scalar.* = try of_math_result.wrapped_result.intern(scalar_ty, mod); + of.* = of_math_result.overflow_bit.toIntern(); + scalar.* = of_math_result.wrapped_result.toIntern(); } return OverflowArithmeticResult{ .overflow_bit = Value.fromInterned((try mod.intern(.{ .aggregate = .{ @@ -2973,7 +2616,7 @@ pub fn shlSat( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try shlSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try shlSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -3023,7 +2666,7 @@ pub fn shlTrunc( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try shlTruncScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod); + scalar.* = (try shlTruncScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -3053,7 +2696,7 @@ pub fn shr(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try shrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod); + scalar.* = (try shrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -3105,7 +2748,7 @@ pub fn floatNeg( const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try floatNegScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatNegScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3148,7 +2791,7 @@ pub fn floatAdd( for (result_data, 0..) 
|*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatAddScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatAddScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3192,7 +2835,7 @@ pub fn floatSub( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatSubScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatSubScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3236,7 +2879,7 @@ pub fn floatDiv( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatDivScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatDivScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3280,7 +2923,7 @@ pub fn floatDivFloor( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3324,7 +2967,7 @@ pub fn floatDivTrunc( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatDivTruncScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatDivTruncScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3368,7 +3011,7 @@ pub fn floatMul( for (result_data, 0..) |*scalar, i| { const lhs_elem = try lhs.elemValue(mod, i); const rhs_elem = try rhs.elemValue(mod, i); - scalar.* = try (try floatMulScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floatMulScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3405,7 +3048,7 @@ pub fn sqrt(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try sqrtScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try sqrtScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3437,7 +3080,7 @@ pub fn sin(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) 
|*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try sinScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try sinScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3469,7 +3112,7 @@ pub fn cos(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try cosScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try cosScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3501,7 +3144,7 @@ pub fn tan(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try tanScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try tanScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3533,7 +3176,7 @@ pub fn exp(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try expScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try expScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3565,7 +3208,7 @@ pub fn exp2(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try exp2Scalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try exp2Scalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3597,7 +3240,7 @@ pub fn log(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try logScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try logScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3629,7 +3272,7 @@ pub fn log2(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try log2Scalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try log2Scalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3661,7 +3304,7 @@ pub fn log10(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) 
|*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try log10Scalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try log10Scalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3693,7 +3336,7 @@ pub fn abs(val: Value, ty: Type, arena: Allocator, mod: *Module) !Value { const scalar_ty = ty.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try absScalar(elem_val, scalar_ty, mod, arena)).intern(scalar_ty, mod); + scalar.* = (try absScalar(elem_val, scalar_ty, mod, arena)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = ty.toIntern(), @@ -3744,7 +3387,7 @@ pub fn floor(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try floorScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try floorScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3776,7 +3419,7 @@ pub fn ceil(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try ceilScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try ceilScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3808,7 +3451,7 @@ pub fn round(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try roundScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try roundScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3840,7 +3483,7 @@ pub fn trunc(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu const scalar_ty = float_type.scalarType(mod); for (result_data, 0..) |*scalar, i| { const elem_val = try val.elemValue(mod, i); - scalar.* = try (try truncScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod); + scalar.* = (try truncScalar(elem_val, scalar_ty, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3881,7 +3524,7 @@ pub fn mulAdd( const mulend1_elem = try mulend1.elemValue(mod, i); const mulend2_elem = try mulend2.elemValue(mod, i); const addend_elem = try addend.elemValue(mod, i); - scalar.* = try (try mulAddScalar(scalar_ty, mulend1_elem, mulend2_elem, addend_elem, mod)).intern(scalar_ty, mod); + scalar.* = (try mulAddScalar(scalar_ty, mulend1_elem, mulend2_elem, addend_elem, mod)).toIntern(); } return Value.fromInterned((try mod.intern(.{ .aggregate = .{ .ty = float_type.toIntern(), @@ -3957,98 +3600,26 @@ pub fn intValueBounds(val: Value, mod: *Module) !?[2]Value { }; } -/// This type is not copyable since it may contain pointers to its inner data. 
-pub const Payload = struct {
-    tag: Tag,
-
-    pub const Slice = struct {
-        base: Payload,
-        data: struct {
-            ptr: Value,
-            len: Value,
-        },
-    };
-
-    pub const Bytes = struct {
-        base: Payload,
-        /// Includes the sentinel, if any.
-        data: []const u8,
-    };
-
-    pub const SubValue = struct {
-        base: Payload,
-        data: Value,
-    };
-
-    pub const Aggregate = struct {
-        base: Payload,
-        /// Field values. The types are according to the struct or array type.
-        /// The length is provided here so that copying a Value does not depend on the Type.
-        data: []Value,
-    };
-
-    pub const Union = struct {
-        pub const base_tag = Tag.@"union";
-
-        base: Payload = .{ .tag = base_tag },
-        data: Data,
-
-        pub const Data = struct {
-            tag: ?Value,
-            val: Value,
-        };
-    };
-};
-
 pub const BigIntSpace = InternPool.Key.Int.Storage.BigIntSpace;
 
-pub const zero_usize: Value = .{ .ip_index = .zero_usize, .legacy = undefined };
-pub const zero_u8: Value = .{ .ip_index = .zero_u8, .legacy = undefined };
-pub const zero_comptime_int: Value = .{ .ip_index = .zero, .legacy = undefined };
-pub const one_comptime_int: Value = .{ .ip_index = .one, .legacy = undefined };
-pub const negative_one_comptime_int: Value = .{ .ip_index = .negative_one, .legacy = undefined };
-pub const undef: Value = .{ .ip_index = .undef, .legacy = undefined };
-pub const @"void": Value = .{ .ip_index = .void_value, .legacy = undefined };
-pub const @"null": Value = .{ .ip_index = .null_value, .legacy = undefined };
-pub const @"false": Value = .{ .ip_index = .bool_false, .legacy = undefined };
-pub const @"true": Value = .{ .ip_index = .bool_true, .legacy = undefined };
-pub const @"unreachable": Value = .{ .ip_index = .unreachable_value, .legacy = undefined };
+pub const zero_usize: Value = .{ .ip_index = .zero_usize };
+pub const zero_u8: Value = .{ .ip_index = .zero_u8 };
+pub const zero_comptime_int: Value = .{ .ip_index = .zero };
+pub const one_comptime_int: Value = .{ .ip_index = .one };
+pub const negative_one_comptime_int: Value = .{ .ip_index = .negative_one };
+pub const undef: Value = .{ .ip_index = .undef };
+pub const @"void": Value = .{ .ip_index = .void_value };
+pub const @"null": Value = .{ .ip_index = .null_value };
+pub const @"false": Value = .{ .ip_index = .bool_false };
+pub const @"true": Value = .{ .ip_index = .bool_true };
+pub const @"unreachable": Value = .{ .ip_index = .unreachable_value };
 
-pub const generic_poison: Value = .{ .ip_index = .generic_poison, .legacy = undefined };
-pub const generic_poison_type: Value = .{ .ip_index = .generic_poison_type, .legacy = undefined };
-pub const empty_struct: Value = .{ .ip_index = .empty_struct, .legacy = undefined };
+pub const generic_poison: Value = .{ .ip_index = .generic_poison };
+pub const generic_poison_type: Value = .{ .ip_index = .generic_poison_type };
+pub const empty_struct: Value = .{ .ip_index = .empty_struct };
 
 pub fn makeBool(x: bool) Value {
     return if (x) Value.true else Value.false;
 }
 
 pub const RuntimeIndex = InternPool.RuntimeIndex;
-
-/// This function is used in the debugger pretty formatters in tools/ to fetch the
-/// Tag to Payload mapping to facilitate fancy debug printing for this type.
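The constants above are now plain interned indices; dropping the `.legacy = undefined` initializers (and the `Payload` hierarchy before them) leaves `Value` as little more than a wrapper around an `InternPool.Index`. A small usage sketch, assuming a `mod: *Module` is in scope:

    const t = Value.makeBool(true);
    std.debug.assert(t.toIntern() == .bool_true);
    std.debug.assert(!t.isUndef(mod));
    // fromInterned and toIntern now round-trip unconditionally, since every Value
    // is backed by an InternPool index.
    std.debug.assert(Value.fromInterned(t.toIntern()).toIntern() == t.toIntern());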
-fn dbHelper(self: *Value, tag_to_payload_map: *map: {
-    const tags = @typeInfo(Tag).Enum.fields;
-    var fields: [tags.len]std.builtin.Type.StructField = undefined;
-    for (&fields, tags) |*field, t| field.* = .{
-        .name = t.name ++ "",
-        .type = *@field(Tag, t.name).Type(),
-        .default_value = null,
-        .is_comptime = false,
-        .alignment = 0,
-    };
-    break :map @Type(.{ .Struct = .{
-        .layout = .@"extern",
-        .fields = &fields,
-        .decls = &.{},
-        .is_tuple = false,
-    } });
-}) void {
-    _ = self;
-    _ = tag_to_payload_map;
-}
-
-comptime {
-    if (!builtin.strip_debug_info) {
-        _ = &dbHelper;
-    }
-}
diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig
index e90a68bea5..11c95e318a 100644
--- a/src/arch/wasm/CodeGen.zig
+++ b/src/arch/wasm/CodeGen.zig
@@ -2216,7 +2216,7 @@ fn airCall(func: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
             },
             else => {},
         }
-        return func.fail("Expected a function, but instead found type '{}'", .{func_val.tag()});
+        return func.fail("Expected a function, but instead found '{s}'", .{@tagName(ip.indexToKey(func_val.toIntern()))});
     };
 
     const sret = if (first_param_sret) blk: {
diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig
index 273358a6d2..8fe19d4e21 100644
--- a/src/arch/x86_64/CodeGen.zig
+++ b/src/arch/x86_64/CodeGen.zig
@@ -12258,7 +12258,7 @@ fn genCall(self: *Self, info: union(enum) {
             switch (switch (func_key) {
                 else => func_key,
                 .ptr => |ptr| switch (ptr.addr) {
-                    .decl => |decl| mod.intern_pool.indexToKey(try mod.declPtr(decl).internValue(mod)),
+                    .decl => |decl| mod.intern_pool.indexToKey(mod.declPtr(decl).val.toIntern()),
                     else => func_key,
                 },
             }) {
diff --git a/src/codegen.zig b/src/codegen.zig
index 5bc0d1a81f..818284a8f0 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -925,7 +925,7 @@ fn genDeclRef(
     const ptr_bits = target.ptrBitWidth();
     const ptr_bytes: u64 = @divExact(ptr_bits, 8);
 
-    const decl_index = switch (zcu.intern_pool.indexToKey(try ptr_decl.internValue(zcu))) {
+    const decl_index = switch (zcu.intern_pool.indexToKey(ptr_decl.val.toIntern())) {
         .func => |func| func.owner_decl,
         .extern_func => |extern_func| extern_func.decl,
         else => ptr_decl_index,
diff --git a/src/codegen/c.zig b/src/codegen/c.zig
index 32ad38cd4d..d1575feaba 100644
--- a/src/codegen/c.zig
+++ b/src/codegen/c.zig
@@ -2657,7 +2657,7 @@ fn genExports(o: *Object) !void {
         .anon, .flush => return,
     };
     const decl = mod.declPtr(decl_index);
-    const tv: TypedValue = .{ .ty = decl.typeOf(mod), .val = Value.fromInterned((try decl.internValue(mod))) };
+    const tv: TypedValue = .{ .ty = decl.typeOf(mod), .val = decl.val };
     const fwd = o.dg.fwdDeclWriter();
 
     const exports = mod.decl_exports.get(decl_index) orelse return;
@@ -2894,7 +2894,7 @@ pub fn genDecl(o: *Object) !void {
     const mod = o.dg.module;
     const decl_index = o.dg.pass.decl;
     const decl = mod.declPtr(decl_index);
-    const tv: TypedValue = .{ .ty = decl.typeOf(mod), .val = Value.fromInterned((try decl.internValue(mod))) };
+    const tv: TypedValue = .{ .ty = decl.typeOf(mod), .val = decl.val };
 
     if (!tv.ty.isFnOrHasRuntimeBitsIgnoreComptime(mod)) return;
     if (tv.val.getExternFunc(mod)) |_| {
diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig
index cef6a9f7f3..7bbe4e715a 100644
--- a/src/codegen/llvm.zig
+++ b/src/codegen/llvm.zig
@@ -5532,7 +5532,7 @@ pub const FuncGen = struct {
         const msg_decl_index = mod.panic_messages[@intFromEnum(panic_id)].unwrap().?;
         const msg_decl = mod.declPtr(msg_decl_index);
         const msg_len = msg_decl.typeOf(mod).childType(mod).arrayLen(mod);
-        const msg_ptr = try o.lowerValue(try msg_decl.internValue(mod));
+        const msg_ptr = try o.lowerValue(msg_decl.val.toIntern());
         const null_opt_addr_global = try fg.resolveNullOptUsize();
         const target = mod.getTarget();
         const llvm_usize = try o.lowerType(Type.usize);
diff --git a/src/type.zig b/src/type.zig
index 8a79cb445c..a382a5ad97 100644
--- a/src/type.zig
+++ b/src/type.zig
@@ -2481,7 +2481,7 @@ pub const Type = struct {
                     }
                     const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]);
                     if (try field_ty.onePossibleValue(mod)) |field_opv| {
-                        field_val.* = try field_opv.intern(field_ty, mod);
+                        field_val.* = field_opv.toIntern();
                     } else return null;
                 }
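The backend changes above all follow the same pattern: a declaration's resolved value is always interned now, so these call sites no longer go through `Decl.internValue` and instead read `decl.val.toIntern()` (or pass `decl.val` along directly). A sketch of the new call-site shape, assuming `mod: *Module` and a valid `decl_index` are in scope:

    const decl = mod.declPtr(decl_index);
    // No separate interning step: the Decl's value is already backed by an InternPool index.
    const owner_decl_index = switch (mod.intern_pool.indexToKey(decl.val.toIntern())) {
        .func => |func| func.owner_decl,
        .extern_func => |extern_func| extern_func.decl,
        else => decl_index,
    };
    _ = owner_decl_index;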