Mirror of https://github.com/ziglang/zig.git, synced 2026-01-20 22:35:24 +00:00
remove mod aliases for Zcus
commit 1c1feba08e
parent 9868ed44b3
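
The change is mechanical: every local alias named `mod` that actually holds a Zcu handle (a leftover from when the type was called Module) is renamed to `zcu`, so the alias matches the type. Below is a minimal sketch of the convention, using hypothetical stand-in types rather than the real compiler internals:

const std = @import("std");

// Hypothetical stand-ins for the compiler's Zcu and Zcu.PerThread types;
// they exist only so this sketch compiles on its own.
const Zcu = struct {
    name: []const u8,
};

const PerThread = struct {
    zcu: *const Zcu,
};

// Old convention: the Zcu handle was bound to a local named `mod`.
fn nameOld(pt: PerThread) []const u8 {
    const mod = pt.zcu;
    return mod.name;
}

// New convention after this commit: the alias matches the type name.
fn nameNew(pt: PerThread) []const u8 {
    const zcu = pt.zcu;
    return zcu.name;
}

pub fn main() void {
    const unit: Zcu = .{ .name = "root" };
    const pt: PerThread = .{ .zcu = &unit };
    std.debug.print("{s} == {s}\n", .{ nameOld(pt), nameNew(pt) });
}

In the diff itself the handles come from pt.zcu, comp.zcu.?, or *_file.base.comp.zcu.?; only the binding name changes, never behavior.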
@@ -4350,8 +4350,8 @@ fn workerCheckEmbedFile(comp: *Compilation, embed_file: *Zcu.EmbedFile) void {
 }
 
 fn detectEmbedFileUpdate(comp: *Compilation, embed_file: *Zcu.EmbedFile) !void {
-    const mod = comp.zcu.?;
-    const ip = &mod.intern_pool;
+    const zcu = comp.zcu.?;
+    const ip = &zcu.intern_pool;
     var file = try embed_file.owner.root.openFile(embed_file.sub_file_path.toSlice(ip), .{});
     defer file.close();
 
@@ -4663,10 +4663,10 @@ fn reportRetryableEmbedFileError(
     embed_file: *Zcu.EmbedFile,
     err: anyerror,
 ) error{OutOfMemory}!void {
-    const mod = comp.zcu.?;
-    const gpa = mod.gpa;
+    const zcu = comp.zcu.?;
+    const gpa = zcu.gpa;
     const src_loc = embed_file.src_loc;
-    const ip = &mod.intern_pool;
+    const ip = &zcu.intern_pool;
     const err_msg = try Zcu.ErrorMsg.create(gpa, src_loc, "unable to load '{}/{s}': {s}", .{
         embed_file.owner.root,
         embed_file.sub_file_path.toSlice(ip),
@@ -4678,7 +4678,7 @@ fn reportRetryableEmbedFileError(
     {
         comp.mutex.lock();
         defer comp.mutex.unlock();
-        try mod.failed_embed_files.putNoClobber(gpa, embed_file, err_msg);
+        try zcu.failed_embed_files.putNoClobber(gpa, embed_file, err_msg);
     }
 }
src/Type.zig (28 lines changed)
@@ -3028,9 +3028,9 @@ pub fn getParentNamespace(ty: Type, zcu: *Zcu) InternPool.OptionalNamespaceIndex
 
 // Works for vectors and vectors of integers.
 pub fn minInt(ty: Type, pt: Zcu.PerThread, dest_ty: Type) !Value {
-    const mod = pt.zcu;
-    const scalar = try minIntScalar(ty.scalarType(mod), pt, dest_ty.scalarType(mod));
-    return if (ty.zigTypeTag(mod) == .Vector) Value.fromInterned(try pt.intern(.{ .aggregate = .{
+    const zcu = pt.zcu;
+    const scalar = try minIntScalar(ty.scalarType(zcu), pt, dest_ty.scalarType(zcu));
+    return if (ty.zigTypeTag(zcu) == .Vector) Value.fromInterned(try pt.intern(.{ .aggregate = .{
         .ty = dest_ty.toIntern(),
         .storage = .{ .repeated_elem = scalar.toIntern() },
     } })) else scalar;
@@ -3038,8 +3038,8 @@ pub fn minInt(ty: Type, pt: Zcu.PerThread, dest_ty: Type) !Value {
 
 /// Asserts that the type is an integer.
 pub fn minIntScalar(ty: Type, pt: Zcu.PerThread, dest_ty: Type) !Value {
-    const mod = pt.zcu;
-    const info = ty.intInfo(mod);
+    const zcu = pt.zcu;
+    const info = ty.intInfo(zcu);
     if (info.signedness == .unsigned) return pt.intValue(dest_ty, 0);
     if (info.bits == 0) return pt.intValue(dest_ty, -1);
 
@@ -3048,7 +3048,7 @@ pub fn minIntScalar(ty: Type, pt: Zcu.PerThread, dest_ty: Type) !Value {
         return pt.intValue(dest_ty, n);
     }
 
-    var res = try std.math.big.int.Managed.init(mod.gpa);
+    var res = try std.math.big.int.Managed.init(zcu.gpa);
     defer res.deinit();
 
     try res.setTwosCompIntLimit(.min, info.signedness, info.bits);
@@ -3059,9 +3059,9 @@ pub fn minIntScalar(ty: Type, pt: Zcu.PerThread, dest_ty: Type) !Value {
 // Works for vectors and vectors of integers.
 /// The returned Value will have type dest_ty.
 pub fn maxInt(ty: Type, pt: Zcu.PerThread, dest_ty: Type) !Value {
-    const mod = pt.zcu;
-    const scalar = try maxIntScalar(ty.scalarType(mod), pt, dest_ty.scalarType(mod));
-    return if (ty.zigTypeTag(mod) == .Vector) Value.fromInterned(try pt.intern(.{ .aggregate = .{
+    const zcu = pt.zcu;
+    const scalar = try maxIntScalar(ty.scalarType(zcu), pt, dest_ty.scalarType(zcu));
+    return if (ty.zigTypeTag(zcu) == .Vector) Value.fromInterned(try pt.intern(.{ .aggregate = .{
         .ty = dest_ty.toIntern(),
         .storage = .{ .repeated_elem = scalar.toIntern() },
     } })) else scalar;
@@ -3546,12 +3546,12 @@ pub fn optEuBaseType(ty: Type, zcu: *const Zcu) Type {
 }
 
 pub fn toUnsigned(ty: Type, pt: Zcu.PerThread) !Type {
-    const mod = pt.zcu;
-    return switch (ty.zigTypeTag(mod)) {
-        .Int => pt.intType(.unsigned, ty.intInfo(mod).bits),
+    const zcu = pt.zcu;
+    return switch (ty.zigTypeTag(zcu)) {
+        .Int => pt.intType(.unsigned, ty.intInfo(zcu).bits),
         .Vector => try pt.vectorType(.{
-            .len = ty.vectorLen(mod),
-            .child = (try ty.childType(mod).toUnsigned(pt)).toIntern(),
+            .len = ty.vectorLen(zcu),
+            .child = (try ty.childType(zcu).toUnsigned(pt)).toIntern(),
         }),
         else => unreachable,
     };
@@ -1555,8 +1555,8 @@ pub fn embedFile(
     import_string: []const u8,
     src_loc: Zcu.LazySrcLoc,
 ) !InternPool.Index {
-    const mod = pt.zcu;
-    const gpa = mod.gpa;
+    const zcu = pt.zcu;
+    const gpa = zcu.gpa;
 
     if (cur_file.mod.deps.get(import_string)) |pkg| {
         const resolved_path = try std.fs.path.resolve(gpa, &.{
@@ -1567,9 +1567,9 @@ pub fn embedFile(
         var keep_resolved_path = false;
         defer if (!keep_resolved_path) gpa.free(resolved_path);
 
-        const gop = try mod.embed_table.getOrPut(gpa, resolved_path);
+        const gop = try zcu.embed_table.getOrPut(gpa, resolved_path);
         errdefer {
-            assert(std.mem.eql(u8, mod.embed_table.pop().key, resolved_path));
+            assert(std.mem.eql(u8, zcu.embed_table.pop().key, resolved_path));
             keep_resolved_path = false;
         }
         if (gop.found_existing) return gop.value_ptr.*.val;
@@ -1594,9 +1594,9 @@ pub fn embedFile(
     var keep_resolved_path = false;
     defer if (!keep_resolved_path) gpa.free(resolved_path);
 
-    const gop = try mod.embed_table.getOrPut(gpa, resolved_path);
+    const gop = try zcu.embed_table.getOrPut(gpa, resolved_path);
     errdefer {
-        assert(std.mem.eql(u8, mod.embed_table.pop().key, resolved_path));
+        assert(std.mem.eql(u8, zcu.embed_table.pop().key, resolved_path));
         keep_resolved_path = false;
     }
     if (gop.found_existing) return gop.value_ptr.*.val;
@@ -1631,9 +1631,9 @@ fn newEmbedFile(
     result: **Zcu.EmbedFile,
     src_loc: Zcu.LazySrcLoc,
 ) !InternPool.Index {
-    const mod = pt.zcu;
-    const gpa = mod.gpa;
-    const ip = &mod.intern_pool;
+    const zcu = pt.zcu;
+    const gpa = zcu.gpa;
+    const ip = &zcu.intern_pool;
 
     const new_file = try gpa.create(Zcu.EmbedFile);
     errdefer gpa.destroy(new_file);
@@ -1655,7 +1655,7 @@ fn newEmbedFile(
     if (actual_read != size) return error.UnexpectedEndOfFile;
     bytes[0][size] = 0;
 
-    const comp = mod.comp;
+    const comp = zcu.comp;
     switch (comp.cache_use) {
         .whole => |whole| if (whole.cache_manifest) |man| {
             const copied_resolved_path = try gpa.dupe(u8, resolved_path);
@@ -2857,9 +2857,9 @@ pub fn errorSetFromUnsortedNames(
 
 /// Supports only pointers, not pointer-like optionals.
 pub fn ptrIntValue(pt: Zcu.PerThread, ty: Type, x: u64) Allocator.Error!Value {
-    const mod = pt.zcu;
-    assert(ty.zigTypeTag(mod) == .Pointer and !ty.isSlice(mod));
-    assert(x != 0 or ty.isAllowzeroPtr(mod));
+    const zcu = pt.zcu;
+    assert(ty.zigTypeTag(zcu) == .Pointer and !ty.isSlice(zcu));
+    assert(x != 0 or ty.isAllowzeroPtr(zcu));
     return Value.fromInterned(try pt.intern(.{ .ptr = .{
         .ty = ty.toIntern(),
         .base_addr = .int,
@@ -3008,10 +3008,10 @@ pub fn intFittingRange(pt: Zcu.PerThread, min: Value, max: Value) !Type {
 /// twos-complement integer; otherwise in an unsigned integer.
 /// Asserts that `val` is not undef. If `val` is negative, asserts that `sign` is true.
 pub fn intBitsForValue(pt: Zcu.PerThread, val: Value, sign: bool) u16 {
-    const mod = pt.zcu;
-    assert(!val.isUndef(mod));
+    const zcu = pt.zcu;
+    assert(!val.isUndef(zcu));
 
-    const key = mod.intern_pool.indexToKey(val.toIntern());
+    const key = zcu.intern_pool.indexToKey(val.toIntern());
     switch (key.int.storage) {
         .i64 => |x| {
             if (std.math.cast(u64, x)) |casted| return Type.smallestUnsignedBits(casted) + @intFromBool(sign);
@@ -365,8 +365,8 @@ pub fn generate(
 
 fn gen(self: *Self) !void {
     const pt = self.pt;
-    const mod = pt.zcu;
-    const cc = self.fn_type.fnCallingConvention(mod);
+    const zcu = pt.zcu;
+    const cc = self.fn_type.fnCallingConvention(zcu);
     if (cc != .Naked) {
         // TODO Finish function prologue and epilogue for sparc64.
 
@@ -494,8 +494,8 @@ fn gen(self: *Self) !void {
 
 fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
     const pt = self.pt;
-    const mod = pt.zcu;
-    const ip = &mod.intern_pool;
+    const zcu = pt.zcu;
+    const ip = &zcu.intern_pool;
     const air_tags = self.air.instructions.items(.tag);
 
     for (body) |inst| {
@@ -760,18 +760,18 @@ fn airAddSubWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
     const ty_pl = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
     const extra = self.air.extraData(Air.Bin, ty_pl.payload).data;
     const pt = self.pt;
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
         const lhs = try self.resolveInst(extra.lhs);
         const rhs = try self.resolveInst(extra.rhs);
         const lhs_ty = self.typeOf(extra.lhs);
         const rhs_ty = self.typeOf(extra.rhs);
 
-        switch (lhs_ty.zigTypeTag(mod)) {
+        switch (lhs_ty.zigTypeTag(zcu)) {
             .Vector => return self.fail("TODO implement add_with_overflow/sub_with_overflow for vectors", .{}),
             .Int => {
-                assert(lhs_ty.eql(rhs_ty, mod));
-                const int_info = lhs_ty.intInfo(mod);
+                assert(lhs_ty.eql(rhs_ty, zcu));
+                const int_info = lhs_ty.intInfo(zcu);
                 switch (int_info.bits) {
                     32, 64 => {
                         // Only say yes if the operation is
@@ -839,9 +839,9 @@ fn airAddSubWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
 
 fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
     const pt = self.pt;
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const vector_ty = self.typeOfIndex(inst);
-    const len = vector_ty.vectorLen(mod);
+    const len = vector_ty.vectorLen(zcu);
     const ty_pl = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
     const elements = @as([]const Air.Inst.Ref, @ptrCast(self.air.extra[ty_pl.payload..][0..len]));
     const result: MCValue = res: {
@@ -874,13 +874,13 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
 
 fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
     const pt = self.pt;
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const ty_op = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
         const ptr_ty = self.typeOf(ty_op.operand);
         const ptr = try self.resolveInst(ty_op.operand);
-        const array_ty = ptr_ty.childType(mod);
-        const array_len = @as(u32, @intCast(array_ty.arrayLen(mod)));
+        const array_ty = ptr_ty.childType(zcu);
+        const array_len = @as(u32, @intCast(array_ty.arrayLen(zcu)));
         const ptr_bytes = 8;
         const stack_offset = try self.allocMem(inst, ptr_bytes * 2, .@"8");
         try self.genSetStack(ptr_ty, stack_offset, ptr);
@@ -1305,11 +1305,11 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
     const args = @as([]const Air.Inst.Ref, @ptrCast(self.air.extra[extra.end .. extra.end + extra.data.args_len]));
     const ty = self.typeOf(callee);
     const pt = self.pt;
-    const mod = pt.zcu;
-    const ip = &mod.intern_pool;
-    const fn_ty = switch (ty.zigTypeTag(mod)) {
+    const zcu = pt.zcu;
+    const ip = &zcu.intern_pool;
+    const fn_ty = switch (ty.zigTypeTag(zcu)) {
        .Fn => ty,
-        .Pointer => ty.childType(mod),
+        .Pointer => ty.childType(zcu),
        else => unreachable,
     };
 
@@ -1361,7 +1361,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
             return self.fail("TODO implement calling bitcasted functions", .{});
         },
     } else {
-        assert(ty.zigTypeTag(mod) == .Pointer);
+        assert(ty.zigTypeTag(zcu) == .Pointer);
         const mcv = try self.resolveInst(callee);
         try self.genSetReg(ty, .o7, mcv);
 
@@ -1636,13 +1636,9 @@ fn airCtz(self: *Self, inst: Air.Inst.Index) !void {
 }
 
 fn airDbgInlineBlock(self: *Self, inst: Air.Inst.Index) !void {
-    const pt = self.pt;
-    const mod = pt.zcu;
     const ty_pl = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
     const extra = self.air.extraData(Air.DbgInlineBlock, ty_pl.payload);
-    const func = mod.funcInfo(extra.data.func);
     // TODO emit debug info for function change
-    _ = func;
     try self.lowerBlock(inst, @ptrCast(self.air.extra[extra.end..][0..extra.data.body_len]));
 }
 
@@ -1736,11 +1732,11 @@ fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
         return self.finishAir(inst, .dead, .{ ty_op.operand, .none, .none });
 
     const pt = self.pt;
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const operand_ty = self.typeOf(ty_op.operand);
     const operand = try self.resolveInst(ty_op.operand);
-    const info_a = operand_ty.intInfo(mod);
-    const info_b = self.typeOfIndex(inst).intInfo(mod);
+    const info_a = operand_ty.intInfo(zcu);
+    const info_b = self.typeOfIndex(inst).intInfo(zcu);
     if (info_a.signedness != info_b.signedness)
         return self.fail("TODO gen intcast sign safety in semantic analysis", .{});
 
@@ -2025,18 +2021,18 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
     const ty_pl = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
     const extra = self.air.extraData(Air.Bin, ty_pl.payload).data;
     const pt = self.pt;
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
         const lhs = try self.resolveInst(extra.lhs);
         const rhs = try self.resolveInst(extra.rhs);
         const lhs_ty = self.typeOf(extra.lhs);
         const rhs_ty = self.typeOf(extra.rhs);
 
-        switch (lhs_ty.zigTypeTag(mod)) {
+        switch (lhs_ty.zigTypeTag(zcu)) {
             .Vector => return self.fail("TODO implement mul_with_overflow for vectors", .{}),
             .Int => {
-                assert(lhs_ty.eql(rhs_ty, mod));
-                const int_info = lhs_ty.intInfo(mod);
+                assert(lhs_ty.eql(rhs_ty, zcu));
+                const int_info = lhs_ty.intInfo(zcu);
                 switch (int_info.bits) {
                     1...32 => {
                         try self.spillConditionFlagsIfOccupied();
@@ -2090,7 +2086,7 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
 fn airNot(self: *Self, inst: Air.Inst.Index) !void {
     const ty_op = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
     const pt = self.pt;
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
         const operand = try self.resolveInst(ty_op.operand);
         const operand_ty = self.typeOf(ty_op.operand);
@@ -2106,7 +2102,7 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
                 };
             },
             else => {
-                switch (operand_ty.zigTypeTag(mod)) {
+                switch (operand_ty.zigTypeTag(zcu)) {
                    .Bool => {
                         const op_reg = switch (operand) {
                             .register => |r| r,
@@ -2140,7 +2136,7 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
                    },
                    .Vector => return self.fail("TODO bitwise not for vectors", .{}),
                    .Int => {
-                        const int_info = operand_ty.intInfo(mod);
+                        const int_info = operand_ty.intInfo(zcu);
                         if (int_info.bits <= 64) {
                             const op_reg = switch (operand) {
                                 .register => |r| r,
@@ -2323,17 +2319,17 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
     const ty_pl = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
     const extra = self.air.extraData(Air.Bin, ty_pl.payload).data;
     const pt = self.pt;
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
         const lhs = try self.resolveInst(extra.lhs);
         const rhs = try self.resolveInst(extra.rhs);
         const lhs_ty = self.typeOf(extra.lhs);
         const rhs_ty = self.typeOf(extra.rhs);
 
-        switch (lhs_ty.zigTypeTag(mod)) {
+        switch (lhs_ty.zigTypeTag(zcu)) {
             .Vector => return self.fail("TODO implement mul_with_overflow for vectors", .{}),
             .Int => {
-                const int_info = lhs_ty.intInfo(mod);
+                const int_info = lhs_ty.intInfo(zcu);
                 if (int_info.bits <= 64) {
                     try self.spillConditionFlagsIfOccupied();
 
@@ -4328,13 +4324,13 @@ fn minMax(
     rhs_ty: Type,
 ) InnerError!MCValue {
     const pt = self.pt;
-    const mod = pt.zcu;
-    assert(lhs_ty.eql(rhs_ty, mod));
-    switch (lhs_ty.zigTypeTag(mod)) {
+    const zcu = pt.zcu;
+    assert(lhs_ty.eql(rhs_ty, zcu));
+    switch (lhs_ty.zigTypeTag(zcu)) {
        .Float => return self.fail("TODO min/max on floats", .{}),
        .Vector => return self.fail("TODO min/max on vectors", .{}),
        .Int => {
-            const int_info = lhs_ty.intInfo(mod);
+            const int_info = lhs_ty.intInfo(zcu);
            if (int_info.bits <= 64) {
                // TODO skip register setting when one of the operands
                // is a small (fits in i13) immediate.
@@ -4556,8 +4552,8 @@ fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!MCValue {
 
 fn ret(self: *Self, mcv: MCValue) !void {
     const pt = self.pt;
-    const mod = pt.zcu;
-    const ret_ty = self.fn_type.fnReturnType(mod);
+    const zcu = pt.zcu;
+    const ret_ty = self.fn_type.fnReturnType(zcu);
     try self.setRegOrMem(ret_ty, self.ret_mcv, mcv);
 
     // Just add space for a branch instruction, patch this later
@@ -4744,9 +4740,9 @@ fn trunc(
     dest_ty: Type,
 ) !MCValue {
     const pt = self.pt;
-    const mod = pt.zcu;
-    const info_a = operand_ty.intInfo(mod);
-    const info_b = dest_ty.intInfo(mod);
+    const zcu = pt.zcu;
+    const info_a = operand_ty.intInfo(zcu);
+    const info_b = dest_ty.intInfo(zcu);
 
     if (info_b.bits <= 64) {
         const operand_reg = switch (operand) {
@@ -1382,8 +1382,8 @@ fn updateLazySymbol(
     sym: link.File.LazySymbol,
     symbol_index: Symbol.Index,
 ) !void {
-    const mod = pt.zcu;
-    const gpa = mod.gpa;
+    const zcu = pt.zcu;
+    const gpa = zcu.gpa;
 
     var required_alignment: InternPool.Alignment = .none;
     var code_buffer = std.ArrayList(u8).init(gpa);
@@ -1398,7 +1398,7 @@ fn updateLazySymbol(
         break :blk try self.strtab.insert(gpa, name);
     };
 
-    const src = Type.fromInterned(sym.ty).srcLocOrNull(mod) orelse Zcu.LazySrcLoc.unneeded;
+    const src = Type.fromInterned(sym.ty).srcLocOrNull(zcu) orelse Zcu.LazySrcLoc.unneeded;
     const res = try codegen.generateLazySymbol(
         &elf_file.base,
         pt,
@@ -1513,7 +1513,7 @@ pub fn updateExports(
     const tracy = trace(@src());
     defer tracy.end();
 
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const gpa = elf_file.base.comp.gpa;
     const metadata = switch (exported) {
         .nav => |nav| blk: {
@@ -1521,15 +1521,15 @@ pub fn updateExports(
             break :blk self.navs.getPtr(nav).?;
         },
         .uav => |uav| self.uavs.getPtr(uav) orelse blk: {
-            const first_exp = mod.all_exports.items[export_indices[0]];
+            const first_exp = zcu.all_exports.items[export_indices[0]];
             const res = try self.lowerUav(elf_file, pt, uav, .none, first_exp.src);
             switch (res) {
                 .mcv => {},
                 .fail => |em| {
                     // TODO maybe it's enough to return an error here and let Zcu.processExportsInner
                     // handle the error?
-                    try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
-                    mod.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
+                    try zcu.failed_exports.ensureUnusedCapacity(zcu.gpa, 1);
+                    zcu.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
                     return;
                 },
             }
@@ -1542,11 +1542,11 @@ pub fn updateExports(
     const esym_shndx = self.symtab.items(.shndx)[esym_index];
 
     for (export_indices) |export_idx| {
-        const exp = mod.all_exports.items[export_idx];
+        const exp = zcu.all_exports.items[export_idx];
         if (exp.opts.section.unwrap()) |section_name| {
-            if (!section_name.eqlSlice(".text", &mod.intern_pool)) {
-                try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
-                mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
+            if (!section_name.eqlSlice(".text", &zcu.intern_pool)) {
+                try zcu.failed_exports.ensureUnusedCapacity(zcu.gpa, 1);
+                zcu.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
                     gpa,
                     exp.src,
                     "Unimplemented: ExportOptions.section",
@@ -1560,8 +1560,8 @@ pub fn updateExports(
             .strong => elf.STB_GLOBAL,
             .weak => elf.STB_WEAK,
             .link_once => {
-                try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
-                mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
+                try zcu.failed_exports.ensureUnusedCapacity(zcu.gpa, 1);
+                zcu.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
                     gpa,
                     exp.src,
                     "Unimplemented: GlobalLinkage.LinkOnce",
@@ -1571,7 +1571,7 @@ pub fn updateExports(
             },
         };
         const stt_bits: u8 = @as(u4, @truncate(esym.st_info));
-        const exp_name = exp.opts.name.toSlice(&mod.intern_pool);
+        const exp_name = exp.opts.name.toSlice(&zcu.intern_pool);
         const name_off = try self.strtab.insert(gpa, exp_name);
         const global_sym_index = if (metadata.@"export"(self, exp_name)) |exp_index|
             exp_index.*
@@ -1626,8 +1626,8 @@ pub fn deleteExport(
         .nav => |nav| self.navs.getPtr(nav),
         .uav => |uav| self.uavs.getPtr(uav),
     } orelse return;
-    const mod = elf_file.base.comp.zcu.?;
-    const exp_name = name.toSlice(&mod.intern_pool);
+    const zcu = elf_file.base.comp.zcu.?;
+    const exp_name = name.toSlice(&zcu.intern_pool);
     const esym_index = metadata.@"export"(self, exp_name) orelse return;
     log.debug("deleting export '{s}'", .{exp_name});
     const esym = &self.symtab.items(.elf_sym)[esym_index.*];
@@ -1256,7 +1256,7 @@ pub fn updateExports(
     const tracy = trace(@src());
     defer tracy.end();
 
-    const mod = pt.zcu;
+    const zcu = pt.zcu;
     const gpa = macho_file.base.comp.gpa;
     const metadata = switch (exported) {
         .nav => |nav| blk: {
@@ -1264,15 +1264,15 @@ pub fn updateExports(
             break :blk self.navs.getPtr(nav).?;
         },
         .uav => |uav| self.uavs.getPtr(uav) orelse blk: {
-            const first_exp = mod.all_exports.items[export_indices[0]];
+            const first_exp = zcu.all_exports.items[export_indices[0]];
             const res = try self.lowerUav(macho_file, pt, uav, .none, first_exp.src);
             switch (res) {
                 .mcv => {},
                 .fail => |em| {
                     // TODO maybe it's enough to return an error here and let Zcu.processExportsInner
                     // handle the error?
-                    try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
-                    mod.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
+                    try zcu.failed_exports.ensureUnusedCapacity(zcu.gpa, 1);
+                    zcu.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
                     return;
                 },
             }
@@ -1284,11 +1284,11 @@ pub fn updateExports(
     const nlist = self.symtab.items(.nlist)[nlist_idx];
 
     for (export_indices) |export_idx| {
-        const exp = mod.all_exports.items[export_idx];
+        const exp = zcu.all_exports.items[export_idx];
         if (exp.opts.section.unwrap()) |section_name| {
-            if (!section_name.eqlSlice("__text", &mod.intern_pool)) {
-                try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
-                mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
+            if (!section_name.eqlSlice("__text", &zcu.intern_pool)) {
+                try zcu.failed_exports.ensureUnusedCapacity(zcu.gpa, 1);
+                zcu.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
                     gpa,
                     exp.src,
                     "Unimplemented: ExportOptions.section",
@@ -1298,7 +1298,7 @@ pub fn updateExports(
             }
         }
         if (exp.opts.linkage == .link_once) {
-            try mod.failed_exports.putNoClobber(mod.gpa, export_idx, try Zcu.ErrorMsg.create(
+            try zcu.failed_exports.putNoClobber(zcu.gpa, export_idx, try Zcu.ErrorMsg.create(
                 gpa,
                 exp.src,
                 "Unimplemented: GlobalLinkage.link_once",
@@ -1307,7 +1307,7 @@ pub fn updateExports(
             continue;
         }
 
-        const exp_name = exp.opts.name.toSlice(&mod.intern_pool);
+        const exp_name = exp.opts.name.toSlice(&zcu.intern_pool);
         const global_nlist_index = if (metadata.@"export"(self, exp_name)) |exp_index|
             exp_index.*
         else blk: {
@@ -1437,15 +1437,15 @@ pub fn deleteExport(
     exported: Zcu.Exported,
     name: InternPool.NullTerminatedString,
 ) void {
-    const mod = macho_file.base.comp.zcu.?;
+    const zcu = macho_file.base.comp.zcu.?;
 
     const metadata = switch (exported) {
         .nav => |nav| self.navs.getPtr(nav),
         .uav => |uav| self.uavs.getPtr(uav),
     } orelse return;
-    const nlist_index = metadata.@"export"(self, name.toSlice(&mod.intern_pool)) orelse return;
+    const nlist_index = metadata.@"export"(self, name.toSlice(&zcu.intern_pool)) orelse return;
 
-    log.debug("deleting export '{}'", .{name.fmt(&mod.intern_pool)});
+    log.debug("deleting export '{}'", .{name.fmt(&zcu.intern_pool)});
 
     const nlist = &self.symtab.items(.nlist)[nlist_index.*];
     self.symtab.items(.size)[nlist_index.*] = 0;
@@ -317,8 +317,8 @@ pub fn createEmpty(
 
 fn putFn(self: *Plan9, nav_index: InternPool.Nav.Index, out: FnNavOutput) !void {
     const gpa = self.base.comp.gpa;
-    const mod = self.base.comp.zcu.?;
-    const file_scope = mod.navFileScopeIndex(nav_index);
+    const zcu = self.base.comp.zcu.?;
+    const file_scope = zcu.navFileScopeIndex(nav_index);
     const fn_map_res = try self.fn_nav_table.getOrPut(gpa, file_scope);
     if (fn_map_res.found_existing) {
         if (try fn_map_res.value_ptr.functions.fetchPut(gpa, nav_index, out)) |old_entry| {
@@ -326,7 +326,7 @@ fn putFn(self: *Plan9, nav_index: InternPool.Nav.Index, out: FnNavOutput) !void
             gpa.free(old_entry.value.lineinfo);
         }
     } else {
-        const file = mod.fileByIndex(file_scope);
+        const file = zcu.fileByIndex(file_scope);
         const arena = self.path_arena.allocator();
         // each file gets a symbol
         fn_map_res.value_ptr.* = .{
@@ -391,10 +391,10 @@ pub fn updateFunc(self: *Plan9, pt: Zcu.PerThread, func_index: InternPool.Index,
         @panic("Attempted to compile for object format that was disabled by build configuration");
     }
 
-    const mod = pt.zcu;
-    const gpa = mod.gpa;
+    const zcu = pt.zcu;
+    const gpa = zcu.gpa;
     const target = self.base.comp.root_mod.resolved_target.result;
-    const func = mod.funcInfo(func_index);
+    const func = zcu.funcInfo(func_index);
 
     const atom_idx = try self.seeNav(pt, func.owner_nav);
 
@@ -413,7 +413,7 @@ pub fn updateFunc(self: *Plan9, pt: Zcu.PerThread, func_index: InternPool.Index,
     const res = try codegen.generateFunction(
         &self.base,
         pt,
-        mod.navSrcLoc(func.owner_nav),
+        zcu.navSrcLoc(func.owner_nav),
         func_index,
         air,
         liveness,
@@ -423,7 +423,7 @@ pub fn updateFunc(self: *Plan9, pt: Zcu.PerThread, func_index: InternPool.Index,
     const code = switch (res) {
         .ok => try code_buffer.toOwnedSlice(),
         .fail => |em| {
-            try mod.failed_codegen.put(gpa, func.owner_nav, em);
+            try zcu.failed_codegen.put(gpa, func.owner_nav, em);
             return;
         },
     };
@@ -952,11 +952,11 @@ pub fn freeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) void {
     const gpa = self.base.comp.gpa;
     // TODO audit the lifetimes of decls table entries. It's possible to get
    // freeDecl without any updateDecl in between.
-    const mod = self.base.comp.zcu.?;
-    const decl = mod.declPtr(decl_index);
-    const is_fn = decl.val.isFuncBody(mod);
+    const zcu = self.base.comp.zcu.?;
+    const decl = zcu.declPtr(decl_index);
+    const is_fn = decl.val.isFuncBody(zcu);
     if (is_fn) {
-        const symidx_and_submap = self.fn_decl_table.get(decl.getFileScope(mod)).?;
+        const symidx_and_submap = self.fn_decl_table.get(decl.getFileScope(zcu)).?;
         var submap = symidx_and_submap.functions;
         if (submap.fetchSwapRemove(decl_index)) |removed_entry| {
             gpa.free(removed_entry.value.code);
@@ -1256,8 +1256,8 @@ pub fn writeSym(self: *Plan9, w: anytype, sym: aout.Sym) !void {
 }
 
 pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
-    const mod = self.base.comp.zcu.?;
-    const ip = &mod.intern_pool;
+    const zcu = self.base.comp.zcu.?;
+    const ip = &zcu.intern_pool;
     const writer = buf.writer();
     // write __GOT
     try self.writeSym(writer, self.syms.items[0]);
@@ -1284,7 +1284,7 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
         try self.writeSym(writer, sym);
         if (self.nav_exports.get(nav_index)) |export_indices| {
             for (export_indices) |export_idx| {
-                const exp = mod.all_exports.items[export_idx];
+                const exp = zcu.all_exports.items[export_idx];
                 if (nav_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
                     try self.writeSym(writer, self.syms.items[exp_i]);
                 }
@@ -1323,7 +1323,7 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
         try self.writeSym(writer, sym);
         if (self.nav_exports.get(nav_index)) |export_indices| {
             for (export_indices) |export_idx| {
-                const exp = mod.all_exports.items[export_idx];
+                const exp = zcu.all_exports.items[export_idx];
                 if (nav_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
                     const s = self.syms.items[exp_i];
                     if (mem.eql(u8, s.name, "_start"))
@@ -803,9 +803,9 @@ pub fn getUavVAddr(
     const parent_atom_index = wasm_file.symbol_atom.get(.{ .file = zig_object.index, .index = @enumFromInt(reloc_info.parent_atom_index) }).?;
     const parent_atom = wasm_file.getAtomPtr(parent_atom_index);
     const is_wasm32 = target.cpu.arch == .wasm32;
-    const mod = wasm_file.base.comp.zcu.?;
-    const ty = Type.fromInterned(mod.intern_pool.typeOf(uav));
-    if (ty.zigTypeTag(mod) == .Fn) {
+    const zcu = wasm_file.base.comp.zcu.?;
+    const ty = Type.fromInterned(zcu.intern_pool.typeOf(uav));
+    if (ty.zigTypeTag(zcu) == .Fn) {
         std.debug.assert(reloc_info.addend == 0); // addend not allowed for function relocations
         try parent_atom.relocs.append(gpa, .{
             .index = target_symbol_index,
@@ -834,13 +834,13 @@ pub fn deleteExport(
     exported: Zcu.Exported,
     name: InternPool.NullTerminatedString,
 ) void {
-    const mod = wasm_file.base.comp.zcu.?;
+    const zcu = wasm_file.base.comp.zcu.?;
     const nav_index = switch (exported) {
         .nav => |nav_index| nav_index,
         .uav => @panic("TODO: implement Wasm linker code for exporting a constant value"),
     };
     const nav_info = zig_object.navs.getPtr(nav_index) orelse return;
-    if (nav_info.@"export"(zig_object, name.toSlice(&mod.intern_pool))) |sym_index| {
+    if (nav_info.@"export"(zig_object, name.toSlice(&zcu.intern_pool))) |sym_index| {
         const sym = zig_object.symbol(sym_index);
         nav_info.deleteExport(sym_index);
         std.debug.assert(zig_object.global_syms.remove(sym.name));
@@ -930,8 +930,8 @@ pub fn updateExports(
 
 pub fn freeNav(zig_object: *ZigObject, wasm_file: *Wasm, nav_index: InternPool.Nav.Index) void {
     const gpa = wasm_file.base.comp.gpa;
-    const mod = wasm_file.base.comp.zcu.?;
-    const ip = &mod.intern_pool;
+    const zcu = wasm_file.base.comp.zcu.?;
+    const ip = &zcu.intern_pool;
     const nav_info = zig_object.navs.getPtr(nav_index).?;
     const atom_index = nav_info.atom;
     const atom = wasm_file.getAtomPtr(atom_index);
@@ -956,7 +956,7 @@ pub fn freeNav(zig_object: *ZigObject, wasm_file: *Wasm, nav_index: InternPool.N
         segment.name = &.{}; // Ensure no accidental double free
     }
 
-    const nav_val = mod.navValue(nav_index).toIntern();
+    const nav_val = zcu.navValue(nav_index).toIntern();
     if (ip.indexToKey(nav_val) == .@"extern") {
         std.debug.assert(zig_object.imports.remove(atom.sym_index));
     }
@@ -428,10 +428,10 @@ const Writer = struct {
     }
 
     fn writeAggregateInit(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
-        const mod = w.pt.zcu;
+        const zcu = w.pt.zcu;
         const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
         const vector_ty = ty_pl.ty.toType();
-        const len = @as(usize, @intCast(vector_ty.arrayLen(mod)));
+        const len = @as(usize, @intCast(vector_ty.arrayLen(zcu)));
         const elements = @as([]const Air.Inst.Ref, @ptrCast(w.air.extra[ty_pl.payload..][0..len]));
 
         try w.writeType(s, vector_ty);
@@ -508,11 +508,11 @@ const Writer = struct {
     }
 
     fn writeSelect(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
-        const mod = w.pt.zcu;
+        const zcu = w.pt.zcu;
         const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
         const extra = w.air.extraData(Air.Bin, pl_op.payload).data;
 
-        const elem_ty = w.typeOfIndex(inst).childType(mod);
+        const elem_ty = w.typeOfIndex(inst).childType(zcu);
         try w.writeType(s, elem_ty);
         try s.writeAll(", ");
         try w.writeOperand(s, inst, 0, pl_op.operand);
@@ -974,7 +974,7 @@ const Writer = struct {
     }
 
     fn typeOfIndex(w: *Writer, inst: Air.Inst.Index) Type {
-        const mod = w.pt.zcu;
-        return w.air.typeOfIndex(inst, &mod.intern_pool);
+        const zcu = w.pt.zcu;
+        return w.air.typeOfIndex(inst, &zcu.intern_pool);
     }
 };
@@ -62,8 +62,8 @@ pub fn print(
     comptime have_sema: bool,
     sema: if (have_sema) *Sema else void,
 ) (@TypeOf(writer).Error || Zcu.CompileError)!void {
-    const mod = pt.zcu;
-    const ip = &mod.intern_pool;
+    const zcu = pt.zcu;
+    const ip = &zcu.intern_pool;
     switch (ip.indexToKey(val.toIntern())) {
         .int_type,
         .ptr_type,
@@ -116,7 +116,7 @@ pub fn print(
             enum_literal.fmt(ip),
         }),
         .enum_tag => |enum_tag| {
-            const enum_type = ip.loadEnumType(val.typeOf(mod).toIntern());
+            const enum_type = ip.loadEnumType(val.typeOf(zcu).toIntern());
             if (enum_type.tagValueIndex(ip, val.toIntern())) |tag_index| {
                 return writer.print(".{i}", .{enum_type.names.get(ip)[tag_index].fmt(ip)});
             }
@@ -173,7 +173,7 @@ pub fn print(
                 return;
             }
             if (un.tag == .none) {
-                const backing_ty = try val.typeOf(mod).unionBackingType(pt);
+                const backing_ty = try val.typeOf(zcu).unionBackingType(pt);
                 try writer.print("@bitCast(@as({}, ", .{backing_ty.fmt(pt)});
                 try print(Value.fromInterned(un.val), writer, level - 1, pt, have_sema, sema);
                 try writer.writeAll("))");