stage2: fix InternPool compile errors on 32-bit targets

mlugg 2023-06-11 13:15:37 +01:00 committed by Andrew Kelley
parent 2afc689060
commit 63604024f4
6 changed files with 28 additions and 24 deletions
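
The failures all have the same shape: InternPool and Value carry lengths and element/field indices as 64-bit integers, while slice indexing, slicing bounds, and capacity counts take usize. On 64-bit targets the coercion is implicit and lossless; on 32-bit targets usize is only 32 bits wide, so each of these sites is a compile error until the value is narrowed explicitly. The hunks below do that with the two-argument @intCast(usize, ...) builtin of this Zig version (and, in Sema, the existing usizeCast helper). As a standalone sketch of the error class, not taken from the commit, something like the following (the sentinelByte function and its test are hypothetical):

const std = @import("std");

// Hypothetical example, not from this commit: index a slice with a 64-bit length.
fn sentinelByte(bytes: []const u8, len: u64) u8 {
    // `bytes[len]` compiles on 64-bit hosts, where u64 coerces to the 64-bit usize,
    // but on 32-bit hosts it fails with "expected type 'usize', found 'u64'".
    return bytes[@intCast(usize, len)]; // explicit narrowing, as done throughout this commit
}

test "index a slice with a u64 length" {
    try std.testing.expectEqual(@as(u8, 0), sentinelByte("abc\x00", 3));
}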

View File

@@ -3824,13 +3824,13 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
 assert(child == .u8_type);
 if (bytes.len != len) {
 assert(bytes.len == len_including_sentinel);
-assert(bytes[len] == ip.indexToKey(sentinel).int.storage.u64);
+assert(bytes[@intCast(usize, len)] == ip.indexToKey(sentinel).int.storage.u64);
 }
 },
 .elems => |elems| {
 if (elems.len != len) {
 assert(elems.len == len_including_sentinel);
-assert(elems[len] == sentinel);
+assert(elems[@intCast(usize, len)] == sentinel);
 }
 },
 .repeated_elem => |elem| {
@@ -3936,7 +3936,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
 if (child == .u8_type) bytes: {
 const string_bytes_index = ip.string_bytes.items.len;
-try ip.string_bytes.ensureUnusedCapacity(gpa, len_including_sentinel + 1);
+try ip.string_bytes.ensureUnusedCapacity(gpa, @intCast(usize, len_including_sentinel + 1));
 try ip.extra.ensureUnusedCapacity(gpa, @typeInfo(Bytes).Struct.fields.len);
 switch (aggregate.storage) {
 .bytes => |bytes| ip.string_bytes.appendSliceAssumeCapacity(bytes),
@@ -3953,7 +3953,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
 .repeated_elem => |elem| switch (ip.indexToKey(elem)) {
 .undef => break :bytes,
 .int => |int| @memset(
-ip.string_bytes.addManyAsSliceAssumeCapacity(len),
+ip.string_bytes.addManyAsSliceAssumeCapacity(@intCast(usize, len)),
 @intCast(u8, int.storage.u64),
 ),
 else => unreachable,
@@ -3967,7 +3967,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
 const string = if (has_internal_null)
 @intToEnum(String, string_bytes_index)
 else
-(try ip.getOrPutTrailingString(gpa, len_including_sentinel)).toString();
+(try ip.getOrPutTrailingString(gpa, @intCast(usize, len_including_sentinel))).toString();
 ip.items.appendAssumeCapacity(.{
 .tag = .bytes,
 .data = ip.addExtraAssumeCapacity(Bytes{
@@ -3980,7 +3980,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
 try ip.extra.ensureUnusedCapacity(
 gpa,
-@typeInfo(Tag.Aggregate).Struct.fields.len + len_including_sentinel,
+@typeInfo(Tag.Aggregate).Struct.fields.len + @intCast(usize, len_including_sentinel),
 );
 ip.items.appendAssumeCapacity(.{
 .tag = .aggregate,

View File

@@ -28186,11 +28186,12 @@ fn beginComptimePtrMutation(
 const elem_abi_size_u64 = try sema.typeAbiSize(base_elem_ty);
 if (elem_abi_size_u64 < try sema.typeAbiSize(ptr_elem_ty)) {
 const elem_abi_size = try sema.usizeCast(block, src, elem_abi_size_u64);
+const elem_idx = try sema.usizeCast(block, src, elem_ptr.index);
 return .{
 .mut_decl = parent.mut_decl,
 .pointee = .{ .reinterpret = .{
 .val_ptr = val_ptr,
-.byte_offset = elem_abi_size * elem_ptr.index,
+.byte_offset = elem_abi_size * elem_idx,
 } },
 .ty = parent.ty,
 };
@@ -28223,7 +28224,7 @@ fn beginComptimePtrMutation(
 block,
 src,
 elem_ty,
-&elems[elem_ptr.index],
+&elems[@intCast(usize, elem_ptr.index)],
 ptr_elem_ty,
 parent.mut_decl,
 );
@@ -28254,7 +28255,7 @@ fn beginComptimePtrMutation(
 block,
 src,
 elem_ty,
-&elems[elem_ptr.index],
+&elems[@intCast(usize, elem_ptr.index)],
 ptr_elem_ty,
 parent.mut_decl,
 );
@@ -28265,7 +28266,7 @@ fn beginComptimePtrMutation(
 block,
 src,
 elem_ty,
-&val_ptr.castTag(.aggregate).?.data[elem_ptr.index],
+&val_ptr.castTag(.aggregate).?.data[@intCast(usize, elem_ptr.index)],
 ptr_elem_ty,
 parent.mut_decl,
 ),
@@ -28291,7 +28292,7 @@ fn beginComptimePtrMutation(
 block,
 src,
 elem_ty,
-&elems[elem_ptr.index],
+&elems[@intCast(usize, elem_ptr.index)],
 ptr_elem_ty,
 parent.mut_decl,
 );
@@ -28331,11 +28332,12 @@ fn beginComptimePtrMutation(
 const elem_abi_size_u64 = try sema.typeAbiSize(base_elem_ty);
 const elem_abi_size = try sema.usizeCast(block, src, elem_abi_size_u64);
+const elem_idx = try sema.usizeCast(block, src, elem_ptr.index);
 return ComptimePtrMutationKit{
 .mut_decl = parent.mut_decl,
 .pointee = .{ .reinterpret = .{
 .val_ptr = reinterpret.val_ptr,
-.byte_offset = reinterpret.byte_offset + elem_abi_size * elem_ptr.index,
+.byte_offset = reinterpret.byte_offset + elem_abi_size * elem_idx,
 } },
 .ty = parent.ty,
 };
@@ -28750,9 +28752,10 @@ fn beginComptimePtrLoad(
 // the pointee array directly from our parent array.
 if (load_ty.isArrayOrVector(mod) and load_ty.childType(mod).eql(elem_ty, mod)) {
 const N = try sema.usizeCast(block, src, load_ty.arrayLenIncludingSentinel(mod));
+const elem_idx = try sema.usizeCast(block, src, elem_ptr.index);
 deref.pointee = if (elem_ptr.index + N <= check_len) TypedValue{
 .ty = try Type.array(sema.arena, N, null, elem_ty, mod),
-.val = try array_tv.val.sliceArray(mod, sema.arena, elem_ptr.index, elem_ptr.index + N),
+.val = try array_tv.val.sliceArray(mod, sema.arena, elem_idx, elem_idx + N),
 } else null;
 break :blk deref;
 }
@@ -28773,7 +28776,7 @@ fn beginComptimePtrLoad(
 }
 deref.pointee = TypedValue{
 .ty = elem_ty,
-.val = try array_tv.val.elemValue(mod, elem_ptr.index),
+.val = try array_tv.val.elemValue(mod, @intCast(usize, elem_ptr.index)),
 };
 break :blk deref;
 },
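
One detail worth noting in the Sema hunks above: bare @intCast(usize, ...) asserts that the value fits, whereas the new elem_idx locals go through sema.usizeCast(block, src, ...), the helper already used for elem_abi_size and N, which reports an error at the given source location when a 64-bit value does not fit in the target's usize rather than asserting that it does. A rough free-standing analogue of that distinction (the name usizeCastSketch and its error set are made up for illustration, not the compiler's API):

const std = @import("std");

// Illustrative only; the real usizeCast lives in Sema.zig and reports the
// failure through `block` and `src` instead of returning an error value.
fn usizeCastSketch(x: u64) error{Overflow}!usize {
    // Refuse values that cannot be represented in the host's usize,
    // instead of tripping the safety check that @intCast(usize, x) would.
    return std.math.cast(usize, x) orelse error.Overflow;
}

test "usizeCastSketch only narrows values that fit" {
    // Small values always fit, regardless of pointer width.
    try std.testing.expectEqual(@as(usize, 3), try usizeCastSketch(3));
    // Out-of-range values can only occur when usize is narrower than u64.
    if (@sizeOf(usize) < @sizeOf(u64)) {
        try std.testing.expectError(error.Overflow, usizeCastSketch(std.math.maxInt(u64)));
    }
}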

View File

@@ -356,12 +356,12 @@ pub fn print(
 if (container_ty.isTuple(mod)) {
 try writer.print("[{d}]", .{field.index});
 }
-const field_name_ip = container_ty.structFieldName(field.index, mod);
+const field_name_ip = container_ty.structFieldName(@intCast(usize, field.index), mod);
 const field_name = mod.intern_pool.stringToSlice(field_name_ip);
 try writer.print(".{}", .{std.zig.fmtId(field_name)});
 },
 .Union => {
-const field_name_ip = container_ty.unionFields(mod).keys()[field.index];
+const field_name_ip = container_ty.unionFields(mod).keys()[@intCast(usize, field.index)];
 const field_name = mod.intern_pool.stringToSlice(field_name_ip);
 try writer.print(".{}", .{std.zig.fmtId(field_name)});
 },

View File

@@ -2982,8 +2982,8 @@ fn lowerParentPtr(func: *CodeGen, ptr_val: Value) InnerError!WValue {
 const offset = switch (parent_ty.zigTypeTag(mod)) {
 .Struct => switch (parent_ty.containerLayout(mod)) {
-.Packed => parent_ty.packedStructFieldByteOffset(field.index, mod),
-else => parent_ty.structFieldOffset(field.index, mod),
+.Packed => parent_ty.packedStructFieldByteOffset(@intCast(usize, field.index), mod),
+else => parent_ty.structFieldOffset(@intCast(usize, field.index), mod),
 },
 .Union => switch (parent_ty.containerLayout(mod)) {
 .Packed => 0,

View File

@@ -642,7 +642,7 @@ pub const DeclGen = struct {
 // Ensure complete type definition is visible before accessing fields.
 _ = try dg.typeToIndex(base_ty, .complete);
 const field_ty = switch (mod.intern_pool.indexToKey(base_ty.toIntern())) {
-.anon_struct_type, .struct_type, .union_type => base_ty.structFieldType(field.index, mod),
+.anon_struct_type, .struct_type, .union_type => base_ty.structFieldType(@intCast(usize, field.index), mod),
 .ptr_type => |ptr_type| switch (ptr_type.flags.size) {
 .One, .Many, .C => unreachable,
 .Slice => switch (field.index) {

View File

@@ -395,7 +395,8 @@ pub const Value = struct {
 } });
 },
 .aggregate => {
-const old_elems = val.castTag(.aggregate).?.data[0..ty.arrayLen(mod)];
+const len = @intCast(usize, ty.arrayLen(mod));
+const old_elems = val.castTag(.aggregate).?.data[0..len];
 const new_elems = try mod.gpa.alloc(InternPool.Index, old_elems.len);
 defer mod.gpa.free(new_elems);
 const ty_key = mod.intern_pool.indexToKey(ty.toIntern());
@@ -642,7 +643,7 @@ pub const Value = struct {
 const base_addr = (try field.base.toValue().getUnsignedIntAdvanced(mod, opt_sema)) orelse return null;
 const struct_ty = mod.intern_pool.typeOf(field.base).toType().childType(mod);
 if (opt_sema) |sema| try sema.resolveTypeLayout(struct_ty);
-return base_addr + struct_ty.structFieldOffset(field.index, mod);
+return base_addr + struct_ty.structFieldOffset(@intCast(usize, field.index), mod);
 },
 else => null,
 },
@@ -1798,10 +1799,10 @@ pub const Value = struct {
 .int, .eu_payload => unreachable,
 .opt_payload => |base| base.toValue().elemValue(mod, index),
 .comptime_field => |field_val| field_val.toValue().elemValue(mod, index),
-.elem => |elem| elem.base.toValue().elemValue(mod, index + elem.index),
+.elem => |elem| elem.base.toValue().elemValue(mod, index + @intCast(usize, elem.index)),
 .field => |field| if (field.base.toValue().pointerDecl(mod)) |decl_index| {
 const base_decl = mod.declPtr(decl_index);
-const field_val = try base_decl.val.fieldValue(mod, field.index);
+const field_val = try base_decl.val.fieldValue(mod, @intCast(usize, field.index));
 return field_val.elemValue(mod, index);
 } else unreachable,
 },
@@ -1921,7 +1922,7 @@ pub const Value = struct {
 .comptime_field => |comptime_field| comptime_field.toValue()
 .sliceArray(mod, arena, start, end),
 .elem => |elem| elem.base.toValue()
-.sliceArray(mod, arena, start + elem.index, end + elem.index),
+.sliceArray(mod, arena, start + @intCast(usize, elem.index), end + @intCast(usize, elem.index)),
 else => unreachable,
 },
 .aggregate => |aggregate| (try mod.intern(.{ .aggregate = .{