sema: analyze field init bodies in a second pass
This change allows struct field inits to use layout information of their own struct without causing a circular dependency.

`semaStructFields` caches the ranges of the init bodies in the `StructType` trailing data. The init bodies are then resolved by `resolveStructFieldInits`, which is called before the inits are actually required. Within the init bodies, the struct decl's instruction is repurposed to refer to the field type itself, which lets us easily rebuild the `inst_map` entries that the init body instructions need in order to refer to the field type.

Thanks to @mlugg for the guidance on this one!
This commit is contained in:
parent 234693bcbb
commit f10499be0a
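In practice, this means a field's default value can now query the layout of the struct it belongs to. A minimal sketch of the newly supported pattern, modeled on the behavior tests added in this commit (the test name and exact values are illustrative):

const std = @import("std");

const S = struct {
    // Before this change, referring to the struct's own layout here caused a
    // "struct depends on itself" circular dependency error.
    x: u32 = @sizeOf(@This()) + 1,
};

test "field default may use the struct's own layout" {
    const s: S = .{};
    try std.testing.expectEqual(@as(u32, @sizeOf(S) + 1), s.x);
}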
@@ -4951,7 +4951,10 @@ fn structDeclInner(

if (have_value) {
any_default_inits = true;
const ri: ResultInfo = .{ .rl = if (field_type == .none) .none else .{ .coerced_ty = field_type } };

// The decl_inst is used here so that we can easily reconstruct a mapping
// between it and the field type when the field inits are analyzed.
const ri: ResultInfo = .{ .rl = if (field_type == .none) .none else .{ .coerced_ty = decl_inst.toRef() } };

const default_inst = try expr(&block_scope, &namespace.base, ri, member.ast.value_expr);
if (!block_scope.endsWithNoReturn()) {

@@ -3808,6 +3808,11 @@ fn walkInstruction(
call_ctx,
);

// Inside field init bodies, the struct decl instruction is used to refer to the
// field type during the second pass of analysis.
try self.repurposed_insts.put(self.arena, inst, {});
defer _ = self.repurposed_insts.remove(inst);

var field_type_refs: std.ArrayListUnmanaged(DocData.Expr) = .{};
var field_default_refs: std.ArrayListUnmanaged(?DocData.Expr) = .{};
var field_name_indexes: std.ArrayListUnmanaged(usize) = .{};

@@ -463,6 +463,7 @@ pub const Key = union(enum) {

pub fn fieldInit(s: @This(), ip: *const InternPool, i: usize) Index {
if (s.field_inits.len == 0) return .none;
assert(s.haveFieldInits(ip));
return s.field_inits.get(ip)[i];
}

@@ -497,6 +498,14 @@ pub const Key = union(enum) {
return @ptrCast(&ip.extra.items[self.extra_index + flags_field_index]);
}

/// The returned pointer expires with any addition to the `InternPool`.
/// Asserts that the struct is packed.
pub fn packedFlagsPtr(self: @This(), ip: *const InternPool) *Tag.TypeStructPacked.Flags {
assert(self.layout == .Packed);
const flags_field_index = std.meta.fieldIndex(Tag.TypeStructPacked, "flags").?;
return @ptrCast(&ip.extra.items[self.extra_index + flags_field_index]);
}

pub fn assumeRuntimeBitsIfFieldTypesWip(s: @This(), ip: *InternPool) bool {
if (s.layout == .Packed) return false;
const flags_ptr = s.flagsPtr(ip);
@@ -546,6 +555,30 @@ pub const Key = union(enum) {
s.flagsPtr(ip).alignment_wip = false;
}

pub fn setInitsWip(s: @This(), ip: *InternPool) bool {
switch (s.layout) {
.Packed => {
const flag = &s.packedFlagsPtr(ip).field_inits_wip;
if (flag.*) return true;
flag.* = true;
return false;
},
.Auto, .Extern => {
const flag = &s.flagsPtr(ip).field_inits_wip;
if (flag.*) return true;
flag.* = true;
return false;
},
}
}

pub fn clearInitsWip(s: @This(), ip: *InternPool) void {
switch (s.layout) {
.Packed => s.packedFlagsPtr(ip).field_inits_wip = false,
.Auto, .Extern => s.flagsPtr(ip).field_inits_wip = false,
}
}

pub fn setFullyResolved(s: @This(), ip: *InternPool) bool {
if (s.layout == .Packed) return true;
const flags_ptr = s.flagsPtr(ip);
@@ -588,6 +621,20 @@ pub const Key = union(enum) {
return types.len == 0 or types[0] != .none;
}

pub fn haveFieldInits(s: @This(), ip: *const InternPool) bool {
return switch (s.layout) {
.Packed => s.packedFlagsPtr(ip).inits_resolved,
.Auto, .Extern => s.flagsPtr(ip).inits_resolved,
};
}

pub fn setHaveFieldInits(s: @This(), ip: *InternPool) void {
switch (s.layout) {
.Packed => s.packedFlagsPtr(ip).inits_resolved = true,
.Auto, .Extern => s.flagsPtr(ip).inits_resolved = true,
}
}

pub fn haveLayout(s: @This(), ip: *InternPool) bool {
return switch (s.layout) {
.Packed => s.backingIntType(ip).* != .none,
@@ -3000,6 +3047,14 @@ pub const Tag = enum(u8) {
namespace: Module.Namespace.OptionalIndex,
backing_int_ty: Index,
names_map: MapIndex,
flags: Flags,

pub const Flags = packed struct(u32) {
/// Dependency loop detection when resolving field inits.
field_inits_wip: bool,
inits_resolved: bool,
_: u30 = 0,
};
};

/// At first I thought of storing the denormalized data externally, such as...
@@ -3045,6 +3100,7 @@ pub const Tag = enum(u8) {
requires_comptime: RequiresComptime,
is_tuple: bool,
assumed_runtime_bits: bool,
assumed_pointer_aligned: bool,
has_namespace: bool,
any_comptime_fields: bool,
any_default_inits: bool,
@@ -3057,14 +3113,18 @@ pub const Tag = enum(u8) {
field_types_wip: bool,
/// Dependency loop detection when resolving struct layout.
layout_wip: bool,
/// Determines whether `size`, `alignment`, runtime field order, and
/// Indicates whether `size`, `alignment`, runtime field order, and
/// field offsets are populated.
layout_resolved: bool,
/// Dependency loop detection when resolving field inits.
field_inits_wip: bool,
/// Indicates whether `field_inits` has been resolved.
inits_resolved: bool,
// The type and all its fields have had their layout resolved, even through pointers,
// which `layout_resolved` does not ensure.
fully_resolved: bool,

_: u11 = 0,
_: u8 = 0,
};
};
};
@@ -5347,6 +5407,7 @@ pub const StructTypeInit = struct {
is_tuple: bool,
any_comptime_fields: bool,
any_default_inits: bool,
inits_resolved: bool,
any_aligned_fields: bool,
};

@@ -5399,6 +5460,10 @@ pub fn getStructType(
.namespace = ini.namespace,
.backing_int_ty = .none,
.names_map = names_map,
.flags = .{
.field_inits_wip = false,
.inits_resolved = ini.inits_resolved,
},
}),
});
ip.extra.appendNTimesAssumeCapacity(@intFromEnum(Index.none), ini.fields_len);
@@ -5431,6 +5496,7 @@ pub fn getStructType(
.requires_comptime = ini.requires_comptime,
.is_tuple = ini.is_tuple,
.assumed_runtime_bits = false,
.assumed_pointer_aligned = false,
.has_namespace = ini.namespace != .none,
.any_comptime_fields = ini.any_comptime_fields,
.any_default_inits = ini.any_default_inits,
@@ -5440,6 +5506,8 @@ pub fn getStructType(
.field_types_wip = false,
.layout_wip = false,
.layout_resolved = false,
.field_inits_wip = false,
.inits_resolved = ini.inits_resolved,
.fully_resolved = false,
},
}),
@@ -6451,6 +6519,7 @@ fn addExtraAssumeCapacity(ip: *InternPool, extra: anytype) u32 {
Tag.TypePointer.PackedOffset,
Tag.TypeUnion.Flags,
Tag.TypeStruct.Flags,
Tag.TypeStructPacked.Flags,
Tag.Variable.Flags,
=> @bitCast(@field(extra, field.name)),

@@ -6525,6 +6594,7 @@ fn extraDataTrail(ip: *const InternPool, comptime T: type, index: usize) struct
Tag.TypePointer.PackedOffset,
Tag.TypeUnion.Flags,
Tag.TypeStruct.Flags,
Tag.TypeStructPacked.Flags,
Tag.Variable.Flags,
FuncAnalysis,
=> @bitCast(int32),

src/Sema.zig

@@ -2699,6 +2699,7 @@ pub fn getStructType(
.requires_comptime = if (small.known_comptime_only) .yes else .unknown,
.any_default_inits = small.any_default_inits,
.any_comptime_fields = small.any_comptime_fields,
.inits_resolved = false,
.any_aligned_fields = small.any_aligned_fields,
});

@@ -4718,6 +4719,7 @@ fn validateStructInit(
const i: u32 = @intCast(i_usize);
if (field_ptr != .none) continue;

try sema.resolveStructFieldInits(struct_ty);
const default_val = struct_ty.structFieldDefaultValue(i, mod);
if (default_val.toIntern() == .unreachable_value) {
const field_name = struct_ty.structFieldName(i, mod).unwrap() orelse {
@@ -4773,6 +4775,8 @@ fn validateStructInit(
const air_tags = sema.air_instructions.items(.tag);
const air_datas = sema.air_instructions.items(.data);

try sema.resolveStructFieldInits(struct_ty);

// We collect the comptime field values in case the struct initialization
// ends up being comptime-known.
const field_values = try sema.arena.alloc(InternPool.Index, struct_ty.structFieldCount(mod));
@@ -17630,6 +17634,8 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
};
struct_field_vals = try gpa.alloc(InternPool.Index, struct_type.field_types.len);

try sema.resolveStructFieldInits(ty);

for (struct_field_vals, 0..) |*field_val, i| {
// TODO: write something like getCoercedInts to avoid needing to dupe
const name = if (struct_type.fieldName(ip, i).unwrap()) |name_nts|
@@ -19205,17 +19211,20 @@ fn zirStructInit(
const uncoerced_init = try sema.resolveInst(item.data.init);
const field_ty = resolved_ty.structFieldType(field_index, mod);
field_inits[field_index] = try sema.coerce(block, field_ty, uncoerced_init, field_src);
if (!is_packed) if (try resolved_ty.structFieldValueComptime(mod, field_index)) |default_value| {
const init_val = (try sema.resolveValue(field_inits[field_index])) orelse {
return sema.failWithNeededComptime(block, field_src, .{
.needed_comptime_reason = "value stored in comptime field must be comptime-known",
});
};
if (!is_packed) {
try sema.resolveStructFieldInits(resolved_ty);
if (try resolved_ty.structFieldValueComptime(mod, field_index)) |default_value| {
const init_val = (try sema.resolveValue(field_inits[field_index])) orelse {
return sema.failWithNeededComptime(block, field_src, .{
.needed_comptime_reason = "value stored in comptime field must be comptime-known",
});
};

if (!init_val.eql(default_value, resolved_ty.structFieldType(field_index, mod), mod)) {
return sema.failWithInvalidComptimeFieldStore(block, field_src, resolved_ty, field_index);
if (!init_val.eql(default_value, resolved_ty.structFieldType(field_index, mod), mod)) {
return sema.failWithInvalidComptimeFieldStore(block, field_src, resolved_ty, field_index);
}
}
};
}
}

return sema.finishStructInit(block, src, src, field_inits, resolved_ty, result_ty, is_ref);
@@ -19368,6 +19377,8 @@ fn finishStructInit(
continue;
}

try sema.resolveStructFieldInits(struct_ty);

const field_init = struct_type.fieldInit(ip, i);
if (field_init == .none) {
const field_name = struct_type.field_names.get(ip)[i];
@@ -21132,6 +21143,7 @@ fn reifyStruct(
// struct types.
.any_comptime_fields = true,
.any_default_inits = true,
.inits_resolved = true,
.any_aligned_fields = true,
});
// TODO: figure out InternPool removals for incremental compilation
@@ -26632,6 +26644,7 @@ fn finishFieldCallBind(

const container_ty = ptr_ty.childType(mod);
if (container_ty.zigTypeTag(mod) == .Struct) {
try sema.resolveStructFieldInits(container_ty);
if (try container_ty.structFieldValueComptime(mod, field_index)) |default_val| {
return .{ .direct = Air.internedToRef(default_val.toIntern()) };
}
@@ -26847,6 +26860,7 @@ fn structFieldPtrByIndex(
const ptr_field_ty = try sema.ptrType(ptr_ty_data);

if (struct_type.fieldIsComptime(ip, field_index)) {
try sema.resolveStructFieldInits(struct_ty);
const val = try mod.intern(.{ .ptr = .{
.ty = ptr_field_ty.toIntern(),
.addr = .{ .comptime_field = struct_type.field_inits.get(ip)[field_index] },
@@ -26883,6 +26897,7 @@ fn structFieldVal(
assert(struct_ty.zigTypeTag(mod) == .Struct);

try sema.resolveTypeFields(struct_ty);

switch (ip.indexToKey(struct_ty.toIntern())) {
.struct_type => |struct_type| {
if (struct_type.isTuple(ip))
@@ -26891,6 +26906,7 @@ fn structFieldVal(
const field_index = struct_type.nameIndex(ip, field_name) orelse
return sema.failWithBadStructFieldAccess(block, struct_type, field_name_src, field_name);
if (struct_type.fieldIsComptime(ip, field_index)) {
try sema.resolveStructFieldInits(struct_ty);
return Air.internedToRef(struct_type.field_inits.get(ip)[field_index]);
}

@@ -31282,6 +31298,7 @@ fn coerceTupleToStruct(
const mod = sema.mod;
const ip = &mod.intern_pool;
try sema.resolveTypeFields(struct_ty);
try sema.resolveStructFieldInits(struct_ty);

if (struct_ty.isTupleOrAnonStruct(mod)) {
return sema.coerceTupleToTuple(block, struct_ty, inst, inst_src);
@@ -34264,6 +34281,8 @@ fn resolvePeerTypesInner(
var comptime_val: ?Value = null;
for (peer_tys) |opt_ty| {
const struct_ty = opt_ty orelse continue;
try sema.resolveStructFieldInits(struct_ty);

const uncoerced_field_val = try struct_ty.structFieldValueComptime(mod, field_idx) orelse {
comptime_val = null;
break;
@@ -34605,8 +34624,7 @@ pub fn resolveStructAlignment(
// We'll guess "pointer-aligned", if the struct has an
// underaligned pointer field then some allocations
// might require explicit alignment.
//TODO write this bit and emit an error later if incorrect
//struct_type.flagsPtr(ip).assumed_pointer_aligned = true;
struct_type.flagsPtr(ip).assumed_pointer_aligned = true;
const result = Alignment.fromByteUnits(@divExact(target.ptrBitWidth(), 8));
struct_type.flagsPtr(ip).alignment = result;
return result;
@@ -34618,8 +34636,7 @@ pub fn resolveStructAlignment(
// We'll guess "pointer-aligned", if the struct has an
// underaligned pointer field then some allocations
// might require explicit alignment.
//TODO write this bit and emit an error later if incorrect
//struct_type.flagsPtr(ip).assumed_pointer_aligned = true;
struct_type.flagsPtr(ip).assumed_pointer_aligned = true;
const result = Alignment.fromByteUnits(@divExact(target.ptrBitWidth(), 8));
struct_type.flagsPtr(ip).alignment = result;
return result;
@@ -34710,6 +34727,18 @@ fn resolveStructLayout(sema: *Sema, ty: Type) CompileError!void {
return sema.failWithOwnedErrorMsg(null, msg);
}

if (struct_type.flagsPtr(ip).assumed_pointer_aligned and
big_align.compareStrict(.neq, Alignment.fromByteUnits(@divExact(mod.getTarget().ptrBitWidth(), 8))))
{
const msg = try Module.ErrorMsg.create(
sema.gpa,
mod.declPtr(struct_type.decl.unwrap().?).srcLoc(mod),
"struct layout depends on being pointer aligned",
.{},
);
return sema.failWithOwnedErrorMsg(null, msg);
}

if (struct_type.hasReorderedFields()) {
const runtime_order = struct_type.runtime_order.get(ip);

@@ -35329,6 +35358,32 @@ pub fn resolveTypeFieldsStruct(
try semaStructFields(mod, sema.arena, struct_type);
}

pub fn resolveStructFieldInits(sema: *Sema, ty: Type) CompileError!void {
const mod = sema.mod;
const ip = &mod.intern_pool;
const struct_type = mod.typeToStruct(ty) orelse return;
const owner_decl = struct_type.decl.unwrap() orelse return;

// Inits can start as resolved
if (struct_type.haveFieldInits(ip)) return;

try sema.resolveStructLayout(ty);

if (struct_type.setInitsWip(ip)) {
const msg = try Module.ErrorMsg.create(
sema.gpa,
mod.declPtr(owner_decl).srcLoc(mod),
"struct '{}' depends on itself",
.{ty.fmt(mod)},
);
return sema.failWithOwnedErrorMsg(null, msg);
}
defer struct_type.clearInitsWip(ip);

try semaStructFieldInits(mod, sema.arena, struct_type);
struct_type.setHaveFieldInits(ip);
}

pub fn resolveTypeFieldsUnion(sema: *Sema, ty: Type, union_type: InternPool.Key.UnionType) CompileError!void {
const mod = sema.mod;
const ip = &mod.intern_pool;
@@ -35510,24 +35565,18 @@ fn resolveInferredErrorSetTy(
}
}

fn semaStructFields(
mod: *Module,
arena: Allocator,
struct_type: InternPool.Key.StructType,
) CompileError!void {
const gpa = mod.gpa;
const ip = &mod.intern_pool;
const decl_index = struct_type.decl.unwrap() orelse return;
const decl = mod.declPtr(decl_index);
const namespace_index = struct_type.namespace.unwrap() orelse decl.src_namespace;
const zir = mod.namespacePtr(namespace_index).file_scope.zir;
const zir_index = struct_type.zir_index;
fn structZirInfo(zir: Zir, zir_index: Zir.Inst.Index) struct {
/// fields_len
usize,
Zir.Inst.StructDecl.Small,
/// extra_index
usize,
} {
const extended = zir.instructions.items(.data)[@intFromEnum(zir_index)].extended;
assert(extended.opcode == .struct_decl);
const small: Zir.Inst.StructDecl.Small = @bitCast(extended.small);
var extra_index: usize = extended.operand;

const src = LazySrcLoc.nodeOffset(0);
extra_index += @intFromBool(small.has_src_node);

const fields_len = if (small.has_fields_len) blk: {
@@ -35558,6 +35607,25 @@ fn semaStructFields(
while (decls_it.next()) |_| {}
extra_index = decls_it.extra_index;

return .{ fields_len, small, extra_index };
}

fn semaStructFields(
mod: *Module,
arena: Allocator,
struct_type: InternPool.Key.StructType,
) CompileError!void {
const gpa = mod.gpa;
const ip = &mod.intern_pool;
const decl_index = struct_type.decl.unwrap() orelse return;
const decl = mod.declPtr(decl_index);
const namespace_index = struct_type.namespace.unwrap() orelse decl.src_namespace;
const zir = mod.namespacePtr(namespace_index).file_scope.zir;
const zir_index = struct_type.zir_index;

const src = LazySrcLoc.nodeOffset(0);
const fields_len, const small, var extra_index = structZirInfo(zir, zir_index);

if (fields_len == 0) switch (struct_type.layout) {
.Packed => {
try semaBackingIntType(mod, struct_type);
@@ -35685,7 +35753,6 @@ fn semaStructFields(

// Next we do only types and alignments, saving the inits for a second pass,
// so that init values may depend on type layout.
const bodies_index = extra_index;

for (fields, 0..) |zir_field, field_i| {
const field_ty: Type = ty: {
@@ -35809,44 +35876,161 @@ fn semaStructFields(
extra_index += zir_field.init_body_len;
}

// TODO: there seems to be no mechanism to catch when an init depends on
// another init that hasn't been resolved.
struct_type.clearTypesWip(ip);
if (!any_inits) struct_type.setHaveFieldInits(ip);

for (comptime_mutable_decls.items) |ct_decl_index| {
const ct_decl = mod.declPtr(ct_decl_index);
_ = try ct_decl.internValue(mod);
}
}

// This logic must be kept in sync with `semaStructFields`
fn semaStructFieldInits(
mod: *Module,
arena: Allocator,
struct_type: InternPool.Key.StructType,
) CompileError!void {
const gpa = mod.gpa;
const ip = &mod.intern_pool;

assert(!struct_type.haveFieldInits(ip));

const decl_index = struct_type.decl.unwrap() orelse return;
const decl = mod.declPtr(decl_index);
const namespace_index = struct_type.namespace.unwrap() orelse decl.src_namespace;
const zir = mod.namespacePtr(namespace_index).file_scope.zir;
const zir_index = struct_type.zir_index;
const fields_len, const small, var extra_index = structZirInfo(zir, zir_index);

var comptime_mutable_decls = std.ArrayList(Decl.Index).init(gpa);
defer comptime_mutable_decls.deinit();

var sema: Sema = .{
.mod = mod,
.gpa = gpa,
.arena = arena,
.code = zir,
.owner_decl = decl,
.owner_decl_index = decl_index,
.func_index = .none,
.func_is_naked = false,
.fn_ret_ty = Type.void,
.fn_ret_ty_ies = null,
.owner_func_index = .none,
.comptime_mutable_decls = &comptime_mutable_decls,
};
defer sema.deinit();

var block_scope: Block = .{
.parent = null,
.sema = &sema,
.src_decl = decl_index,
.namespace = namespace_index,
.wip_capture_scope = try mod.createCaptureScope(decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
};
defer assert(block_scope.instructions.items.len == 0);

const Field = struct {
type_body_len: u32 = 0,
align_body_len: u32 = 0,
init_body_len: u32 = 0,
};
const fields = try sema.arena.alloc(Field, fields_len);

var any_inits = false;

{
const bits_per_field = 4;
const fields_per_u32 = 32 / bits_per_field;
const bit_bags_count = std.math.divCeil(usize, fields_len, fields_per_u32) catch unreachable;
const flags_index = extra_index;
var bit_bag_index: usize = flags_index;
extra_index += bit_bags_count;
var cur_bit_bag: u32 = undefined;
var field_i: u32 = 0;
while (field_i < fields_len) : (field_i += 1) {
if (field_i % fields_per_u32 == 0) {
cur_bit_bag = zir.extra[bit_bag_index];
bit_bag_index += 1;
}
const has_align = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;
const has_init = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 2;
const has_type_body = @as(u1, @truncate(cur_bit_bag)) != 0;
cur_bit_bag >>= 1;

if (!small.is_tuple) {
extra_index += 1;
}
extra_index += 1; // doc_comment

fields[field_i] = .{};

if (has_type_body) fields[field_i].type_body_len = zir.extra[extra_index];
extra_index += 1;

if (has_align) {
fields[field_i].align_body_len = zir.extra[extra_index];
extra_index += 1;
}
if (has_init) {
fields[field_i].init_body_len = zir.extra[extra_index];
extra_index += 1;
any_inits = true;
}
}
}

if (any_inits) {
extra_index = bodies_index;
for (fields, 0..) |zir_field, field_i| {
const field_ty = struct_type.field_types.get(ip)[field_i].toType();
extra_index += zir_field.type_body_len;
extra_index += zir_field.align_body_len;
if (zir_field.init_body_len > 0) {
const body = zir.bodySlice(extra_index, zir_field.init_body_len);
extra_index += body.len;
const init = try sema.resolveBody(&block_scope, body, zir_index);
const coerced = sema.coerce(&block_scope, field_ty, init, .unneeded) catch |err| switch (err) {
error.NeededSourceLocation => {
const init_src = mod.fieldSrcLoc(decl_index, .{
.index = field_i,
.range = .value,
}).lazy;
_ = try sema.coerce(&block_scope, field_ty, init, init_src);
unreachable;
},
else => |e| return e,
};
const default_val = (try sema.resolveValue(coerced)) orelse {
const body = zir.bodySlice(extra_index, zir_field.init_body_len);
extra_index += zir_field.init_body_len;

if (body.len == 0) continue;

// Pre-populate the type mapping the body expects to be there.
// In init bodies, the zir index of the struct itself is used
// to refer to the current field type.

const field_ty = struct_type.field_types.get(ip)[field_i].toType();
const type_ref = Air.internedToRef(field_ty.toIntern());
try sema.inst_map.ensureSpaceForInstructions(sema.gpa, &.{zir_index});
sema.inst_map.putAssumeCapacity(zir_index, type_ref);

const init = try sema.resolveBody(&block_scope, body, zir_index);
const coerced = sema.coerce(&block_scope, field_ty, init, .unneeded) catch |err| switch (err) {
error.NeededSourceLocation => {
const init_src = mod.fieldSrcLoc(decl_index, .{
.index = field_i,
.range = .value,
}).lazy;
return sema.failWithNeededComptime(&block_scope, init_src, .{
.needed_comptime_reason = "struct field default value must be comptime-known",
});
};
const field_init = try default_val.intern(field_ty, mod);
struct_type.field_inits.get(ip)[field_i] = field_init;
}
_ = try sema.coerce(&block_scope, field_ty, init, init_src);
unreachable;
},
else => |e| return e,
};
const default_val = (try sema.resolveValue(coerced)) orelse {
const init_src = mod.fieldSrcLoc(decl_index, .{
.index = field_i,
.range = .value,
}).lazy;
return sema.failWithNeededComptime(&block_scope, init_src, .{
.needed_comptime_reason = "struct field default value must be comptime-known",
});
};

const field_init = try default_val.intern(field_ty, mod);
struct_type.field_inits.get(ip)[field_i] = field_init;
}
}

for (comptime_mutable_decls.items) |ct_decl_index| {
const ct_decl = mod.declPtr(ct_decl_index);
_ = try ct_decl.internValue(mod);
@@ -36674,6 +36858,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
);
for (field_vals, 0..) |*field_val, i| {
if (struct_type.fieldIsComptime(ip, i)) {
try sema.resolveStructFieldInits(ty);
field_val.* = struct_type.field_inits.get(ip)[i];
continue;
}

@@ -2415,6 +2415,7 @@ pub const Type = struct {
for (field_vals, 0..) |*field_val, i_usize| {
const i: u32 = @intCast(i_usize);
if (struct_type.fieldIsComptime(ip, i)) {
assert(struct_type.haveFieldInits(ip));
field_val.* = struct_type.field_inits.get(ip)[i];
continue;
}
@@ -3014,6 +3015,7 @@ pub const Type = struct {
const ip = &mod.intern_pool;
switch (ip.indexToKey(ty.toIntern())) {
.struct_type => |struct_type| {
assert(struct_type.haveFieldInits(ip));
if (struct_type.fieldIsComptime(ip, index)) {
return struct_type.field_inits.get(ip)[index].toValue();
} else {

@@ -1785,3 +1785,60 @@ test "comptimeness of optional and error union payload is analyzed properly" {
const x = (try c).?.x;
try std.testing.expectEqual(3, x);
}

test "initializer uses own alignment" {
const S = struct {
x: u32 = @alignOf(@This()) + 1,
};

var s: S = .{};
try expectEqual(4, @alignOf(S));
try expectEqual(@as(usize, 5), s.x);
}

test "initializer uses own size" {
const S = struct {
x: u32 = @sizeOf(@This()) + 1,
};

var s: S = .{};
try expectEqual(4, @sizeOf(S));
try expectEqual(@as(usize, 5), s.x);
}

test "initializer takes a pointer to a variable inside its struct" {
const namespace = struct {
const S = struct {
s: *S = &S.instance,
var instance: S = undefined;
};

fn doTheTest() !void {
var foo: S = .{};
try expectEqual(&S.instance, foo.s);
}
};

try namespace.doTheTest();
comptime try namespace.doTheTest();
}

test "circular dependency through pointer field of a struct" {
const S = struct {
const StructInner = extern struct {
outer: StructOuter = std.mem.zeroes(StructOuter),
};

const StructMiddle = extern struct {
outer: ?*StructInner,
inner: ?*StructOuter,
};

const StructOuter = extern struct {
middle: StructMiddle = std.mem.zeroes(StructMiddle),
};
};
var outer: S.StructOuter = .{};
try expect(outer.middle.outer == null);
try expect(outer.middle.inner == null);
}

@@ -1869,6 +1869,126 @@ test "reinterpret packed union inside packed struct" {
try S.doTheTest();
}

test "inner struct initializer uses union layout" {
const namespace = struct {
const U = union {
a: struct {
x: u32 = @alignOf(U) + 1,
},
b: struct {
y: u16 = @sizeOf(U) + 2,
},
};
};

{
const u: namespace.U = .{ .a = .{} };
try expectEqual(4, @alignOf(namespace.U));
try expectEqual(@as(usize, 5), u.a.x);
}

{
const u: namespace.U = .{ .b = .{} };
try expectEqual(@as(usize, @sizeOf(namespace.U) + 2), u.b.y);
}
}

test "inner struct initializer uses packed union layout" {
const namespace = struct {
const U = packed union {
a: packed struct {
x: u32 = @alignOf(U) + 1,
},
b: packed struct {
y: u16 = @sizeOf(U) + 2,
},
};
};

{
const u: namespace.U = .{ .a = .{} };
try expectEqual(4, @alignOf(namespace.U));
try expectEqual(@as(usize, 5), u.a.x);
}

{
const u: namespace.U = .{ .b = .{} };
try expectEqual(@as(usize, @sizeOf(namespace.U) + 2), u.b.y);
}
}

test "extern union initialized via reintepreted struct field initializer" {
const bytes = [_]u8{ 0xaa, 0xbb, 0xcc, 0xdd };

const U = extern union {
a: u32,
b: u8,
};

const S = extern struct {
u: U = std.mem.bytesAsValue(U, &bytes).*,
};

const s: S = .{};
try expect(s.u.a == littleToNativeEndian(u32, 0xddccbbaa));
try expect(s.u.b == 0xaa);
}

test "packed union initialized via reintepreted struct field initializer" {
const bytes = [_]u8{ 0xaa, 0xbb, 0xcc, 0xdd };

const U = packed union {
a: u32,
b: u8,
};

const S = packed struct {
u: U = std.mem.bytesAsValue(U, &bytes).*,
};

var s: S = .{};
try expect(s.u.a == littleToNativeEndian(u32, 0xddccbbaa));
try expect(s.u.b == if (endian == .little) 0xaa else 0xdd);
}

test "store of comptime reinterpreted memory to extern union" {
const bytes = [_]u8{ 0xaa, 0xbb, 0xcc, 0xdd };

const U = extern union {
a: u32,
b: u8,
};

const reinterpreted = comptime b: {
var u: U = undefined;
u = std.mem.bytesAsValue(U, &bytes).*;
break :b u;
};

var u: U = reinterpreted;
try expect(u.a == littleToNativeEndian(u32, 0xddccbbaa));
try expect(u.b == 0xaa);
}

test "store of comptime reinterpreted memory to packed union" {
const bytes = [_]u8{ 0xaa, 0xbb, 0xcc, 0xdd };

const U = packed union {
a: u32,
b: u8,
};

const reinterpreted = comptime b: {
var u: U = undefined;
u = std.mem.bytesAsValue(U, &bytes).*;
break :b u;
};

var u: U = reinterpreted;
try expect(u.a == littleToNativeEndian(u32, 0xddccbbaa));
try expect(u.b == if (endian == .little) 0xaa else 0xdd);
}

test "union field is a pointer to an aligned version of itself" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
@@ -1902,3 +2022,23 @@ test "pass register-sized field as non-register-sized union" {
try S.untaggedUnion(.{ .x = x });
try S.externUnion(.{ .x = x });
}

test "circular dependency through pointer field of a union" {
const S = struct {
const UnionInner = extern struct {
outer: UnionOuter = std.mem.zeroes(UnionOuter),
};

const UnionMiddle = extern union {
outer: ?*UnionOuter,
inner: ?*UnionInner,
};

const UnionOuter = extern struct {
u: UnionMiddle = std.mem.zeroes(UnionMiddle),
};
};
var outer: S.UnionOuter = .{};
try expect(outer.u.outer == null);
try expect(outer.u.inner == null);
}

@@ -0,0 +1,11 @@
const S = struct {
next: ?*align(1) S align(128),
};

export fn entry() usize {
return @alignOf(S);
}

// error
//
// :1:11: error: struct layout depends on being pointer aligned