compiler: implement analysis-local comptime-mutable memory

This commit changes how we represent comptime-mutable memory
(`comptime var`) in the compiler in order to implement the intended
behavior that references to such memory can only exist at comptime.

It does *not* clean up the representation of mutable values, improve the
representation of comptime-known pointers, or fix the many bugs in the
comptime pointer access code. These will be future enhancements.

Comptime memory lives for the duration of a single Sema, and is not
permitted to escape that one analysis, either by becoming runtime-known
or by becoming comptime-known to other analyses. These restrictions mean
that we can represent comptime allocations not via Decl, but with state
local to Sema - specifically, the new `Sema.comptime_allocs` field. All
comptime-mutable allocations, as well as any comptime-known const allocs
containing references to such memory, live in here. This allows for
relatively fast checking of whether a value references any
comptime-mutable memory, since we need only traverse values up to
pointers: pointers to Decls can never reference comptime-mutable memory,
and pointers into `Sema.comptime_allocs` always do.
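
To make the rule concrete, here is a minimal sketch in the style of the error
test cases added later in this commit (the function names are illustrative): a
reference to a `comptime var` may not become runtime-known, but the value it
holds may.

export fn escapeToRuntime() void {
    comptime var x: u32 = 123;
    var runtime_ptr = &x; // error: runtime value contains reference to comptime var
    _ = &runtime_ptr;
}

export fn copyIsFine() u32 {
    comptime var x: u32 = 123;
    x += 1;
    return x; // fine: only the u32 value escapes, not a reference to the alloc
}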

This change exposed some faulty pointer access logic in `Value.zig`.
I've fixed the important cases, but I've left some TODOs that can
definitely be hit with sufficiently esoteric code. I
plan to resolve these by auditing all direct accesses to pointers (most
of them ought to use Sema to perform the pointer access!), but for now
this is sufficient for all realistic code and to get tests passing.

This change eliminates `Zcu.tmp_hack_arena`, instead using the Sema
arena for comptime memory mutations, which is possible since comptime
memory is now local to the current Sema.

This change should allow `Decl` to store only an `InternPool.Index`
rather than a full-blown `ty: Type, val: Value`. This commit does not
perform this refactor.
mlugg 2024-03-22 23:39:44 +00:00
parent 5c628312b1
commit 9c3670fc93
28 changed files with 886 additions and 557 deletions

View File

@ -1317,7 +1317,8 @@ pub const Cpu = struct {
for (decls, 0..) |decl, i| {
array[i] = &@field(cpus, decl.name);
}
return &array;
const finalized = array;
return &finalized;
}
};
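
This hunk, and the similar std changes below, all apply the same workaround:
the buffer being built is a `comptime var`, so a pointer to it can no longer
escape the analysis; copying it into a `const` first lets the result be
interned as immutable memory, and the address of that copy is returned
instead. A minimal sketch of the pattern, using a hypothetical helper that is
not part of this commit:

const std = @import("std");

inline fn iota(comptime n: usize) *const [n]u32 {
    comptime {
        var buf: [n]u32 = undefined;
        for (&buf, 0..) |*elem, i| elem.* = @intCast(i);
        const final = buf; // copy out of the comptime-mutable alloc before taking a pointer
        return &final;
    }
}

comptime {
    std.debug.assert(iota(3)[2] == 2);
}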

View File

@ -41,7 +41,8 @@ pub inline fn valuesFromFields(comptime E: type, comptime fields: []const EnumFi
for (&result, fields) |*r, f| {
r.* = @enumFromInt(f.value);
}
return &result;
const final = result;
return &final;
}
}

View File

@ -1829,7 +1829,8 @@ pub inline fn comptimePrint(comptime fmt: []const u8, args: anytype) *const [cou
var buf: [count(fmt, args):0]u8 = undefined;
_ = bufPrint(&buf, fmt, args) catch unreachable;
buf[buf.len] = 0;
return &buf;
const final = buf;
return &final;
}
}

View File

@ -465,7 +465,8 @@ pub fn fieldNames(comptime T: type) *const [fields(T).len][:0]const u8 {
var names: [fieldInfos.len][:0]const u8 = undefined;
// This concat can be removed with the next zig1 update.
for (&names, fieldInfos) |*name, field| name.* = field.name ++ "";
break :blk &names;
const final = names;
break :blk &final;
};
}
@ -506,7 +507,8 @@ pub fn tags(comptime T: type) *const [fields(T).len]T {
for (fieldInfos, 0..) |field, i| {
res[i] = @field(T, field.name);
}
break :blk &res;
const final = res;
break :blk &final;
};
}

View File

@ -1358,7 +1358,8 @@ pub fn utf8ToUtf16LeStringLiteral(comptime utf8: []const u8) *const [calcUtf16Le
var utf16le: [len:0]u16 = [_:0]u16{0} ** len;
const utf16le_len = utf8ToUtf16Le(&utf16le, utf8[0..]) catch |err| @compileError(err);
assert(len == utf16le_len);
break :blk &utf16le;
const final = utf16le;
break :blk &final;
};
}

View File

@ -8296,22 +8296,27 @@ fn localVarRef(
});
}
const ptr_inst = if (num_namespaces_out != 0) try tunnelThroughClosure(
gz,
ident,
num_namespaces_out,
.{ .ref = local_ptr.ptr },
.{ .token = local_ptr.token_src },
) else local_ptr.ptr;
switch (ri.rl) {
.ref, .ref_coerced_ty => {
const ptr_inst = if (num_namespaces_out != 0) try tunnelThroughClosure(
gz,
ident,
num_namespaces_out,
.{ .ref = local_ptr.ptr },
.{ .token = local_ptr.token_src },
) else local_ptr.ptr;
local_ptr.used_as_lvalue = true;
return ptr_inst;
},
else => {
const loaded = try gz.addUnNode(.load, ptr_inst, ident);
return rvalueNoCoercePreRef(gz, ri, loaded, ident);
const val_inst = if (num_namespaces_out != 0) try tunnelThroughClosure(
gz,
ident,
num_namespaces_out,
.{ .ref_load = local_ptr.ptr },
.{ .token = local_ptr.token_src },
) else try gz.addUnNode(.load, local_ptr.ptr, ident);
return rvalueNoCoercePreRef(gz, ri, val_inst, ident);
},
}
}
@ -8390,6 +8395,7 @@ fn tunnelThroughClosure(
/// The value being captured.
value: union(enum) {
ref: Zir.Inst.Ref,
ref_load: Zir.Inst.Ref,
decl_val: Zir.NullTerminatedString,
decl_ref: Zir.NullTerminatedString,
},
@ -8400,7 +8406,8 @@ fn tunnelThroughClosure(
},
) !Zir.Inst.Ref {
switch (value) {
.ref => |v| if (v.toIndex() == null) return v, // trivia value; do not need tunnel
.ref => |v| if (v.toIndex() == null) return v, // trivial value; do not need tunnel
.ref_load => |v| assert(v.toIndex() != null), // there are no constant pointer refs
.decl_val, .decl_ref => {},
}
@ -8433,6 +8440,7 @@ fn tunnelThroughClosure(
// captures as required, starting with the outermost namespace.
const root_capture = Zir.Inst.Capture.wrap(switch (value) {
.ref => |v| .{ .instruction = v.toIndex().? },
.ref_load => |v| .{ .instruction_load = v.toIndex().? },
.decl_val => |str| .{ .decl_val = str },
.decl_ref => |str| .{ .decl_ref = str },
});
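
The new `ref_load` capture covers by-value reads of a `comptime var` from a
nested namespace: the load is emitted where the alloc is still live and only
the loaded value is tunneled, since the pointer itself may not cross into
another analysis. A hypothetical example of code that exercises this path (not
taken from the commit):

fn Example() type {
    comptime var counter: u32 = 41;
    counter += 1;
    return struct {
        // Reading `counter` from this nested declaration captures the loaded
        // value (`instruction_load`) rather than a pointer to the alloc.
        pub const captured = counter;
    };
}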

View File

@ -3058,20 +3058,23 @@ pub const Inst = struct {
/// Represents a single value being captured in a type declaration's closure.
pub const Capture = packed struct(u32) {
tag: enum(u2) {
tag: enum(u3) {
/// `data` is a `u16` index into the parent closure.
nested,
/// `data` is a `Zir.Inst.Index` to an instruction whose value is being captured.
instruction,
/// `data` is a `Zir.Inst.Index` to an instruction representing an alloc whose contents is being captured.
instruction_load,
/// `data` is a `NullTerminatedString` to a decl name.
decl_val,
/// `data` is a `NullTerminatedString` to a decl name.
decl_ref,
},
data: u30,
data: u29,
pub const Unwrapped = union(enum) {
nested: u16,
instruction: Zir.Inst.Index,
instruction_load: Zir.Inst.Index,
decl_val: NullTerminatedString,
decl_ref: NullTerminatedString,
};
@ -3085,6 +3088,10 @@ pub const Inst = struct {
.tag = .instruction,
.data = @intCast(@intFromEnum(inst)),
},
.instruction_load => |inst| .{
.tag = .instruction_load,
.data = @intCast(@intFromEnum(inst)),
},
.decl_val => |str| .{
.tag = .decl_val,
.data = @intCast(@intFromEnum(str)),
@ -3099,6 +3106,7 @@ pub const Inst = struct {
return switch (cap.tag) {
.nested => .{ .nested = @intCast(cap.data) },
.instruction => .{ .instruction = @enumFromInt(cap.data) },
.instruction_load => .{ .instruction_load = @enumFromInt(cap.data) },
.decl_val => .{ .decl_val = @enumFromInt(cap.data) },
.decl_ref => .{ .decl_ref = @enumFromInt(cap.data) },
};
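
Because `Capture` is a `packed struct(u32)`, adding the fifth tag
`instruction_load` grows the tag field from 2 to 3 bits, so the payload
shrinks from 30 to 29 bits to keep the whole capture in one `u32`. A quick
illustrative check of that layout (a standalone sketch, not compiler code):

const std = @import("std");

comptime {
    const CaptureLayout = packed struct(u32) {
        tag: u3,
        data: u29,
    };
    std.debug.assert(@bitSizeOf(CaptureLayout) == 32);
    std.debug.assert(@sizeOf(CaptureLayout) == @sizeOf(u32));
}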

View File

@ -1084,9 +1084,11 @@ pub const Inst = struct {
inferred_alloc: InferredAlloc,
pub const InferredAllocComptime = struct {
decl_index: InternPool.DeclIndex,
alignment: InternPool.Alignment,
is_const: bool,
/// This is `undefined` until we encounter a `store_to_inferred_alloc`,
/// at which point the pointer is created and stored here.
ptr: InternPool.Index,
};
pub const InferredAlloc = struct {

View File

@ -1382,7 +1382,6 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil
.global_zir_cache = global_zir_cache,
.local_zir_cache = local_zir_cache,
.emit_h = emit_h,
.tmp_hack_arena = std.heap.ArenaAllocator.init(gpa),
.error_limit = error_limit,
.llvm_object = null,
};

View File

@ -389,6 +389,8 @@ pub const RuntimeIndex = enum(u32) {
}
};
pub const ComptimeAllocIndex = enum(u32) { _ };
pub const DeclIndex = std.zig.DeclIndex;
pub const OptionalDeclIndex = std.zig.OptionalDeclIndex;
@ -979,7 +981,7 @@ pub const Key = union(enum) {
const Tag = @typeInfo(Addr).Union.tag_type.?;
decl: DeclIndex,
mut_decl: MutDecl,
comptime_alloc: ComptimeAllocIndex,
anon_decl: AnonDecl,
comptime_field: Index,
int: Index,
@ -1172,20 +1174,14 @@ pub const Key = union(enum) {
const seed2 = seed + @intFromEnum(addr);
const common = asBytes(&ptr.ty);
return switch (ptr.addr) {
.decl => |x| Hash.hash(seed2, common ++ asBytes(&x)),
.mut_decl => |x| Hash.hash(
seed2,
common ++ asBytes(&x.decl) ++ asBytes(&x.runtime_index),
),
.anon_decl => |x| Hash.hash(seed2, common ++ asBytes(&x)),
inline .decl,
.comptime_alloc,
.anon_decl,
.int,
.eu_payload,
.opt_payload,
.comptime_field,
=> |int| Hash.hash(seed2, common ++ asBytes(&int)),
=> |x| Hash.hash(seed2, common ++ asBytes(&x)),
.elem, .field => |x| Hash.hash(
seed2,
@ -1452,7 +1448,7 @@ pub const Key = union(enum) {
return switch (a_info.addr) {
.decl => |a_decl| a_decl == b_info.addr.decl,
.mut_decl => |a_mut_decl| std.meta.eql(a_mut_decl, b_info.addr.mut_decl),
.comptime_alloc => |a_alloc| a_alloc == b_info.addr.comptime_alloc,
.anon_decl => |ad| ad.val == b_info.addr.anon_decl.val and
ad.orig_ty == b_info.addr.anon_decl.orig_ty,
.int => |a_int| a_int == b_info.addr.int,
@ -2787,7 +2783,7 @@ pub const Index = enum(u32) {
undef: DataIsIndex,
simple_value: struct { data: SimpleValue },
ptr_decl: struct { data: *PtrDecl },
ptr_mut_decl: struct { data: *PtrMutDecl },
ptr_comptime_alloc: struct { data: *PtrComptimeAlloc },
ptr_anon_decl: struct { data: *PtrAnonDecl },
ptr_anon_decl_aligned: struct { data: *PtrAnonDeclAligned },
ptr_comptime_field: struct { data: *PtrComptimeField },
@ -3243,8 +3239,8 @@ pub const Tag = enum(u8) {
/// data is extra index of `PtrDecl`, which contains the type and address.
ptr_decl,
/// A pointer to a decl that can be mutated at comptime.
/// data is extra index of `PtrMutDecl`, which contains the type and address.
ptr_mut_decl,
/// data is extra index of `PtrComptimeAlloc`, which contains the type and address.
ptr_comptime_alloc,
/// A pointer to an anonymous decl.
/// data is extra index of `PtrAnonDecl`, which contains the pointer type and decl value.
/// The alignment of the anonymous decl is communicated via the pointer type.
@ -3448,7 +3444,7 @@ pub const Tag = enum(u8) {
.undef => unreachable,
.simple_value => unreachable,
.ptr_decl => PtrDecl,
.ptr_mut_decl => PtrMutDecl,
.ptr_comptime_alloc => PtrComptimeAlloc,
.ptr_anon_decl => PtrAnonDecl,
.ptr_anon_decl_aligned => PtrAnonDeclAligned,
.ptr_comptime_field => PtrComptimeField,
@ -4129,10 +4125,9 @@ pub const PtrAnonDeclAligned = struct {
orig_ty: Index,
};
pub const PtrMutDecl = struct {
pub const PtrComptimeAlloc = struct {
ty: Index,
decl: DeclIndex,
runtime_index: RuntimeIndex,
index: ComptimeAllocIndex,
};
pub const PtrComptimeField = struct {
@ -4537,14 +4532,11 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
.addr = .{ .decl = info.decl },
} };
},
.ptr_mut_decl => {
const info = ip.extraData(PtrMutDecl, data);
.ptr_comptime_alloc => {
const info = ip.extraData(PtrComptimeAlloc, data);
return .{ .ptr = .{
.ty = info.ty,
.addr = .{ .mut_decl = .{
.decl = info.decl,
.runtime_index = info.runtime_index,
} },
.addr = .{ .comptime_alloc = info.index },
} };
},
.ptr_anon_decl => {
@ -5186,12 +5178,11 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
.decl = decl,
}),
}),
.mut_decl => |mut_decl| ip.items.appendAssumeCapacity(.{
.tag = .ptr_mut_decl,
.data = try ip.addExtra(gpa, PtrMutDecl{
.comptime_alloc => |alloc_index| ip.items.appendAssumeCapacity(.{
.tag = .ptr_comptime_alloc,
.data = try ip.addExtra(gpa, PtrComptimeAlloc{
.ty = ptr.ty,
.decl = mut_decl.decl,
.runtime_index = mut_decl.runtime_index,
.index = alloc_index,
}),
}),
.anon_decl => |anon_decl| ip.items.appendAssumeCapacity(
@ -7265,6 +7256,7 @@ fn addExtraAssumeCapacity(ip: *InternPool, extra: anytype) u32 {
Tag.TypePointer.VectorIndex,
TrackedInst.Index,
TrackedInst.Index.Optional,
ComptimeAllocIndex,
=> @intFromEnum(@field(extra, field.name)),
u32,
@ -7342,6 +7334,7 @@ fn extraDataTrail(ip: *const InternPool, comptime T: type, index: usize) struct
Tag.TypePointer.VectorIndex,
TrackedInst.Index,
TrackedInst.Index.Optional,
ComptimeAllocIndex,
=> @enumFromInt(int32),
u32,
@ -8144,7 +8137,7 @@ fn dumpStatsFallible(ip: *const InternPool, arena: Allocator) anyerror!void {
.simple_type => 0,
.simple_value => 0,
.ptr_decl => @sizeOf(PtrDecl),
.ptr_mut_decl => @sizeOf(PtrMutDecl),
.ptr_comptime_alloc => @sizeOf(PtrComptimeAlloc),
.ptr_anon_decl => @sizeOf(PtrAnonDecl),
.ptr_anon_decl_aligned => @sizeOf(PtrAnonDeclAligned),
.ptr_comptime_field => @sizeOf(PtrComptimeField),
@ -8275,7 +8268,7 @@ fn dumpAllFallible(ip: *const InternPool) anyerror!void {
.type_function,
.undef,
.ptr_decl,
.ptr_mut_decl,
.ptr_comptime_alloc,
.ptr_anon_decl,
.ptr_anon_decl_aligned,
.ptr_comptime_field,
@ -8690,7 +8683,7 @@ pub fn typeOf(ip: *const InternPool, index: Index) Index {
.simple_value => unreachable, // handled via Index above
inline .ptr_decl,
.ptr_mut_decl,
.ptr_comptime_alloc,
.ptr_anon_decl,
.ptr_anon_decl_aligned,
.ptr_comptime_field,
@ -8822,10 +8815,8 @@ pub fn getBackingDecl(ip: *const InternPool, val: Index) OptionalDeclIndex {
var base = @intFromEnum(val);
while (true) {
switch (ip.items.items(.tag)[base]) {
inline .ptr_decl,
.ptr_mut_decl,
=> |tag| return @enumFromInt(ip.extra.items[
ip.items.items(.data)[base] + std.meta.fieldIndex(tag.Payload(), "decl").?
.ptr_decl => return @enumFromInt(ip.extra.items[
ip.items.items(.data)[base] + std.meta.fieldIndex(PtrDecl, "decl").?
]),
inline .ptr_eu_payload,
.ptr_opt_payload,
@ -8834,8 +8825,8 @@ pub fn getBackingDecl(ip: *const InternPool, val: Index) OptionalDeclIndex {
=> |tag| base = ip.extra.items[
ip.items.items(.data)[base] + std.meta.fieldIndex(tag.Payload(), "base").?
],
inline .ptr_slice => |tag| base = ip.extra.items[
ip.items.items(.data)[base] + std.meta.fieldIndex(tag.Payload(), "ptr").?
.ptr_slice => base = ip.extra.items[
ip.items.items(.data)[base] + std.meta.fieldIndex(PtrSlice, "ptr").?
],
else => return .none,
}
@ -8847,7 +8838,7 @@ pub fn getBackingAddrTag(ip: *const InternPool, val: Index) ?Key.Ptr.Addr.Tag {
while (true) {
switch (ip.items.items(.tag)[base]) {
.ptr_decl => return .decl,
.ptr_mut_decl => return .mut_decl,
.ptr_comptime_alloc => return .comptime_alloc,
.ptr_anon_decl, .ptr_anon_decl_aligned => return .anon_decl,
.ptr_comptime_field => return .comptime_field,
.ptr_int => return .int,
@ -9023,7 +9014,7 @@ pub fn zigTypeTagOrPoison(ip: *const InternPool, index: Index) error{GenericPois
.undef,
.simple_value,
.ptr_decl,
.ptr_mut_decl,
.ptr_comptime_alloc,
.ptr_anon_decl,
.ptr_anon_decl_aligned,
.ptr_comptime_field,

View File

@ -101,12 +101,6 @@ embed_table: std.StringArrayHashMapUnmanaged(*EmbedFile) = .{},
/// is not yet implemented.
intern_pool: InternPool = .{},
/// To be eliminated in a future commit by moving more data into InternPool.
/// Current uses that must be eliminated:
/// * comptime pointer mutation
/// This memory lives until the Module is destroyed.
tmp_hack_arena: std.heap.ArenaAllocator,
/// We optimize memory usage for a compilation with no compile errors by storing the
/// error messages and mapping outside of `Decl`.
/// The ErrorMsg memory is owned by the decl, using Module's general purpose allocator.
@ -2099,7 +2093,6 @@ pub fn deinit(zcu: *Zcu) void {
}
zcu.intern_pool.deinit(gpa);
zcu.tmp_hack_arena.deinit();
}
pub fn destroyDecl(mod: *Module, decl_index: Decl.Index) void {
@ -3656,9 +3649,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !SemaDeclResult {
var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
var comptime_mutable_decls = std.ArrayList(Decl.Index).init(gpa);
defer comptime_mutable_decls.deinit();
var comptime_err_ret_trace = std.ArrayList(SrcLoc).init(gpa);
defer comptime_err_ret_trace.deinit();
@ -3674,7 +3664,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !SemaDeclResult {
.fn_ret_ty = Type.void,
.fn_ret_ty_ies = null,
.owner_func_index = .none,
.comptime_mutable_decls = &comptime_mutable_decls,
.comptime_err_ret_trace = &comptime_err_ret_trace,
.builtin_type_target_index = builtin_type_target_index,
};
@ -3704,18 +3693,12 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !SemaDeclResult {
// We'll do some other bits with the Sema. Clear the type target index just
// in case they analyze any type.
sema.builtin_type_target_index = .none;
for (comptime_mutable_decls.items) |ct_decl_index| {
const ct_decl = mod.declPtr(ct_decl_index);
_ = try ct_decl.internValue(mod);
}
const align_src: LazySrcLoc = .{ .node_offset_var_decl_align = 0 };
const section_src: LazySrcLoc = .{ .node_offset_var_decl_section = 0 };
const address_space_src: LazySrcLoc = .{ .node_offset_var_decl_addrspace = 0 };
const ty_src: LazySrcLoc = .{ .node_offset_var_decl_ty = 0 };
const init_src: LazySrcLoc = .{ .node_offset_var_decl_init = 0 };
const decl_tv = try sema.resolveConstValueAllowVariables(&block_scope, init_src, result_ref, .{
.needed_comptime_reason = "global variable initializer must be comptime-known",
});
const decl_tv = try sema.resolveFinalDeclValue(&block_scope, init_src, result_ref);
// Note this resolves the type of the Decl, not the value; if this Decl
// is a struct, for example, this resolves `type` (which needs no resolution),
@ -4572,9 +4555,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: InternPool.Index, arena: Allocato
mod.intern_pool.removeDependenciesForDepender(gpa, InternPool.Depender.wrap(.{ .func = func_index }));
var comptime_mutable_decls = std.ArrayList(Decl.Index).init(gpa);
defer comptime_mutable_decls.deinit();
var comptime_err_ret_trace = std.ArrayList(SrcLoc).init(gpa);
defer comptime_err_ret_trace.deinit();
@ -4599,7 +4579,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: InternPool.Index, arena: Allocato
.fn_ret_ty_ies = null,
.owner_func_index = func_index,
.branch_quota = @max(func.branchQuota(ip).*, Sema.default_branch_quota),
.comptime_mutable_decls = &comptime_mutable_decls,
.comptime_err_ret_trace = &comptime_err_ret_trace,
};
defer sema.deinit();
@ -4736,11 +4715,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: InternPool.Index, arena: Allocato
};
}
for (comptime_mutable_decls.items) |ct_decl_index| {
const ct_decl = mod.declPtr(ct_decl_index);
_ = try ct_decl.internValue(mod);
}
// Copy the block into place and mark that as the main block.
try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Block).Struct.fields.len +
inner_block.instructions.items.len);
@ -5632,8 +5606,7 @@ pub fn markReferencedDeclsAlive(mod: *Module, val: Value) Allocator.Error!void {
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| try mod.markDeclIndexAlive(decl),
.anon_decl => {},
.mut_decl => |mut_decl| try mod.markDeclIndexAlive(mut_decl.decl),
.int, .comptime_field => {},
.int, .comptime_field, .comptime_alloc => {},
.eu_payload, .opt_payload => |parent| try mod.markReferencedDeclsAlive(Value.fromInterned(parent)),
.elem, .field => |base_index| try mod.markReferencedDeclsAlive(Value.fromInterned(base_index.base)),
},

File diff suppressed because it is too large

View File

@ -329,13 +329,9 @@ pub fn print(
.val = Value.fromInterned(decl_val),
}, writer, level - 1, mod);
},
.mut_decl => |mut_decl| {
const decl = mod.declPtr(mut_decl.decl);
if (level == 0) return writer.print("(mut decl '{}')", .{decl.name.fmt(ip)});
return print(.{
.ty = decl.ty,
.val = decl.val,
}, writer, level - 1, mod);
.comptime_alloc => {
// TODO: we need a Sema to print this!
return writer.writeAll("(comptime alloc)");
},
.comptime_field => |field_val_ip| {
return print(.{

View File

@ -6,7 +6,8 @@ const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable;
const Target = std.Target;
const Allocator = std.mem.Allocator;
const Module = @import("Module.zig");
const Zcu = @import("Module.zig");
const Module = Zcu;
const TypedValue = @import("TypedValue.zig");
const Sema = @import("Sema.zig");
const InternPool = @import("InternPool.zig");
@ -187,24 +188,21 @@ pub fn fmtValue(val: Value, ty: Type, mod: *Module) std.fmt.Formatter(TypedValue
} };
}
/// Asserts that the value is representable as an array of bytes.
/// Returns the value as a null-terminated string stored in the InternPool.
/// Converts `val` to a null-terminated string stored in the InternPool.
/// Asserts `val` is an array of `u8`
pub fn toIpString(val: Value, ty: Type, mod: *Module) !InternPool.NullTerminatedString {
assert(ty.zigTypeTag(mod) == .Array);
assert(ty.childType(mod).toIntern() == .u8_type);
const ip = &mod.intern_pool;
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.enum_literal => |enum_literal| enum_literal,
.slice => |slice| try arrayToIpString(val, Value.fromInterned(slice.len).toUnsignedInt(mod), mod),
.aggregate => |aggregate| switch (aggregate.storage) {
.bytes => |bytes| try ip.getOrPutString(mod.gpa, bytes),
.elems => try arrayToIpString(val, ty.arrayLen(mod), mod),
.repeated_elem => |elem| {
const byte = @as(u8, @intCast(Value.fromInterned(elem).toUnsignedInt(mod)));
const len = @as(usize, @intCast(ty.arrayLen(mod)));
try ip.string_bytes.appendNTimes(mod.gpa, byte, len);
return ip.getOrPutTrailingString(mod.gpa, len);
},
return switch (mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| try ip.getOrPutString(mod.gpa, bytes),
.elems => try arrayToIpString(val, ty.arrayLen(mod), mod),
.repeated_elem => |elem| {
const byte = @as(u8, @intCast(Value.fromInterned(elem).toUnsignedInt(mod)));
const len = @as(usize, @intCast(ty.arrayLen(mod)));
try ip.string_bytes.appendNTimes(mod.gpa, byte, len);
return ip.getOrPutTrailingString(mod.gpa, len);
},
else => unreachable,
};
}
@ -606,7 +604,7 @@ fn isDeclRef(val: Value, mod: *Module) bool {
var check = val;
while (true) switch (mod.intern_pool.indexToKey(check.toIntern())) {
.ptr => |ptr| switch (ptr.addr) {
.decl, .mut_decl, .comptime_field, .anon_decl => return true,
.decl, .comptime_alloc, .comptime_field, .anon_decl => return true,
.eu_payload, .opt_payload => |base| check = Value.fromInterned(base),
.elem, .field => |base_index| check = Value.fromInterned(base_index.base),
.int => return false,
@ -1343,7 +1341,7 @@ pub fn orderAgainstZeroAdvanced(
.bool_true => .gt,
else => switch (mod.intern_pool.indexToKey(lhs.toIntern())) {
.ptr => |ptr| switch (ptr.addr) {
.decl, .mut_decl, .comptime_field => .gt,
.decl, .comptime_alloc, .comptime_field => .gt,
.int => |int| Value.fromInterned(int).orderAgainstZeroAdvanced(mod, opt_sema),
.elem => |elem| switch (try Value.fromInterned(elem.base).orderAgainstZeroAdvanced(mod, opt_sema)) {
.lt => unreachable,
@ -1532,45 +1530,34 @@ pub fn eql(a: Value, b: Value, ty: Type, mod: *Module) bool {
return a.toIntern() == b.toIntern();
}
pub fn isComptimeMutablePtr(val: Value, mod: *Module) bool {
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.slice => |slice| return Value.fromInterned(slice.ptr).isComptimeMutablePtr(mod),
pub fn canMutateComptimeVarState(val: Value, zcu: *Zcu) bool {
return switch (zcu.intern_pool.indexToKey(val.toIntern())) {
.error_union => |error_union| switch (error_union.val) {
.err_name => false,
.payload => |payload| Value.fromInterned(payload).canMutateComptimeVarState(zcu),
},
.ptr => |ptr| switch (ptr.addr) {
.mut_decl, .comptime_field => true,
.eu_payload, .opt_payload => |base_ptr| Value.fromInterned(base_ptr).isComptimeMutablePtr(mod),
.elem, .field => |base_index| Value.fromInterned(base_index.base).isComptimeMutablePtr(mod),
else => false,
.decl => false, // The value of a Decl can never reference a comptime alloc.
.int => false,
.comptime_alloc => true, // A comptime alloc is either mutable or references comptime-mutable memory.
.comptime_field => true, // Comptime field pointers are comptime-mutable, albeit only to the "correct" value.
.eu_payload, .opt_payload => |base| Value.fromInterned(base).canMutateComptimeVarState(zcu),
.anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).canMutateComptimeVarState(zcu),
.elem, .field => |base_index| Value.fromInterned(base_index.base).canMutateComptimeVarState(zcu),
},
.slice => |slice| return Value.fromInterned(slice.ptr).canMutateComptimeVarState(zcu),
.opt => |opt| switch (opt.val) {
.none => false,
else => |payload| Value.fromInterned(payload).canMutateComptimeVarState(zcu),
},
.aggregate => |aggregate| for (aggregate.storage.values()) |elem| {
if (Value.fromInterned(elem).canMutateComptimeVarState(zcu)) break true;
} else false,
.un => |un| Value.fromInterned(un.val).canMutateComptimeVarState(zcu),
else => false,
};
}
pub fn canMutateComptimeVarState(val: Value, mod: *Module) bool {
return val.isComptimeMutablePtr(mod) or switch (val.toIntern()) {
else => switch (mod.intern_pool.indexToKey(val.toIntern())) {
.error_union => |error_union| switch (error_union.val) {
.err_name => false,
.payload => |payload| Value.fromInterned(payload).canMutateComptimeVarState(mod),
},
.ptr => |ptr| switch (ptr.addr) {
.eu_payload, .opt_payload => |base| Value.fromInterned(base).canMutateComptimeVarState(mod),
.anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).canMutateComptimeVarState(mod),
.elem, .field => |base_index| Value.fromInterned(base_index.base).canMutateComptimeVarState(mod),
else => false,
},
.opt => |opt| switch (opt.val) {
.none => false,
else => |payload| Value.fromInterned(payload).canMutateComptimeVarState(mod),
},
.aggregate => |aggregate| for (aggregate.storage.values()) |elem| {
if (Value.fromInterned(elem).canMutateComptimeVarState(mod)) break true;
} else false,
.un => |un| Value.fromInterned(un.val).canMutateComptimeVarState(mod),
else => false,
},
};
}
/// Gets the decl referenced by this pointer. If the pointer does not point
/// to a decl, or if it points to some part of a decl (like field_ptr or element_ptr),
/// this function returns null.
@ -1581,7 +1568,6 @@ pub fn pointerDecl(val: Value, mod: *Module) ?InternPool.DeclIndex {
.func => |func| func.owner_decl,
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| decl,
.mut_decl => |mut_decl| mut_decl.decl,
else => null,
},
else => null,
@ -1600,7 +1586,7 @@ pub fn sliceLen(val: Value, mod: *Module) u64 {
return switch (ip.indexToKey(val.toIntern())) {
.ptr => |ptr| switch (ip.indexToKey(switch (ptr.addr) {
.decl => |decl| mod.declPtr(decl).ty.toIntern(),
.mut_decl => |mut_decl| mod.declPtr(mut_decl.decl).ty.toIntern(),
.comptime_alloc => @panic("TODO"),
.anon_decl => |anon_decl| ip.typeOf(anon_decl.val),
.comptime_field => |comptime_field| ip.typeOf(comptime_field),
else => unreachable,
@ -1621,34 +1607,38 @@ pub fn elemValue(val: Value, mod: *Module, index: usize) Allocator.Error!Value {
/// Like `elemValue`, but returns `null` instead of asserting on failure.
pub fn maybeElemValue(val: Value, mod: *Module, index: usize) Allocator.Error!?Value {
return val.maybeElemValueFull(null, mod, index);
}
pub fn maybeElemValueFull(val: Value, sema: ?*Sema, mod: *Module, index: usize) Allocator.Error!?Value {
return switch (val.ip_index) {
.none => switch (val.tag()) {
.bytes => try mod.intValue(Type.u8, val.castTag(.bytes).?.data[index]),
.repeated => val.castTag(.repeated).?.data,
.aggregate => val.castTag(.aggregate).?.data[index],
.slice => val.castTag(.slice).?.data.ptr.maybeElemValue(mod, index),
.slice => val.castTag(.slice).?.data.ptr.maybeElemValueFull(sema, mod, index),
else => null,
},
else => switch (mod.intern_pool.indexToKey(val.toIntern())) {
.undef => |ty| Value.fromInterned((try mod.intern(.{
.undef = Type.fromInterned(ty).elemType2(mod).toIntern(),
}))),
.slice => |slice| return Value.fromInterned(slice.ptr).maybeElemValue(mod, index),
.slice => |slice| return Value.fromInterned(slice.ptr).maybeElemValueFull(sema, mod, index),
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| mod.declPtr(decl).val.maybeElemValue(mod, index),
.anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).maybeElemValue(mod, index),
.mut_decl => |mut_decl| Value.fromInterned((try mod.declPtr(mut_decl.decl).internValue(mod))).maybeElemValue(mod, index),
.decl => |decl| mod.declPtr(decl).val.maybeElemValueFull(sema, mod, index),
.anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).maybeElemValueFull(sema, mod, index),
.comptime_alloc => |idx| if (sema) |s| s.getComptimeAlloc(idx).val.maybeElemValueFull(sema, mod, index) else null,
.int, .eu_payload => null,
.opt_payload => |base| Value.fromInterned(base).maybeElemValue(mod, index),
.comptime_field => |field_val| Value.fromInterned(field_val).maybeElemValue(mod, index),
.elem => |elem| Value.fromInterned(elem.base).maybeElemValue(mod, index + @as(usize, @intCast(elem.index))),
.opt_payload => |base| Value.fromInterned(base).maybeElemValueFull(sema, mod, index),
.comptime_field => |field_val| Value.fromInterned(field_val).maybeElemValueFull(sema, mod, index),
.elem => |elem| Value.fromInterned(elem.base).maybeElemValueFull(sema, mod, index + @as(usize, @intCast(elem.index))),
.field => |field| if (Value.fromInterned(field.base).pointerDecl(mod)) |decl_index| {
const base_decl = mod.declPtr(decl_index);
const field_val = try base_decl.val.fieldValue(mod, @as(usize, @intCast(field.index)));
return field_val.maybeElemValue(mod, index);
return field_val.maybeElemValueFull(sema, mod, index);
} else null,
},
.opt => |opt| Value.fromInterned(opt.val).maybeElemValue(mod, index),
.opt => |opt| Value.fromInterned(opt.val).maybeElemValueFull(sema, mod, index),
.aggregate => |aggregate| {
const len = mod.intern_pool.aggregateTypeLen(aggregate.ty);
if (index < len) return Value.fromInterned(switch (aggregate.storage) {
@ -1690,29 +1680,28 @@ pub fn isPtrToThreadLocal(val: Value, mod: *Module) bool {
// Asserts that the provided start/end are in-bounds.
pub fn sliceArray(
val: Value,
mod: *Module,
arena: Allocator,
sema: *Sema,
start: usize,
end: usize,
) error{OutOfMemory}!Value {
// TODO: write something like getCoercedInts to avoid needing to dupe
const mod = sema.mod;
return switch (val.ip_index) {
.none => switch (val.tag()) {
.slice => val.castTag(.slice).?.data.ptr.sliceArray(mod, arena, start, end),
.bytes => Tag.bytes.create(arena, val.castTag(.bytes).?.data[start..end]),
.slice => val.castTag(.slice).?.data.ptr.sliceArray(sema, start, end),
.bytes => Tag.bytes.create(sema.arena, val.castTag(.bytes).?.data[start..end]),
.repeated => val,
.aggregate => Tag.aggregate.create(arena, val.castTag(.aggregate).?.data[start..end]),
.aggregate => Tag.aggregate.create(sema.arena, val.castTag(.aggregate).?.data[start..end]),
else => unreachable,
},
else => switch (mod.intern_pool.indexToKey(val.toIntern())) {
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| try mod.declPtr(decl).val.sliceArray(mod, arena, start, end),
.mut_decl => |mut_decl| Value.fromInterned((try mod.declPtr(mut_decl.decl).internValue(mod)))
.sliceArray(mod, arena, start, end),
.decl => |decl| try mod.declPtr(decl).val.sliceArray(sema, start, end),
.comptime_alloc => |idx| sema.getComptimeAlloc(idx).val.sliceArray(sema, start, end),
.comptime_field => |comptime_field| Value.fromInterned(comptime_field)
.sliceArray(mod, arena, start, end),
.sliceArray(sema, start, end),
.elem => |elem| Value.fromInterned(elem.base)
.sliceArray(mod, arena, start + @as(usize, @intCast(elem.index)), end + @as(usize, @intCast(elem.index))),
.sliceArray(sema, start + @as(usize, @intCast(elem.index)), end + @as(usize, @intCast(elem.index))),
else => unreachable,
},
.aggregate => |aggregate| Value.fromInterned((try mod.intern(.{ .aggregate = .{
@ -1729,8 +1718,8 @@ pub fn sliceArray(
else => unreachable,
}.toIntern(),
.storage = switch (aggregate.storage) {
.bytes => .{ .bytes = try arena.dupe(u8, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.bytes[start..end]) },
.elems => .{ .elems = try arena.dupe(InternPool.Index, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.elems[start..end]) },
.bytes => .{ .bytes = try sema.arena.dupe(u8, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.bytes[start..end]) },
.elems => .{ .elems = try sema.arena.dupe(InternPool.Index, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.elems[start..end]) },
.repeated_elem => |elem| .{ .repeated_elem = elem },
},
} }))),
@ -1838,26 +1827,6 @@ pub fn isUndefDeep(val: Value, mod: *Module) bool {
return val.isUndef(mod);
}
/// Returns true if any value contained in `self` is undefined.
pub fn anyUndef(val: Value, mod: *Module) !bool {
if (val.ip_index == .none) return false;
return switch (val.toIntern()) {
.undef => true,
else => switch (mod.intern_pool.indexToKey(val.toIntern())) {
.undef => true,
.simple_value => |v| v == .undefined,
.slice => |slice| for (0..@intCast(Value.fromInterned(slice.len).toUnsignedInt(mod))) |idx| {
if (try (try val.elemValue(mod, idx)).anyUndef(mod)) break true;
} else false,
.aggregate => |aggregate| for (0..aggregate.storage.values().len) |i| {
const elem = mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.values()[i];
if (try anyUndef(Value.fromInterned(elem), mod)) break true;
} else false,
else => false,
},
};
}
/// Asserts the value is not undefined and not unreachable.
/// C pointers with an integer value of 0 are also considered null.
pub fn isNull(val: Value, mod: *Module) bool {

View File

@ -3067,14 +3067,10 @@ fn lowerParentPtr(func: *CodeGen, ptr_val: Value, offset: u32) InnerError!WValue
return func.lowerParentPtrDecl(ptr_val, decl_index, offset);
},
.anon_decl => |ad| return func.lowerAnonDeclRef(ad, offset),
.mut_decl => |mut_decl| {
const decl_index = mut_decl.decl;
return func.lowerParentPtrDecl(ptr_val, decl_index, offset);
},
.eu_payload => |tag| return func.fail("TODO: Implement lowerParentPtr for {}", .{tag}),
.int => |base| return func.lowerConstant(Value.fromInterned(base), Type.usize),
.opt_payload => |base_ptr| return func.lowerParentPtr(Value.fromInterned(base_ptr), offset),
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
.elem => |elem| {
const index = elem.index;
const elem_type = Type.fromInterned(mod.intern_pool.typeOf(elem.base)).elemType2(mod);
@ -3320,20 +3316,19 @@ fn lowerConstant(func: *CodeGen, val: Value, ty: Type) InnerError!WValue {
var ptr = ip.indexToKey(slice.ptr).ptr;
const owner_decl = while (true) switch (ptr.addr) {
.decl => |decl| break decl,
.mut_decl => |mut_decl| break mut_decl.decl,
.int, .anon_decl => return func.fail("Wasm TODO: lower slice where ptr is not owned by decl", .{}),
.opt_payload, .eu_payload => |base| ptr = ip.indexToKey(base).ptr,
.elem, .field => |base_index| ptr = ip.indexToKey(base_index.base).ptr,
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
};
return .{ .memory = try func.bin_file.lowerUnnamedConst(.{ .ty = ty, .val = val }, owner_decl) };
},
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| return func.lowerDeclRefValue(.{ .ty = ty, .val = val }, decl, 0),
.mut_decl => |mut_decl| return func.lowerDeclRefValue(.{ .ty = ty, .val = val }, mut_decl.decl, 0),
.int => |int| return func.lowerConstant(Value.fromInterned(int), Type.fromInterned(ip.typeOf(int))),
.opt_payload, .elem, .field => return func.lowerParentPtr(val, 0),
.anon_decl => |ad| return func.lowerAnonDeclRef(ad, 0),
.comptime_field, .comptime_alloc => unreachable,
else => return func.fail("Wasm TODO: lowerConstant for other const addr tag {}", .{ptr.addr}),
},
.opt => if (ty.optionalReprIsPayload(mod)) {

View File

@ -818,7 +818,7 @@ fn estimateInstructionLength(prefix: Prefix, encoding: Encoding, ops: []const Op
}
const mnemonic_to_encodings_map = init: {
@setEvalBranchQuota(4_000);
@setEvalBranchQuota(5_000);
const mnemonic_count = @typeInfo(Mnemonic).Enum.fields.len;
var mnemonic_map: [mnemonic_count][]Data = .{&.{}} ** mnemonic_count;
const encodings = @import("encodings.zig");
@ -845,5 +845,13 @@ const mnemonic_to_encodings_map = init: {
};
i.* += 1;
}
break :init mnemonic_map;
const final_storage = data_storage;
var final_map: [mnemonic_count][]const Data = .{&.{}} ** mnemonic_count;
storage_i = 0;
for (&final_map, mnemonic_map) |*value, wip_value| {
value.ptr = final_storage[storage_i..].ptr;
value.len = wip_value.len;
storage_i += value.len;
}
break :init final_map;
};

View File

@ -680,7 +680,6 @@ fn lowerParentPtr(
const ptr = mod.intern_pool.indexToKey(parent_ptr).ptr;
return switch (ptr.addr) {
.decl => |decl| try lowerDeclRef(bin_file, src_loc, decl, code, debug_output, reloc_info),
.mut_decl => |md| try lowerDeclRef(bin_file, src_loc, md.decl, code, debug_output, reloc_info),
.anon_decl => |ad| try lowerAnonDeclRef(bin_file, src_loc, ad, code, debug_output, reloc_info),
.int => |int| try generateSymbol(bin_file, src_loc, .{
.ty = Type.usize,
@ -756,7 +755,7 @@ fn lowerParentPtr(
}),
);
},
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
};
}
@ -1089,7 +1088,6 @@ pub fn genTypedValue(
if (!typed_value.ty.isSlice(zcu)) switch (zcu.intern_pool.indexToKey(typed_value.val.toIntern())) {
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| return genDeclRef(lf, src_loc, typed_value, decl),
.mut_decl => |mut_decl| return genDeclRef(lf, src_loc, typed_value, mut_decl.decl),
else => {},
},
else => {},

View File

@ -698,7 +698,6 @@ pub const DeclGen = struct {
const ptr = mod.intern_pool.indexToKey(ptr_val).ptr;
switch (ptr.addr) {
.decl => |d| try dg.renderDeclValue(writer, ptr_ty, Value.fromInterned(ptr_val), d, location),
.mut_decl => |md| try dg.renderDeclValue(writer, ptr_ty, Value.fromInterned(ptr_val), md.decl, location),
.anon_decl => |anon_decl| try dg.renderAnonDeclValue(writer, ptr_ty, Value.fromInterned(ptr_val), anon_decl, location),
.int => |int| {
try writer.writeByte('(');
@ -795,7 +794,7 @@ pub const DeclGen = struct {
},
}
},
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
}
}
@ -1229,7 +1228,6 @@ pub const DeclGen = struct {
},
.ptr => |ptr| switch (ptr.addr) {
.decl => |d| try dg.renderDeclValue(writer, ty, val, d, location),
.mut_decl => |md| try dg.renderDeclValue(writer, ty, val, md.decl, location),
.anon_decl => |decl_val| try dg.renderAnonDeclValue(writer, ty, val, decl_val, location),
.int => |int| {
try writer.writeAll("((");
@ -1243,7 +1241,7 @@ pub const DeclGen = struct {
.elem,
.field,
=> try dg.renderParentPtr(writer, val.ip_index, location),
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
},
.opt => |opt| {
const payload_ty = ty.optionalChild(mod);

View File

@ -3808,7 +3808,6 @@ pub const Object = struct {
},
.ptr => |ptr| return switch (ptr.addr) {
.decl => |decl| try o.lowerDeclRefValue(ty, decl),
.mut_decl => |mut_decl| try o.lowerDeclRefValue(ty, mut_decl.decl),
.anon_decl => |anon_decl| try o.lowerAnonDeclRef(ty, anon_decl),
.int => |int| try o.lowerIntAsPtr(int),
.eu_payload,
@ -3816,7 +3815,7 @@ pub const Object = struct {
.elem,
.field,
=> try o.lowerParentPtr(val),
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
},
.slice => |slice| return o.builder.structConst(try o.lowerType(ty), &.{
try o.lowerValue(slice.ptr),
@ -4274,7 +4273,6 @@ pub const Object = struct {
const ptr = ip.indexToKey(ptr_val.toIntern()).ptr;
return switch (ptr.addr) {
.decl => |decl| try o.lowerParentPtrDecl(decl),
.mut_decl => |mut_decl| try o.lowerParentPtrDecl(mut_decl.decl),
.anon_decl => |ad| try o.lowerAnonDeclRef(Type.fromInterned(ad.orig_ty), ad),
.int => |int| try o.lowerIntAsPtr(int),
.eu_payload => |eu_ptr| {
@ -4311,7 +4309,7 @@ pub const Object = struct {
return o.builder.gepConst(.inbounds, try o.lowerType(opt_ty), parent_ptr, null, &.{ .@"0", .@"0" });
},
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
.elem => |elem_ptr| {
const parent_ptr = try o.lowerParentPtr(Value.fromInterned(elem_ptr.base));
const elem_ty = Type.fromInterned(ip.typeOf(elem_ptr.base)).elemType2(mod);

View File

@ -1105,7 +1105,6 @@ const DeclGen = struct {
const mod = self.module;
switch (mod.intern_pool.indexToKey(ptr_val.toIntern()).ptr.addr) {
.decl => |decl| return try self.constantDeclRef(ptr_ty, decl),
.mut_decl => |decl_mut| return try self.constantDeclRef(ptr_ty, decl_mut.decl),
.anon_decl => |anon_decl| return try self.constantAnonDeclRef(ptr_ty, anon_decl),
.int => |int| {
const ptr_id = self.spv.allocId();
@ -1121,7 +1120,7 @@ const DeclGen = struct {
},
.eu_payload => unreachable, // TODO
.opt_payload => unreachable, // TODO
.comptime_field => unreachable,
.comptime_field, .comptime_alloc => unreachable,
.elem => |elem_ptr| {
const parent_ptr_ty = Type.fromInterned(mod.intern_pool.typeOf(elem_ptr.base));
const parent_ptr_id = try self.constantPtr(parent_ptr_ty, Value.fromInterned(elem_ptr.base));

View File

@ -109,11 +109,14 @@ const Command = enum {
fn fromString(s: []const u8) ?Command {
inline for (@typeInfo(Command).Enum.fields) |field| {
comptime var buf: [field.name.len]u8 = undefined;
inline for (field.name, 0..) |c, i| {
buf[i] = comptime std.ascii.toUpper(c);
}
if (std.mem.eql(u8, &buf, s)) return @field(Command, field.name);
const upper_name = n: {
comptime var buf: [field.name.len]u8 = undefined;
inline for (field.name, 0..) |c, i| {
buf[i] = comptime std.ascii.toUpper(c);
}
break :n buf;
};
if (std.mem.eql(u8, &upper_name, s)) return @field(Command, field.name);
}
return null;
}

View File

@ -2810,6 +2810,10 @@ const Writer = struct {
switch (capture.unwrap()) {
.nested => |i| return stream.print("[{d}]", .{i}),
.instruction => |inst| return self.writeInstIndex(stream, inst),
.instruction_load => |ptr_inst| {
try stream.writeAll("load ");
try self.writeInstIndex(stream, ptr_inst);
},
.decl_val => |str| try stream.print("decl_val \"{}\"", .{
std.zig.fmtEscapes(self.code.nullTerminatedString(str)),
}),

View File

@ -586,6 +586,8 @@ fn overaligned_fn() align(0x1000) i32 {
}
test "comptime alloc alignment" {
// TODO: it's impossible to test this in Zig today, since comptime vars do not have runtime addresses.
if (true) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

View File

@ -461,7 +461,7 @@ test "write empty array to end" {
array[5..5].* = .{};
array[5..5].* = [0]u8{};
array[5..5].* = [_]u8{};
try testing.expectEqualStrings("hello", &array);
comptime std.debug.assert(std.mem.eql(u8, "hello", &array));
}
fn doublePtrTest() !void {

View File

@ -1211,7 +1211,7 @@ test "storing an array of type in a field" {
const S = struct {
fn doTheTest() void {
comptime var foobar = Foobar.foo();
const foobar = Foobar.foo();
foo(foobar.str[0..10]);
}
const Foobar = struct {

View File

@ -0,0 +1,65 @@
var runtime_int: u32 = 123;
export fn foo() void {
comptime var x: u32 = 123;
var runtime = &x;
_ = &runtime;
}
export fn bar() void {
const S = struct { u32, *const u32 };
comptime var x: u32 = 123;
const runtime: S = .{ runtime_int, &x };
_ = runtime;
}
export fn qux() void {
const S = struct { a: u32, b: *const u32 };
comptime var x: u32 = 123;
const runtime: S = .{ .a = runtime_int, .b = &x };
_ = runtime;
}
export fn baz() void {
const S = struct {
fn f(_: *const u32) void {}
};
comptime var x: u32 = 123;
S.f(&x);
}
export fn faz() void {
const S = struct {
fn f(_: anytype) void {}
};
comptime var x: u32 = 123;
S.f(&x);
}
export fn boo() *const u32 {
comptime var x: u32 = 123;
return &x;
}
export fn qar() void {
comptime var x: u32 = 123;
const y = if (runtime_int == 123) &x else undefined;
_ = y;
}
// error
//
// :5:19: error: runtime value contains reference to comptime var
// :5:19: note: comptime var pointers are not available at runtime
// :12:40: error: runtime value contains reference to comptime var
// :12:40: note: comptime var pointers are not available at runtime
// :19:50: error: runtime value contains reference to comptime var
// :19:50: note: comptime var pointers are not available at runtime
// :28:9: error: runtime value contains reference to comptime var
// :28:9: note: comptime var pointers are not available at runtime
// :36:9: error: runtime value contains reference to comptime var
// :36:9: note: comptime var pointers are not available at runtime
// :41:12: error: runtime value contains reference to comptime var
// :41:12: note: comptime var pointers are not available at runtime
// :46:39: error: runtime value contains reference to comptime var
// :46:39: note: comptime var pointers are not available at runtime

View File

@ -0,0 +1,49 @@
export const a: *u32 = a: {
var x: u32 = 123;
break :a &x;
};
export const b: [1]*u32 = b: {
var x: u32 = 123;
break :b .{&x};
};
export const c: *[1]u32 = c: {
var x: u32 = 123;
break :c (&x)[0..1];
};
export const d: *anyopaque = d: {
var x: u32 = 123;
break :d &x;
};
const S = extern struct { ptr: *u32 };
export const e: S = e: {
var x: u32 = 123;
break :e .{ .ptr = &x };
};
// The pointer constness shouldn't matter - *any* reference to a comptime var is illegal in a global's value.
export const f: *const u32 = f: {
var x: u32 = 123;
break :f &x;
};
// The pointer itself doesn't refer to a comptime var, but from it you can derive a pointer which does.
export const g: *const *const u32 = g: {
const valid: u32 = 123;
var invalid: u32 = 123;
const aggregate: [2]*const u32 = .{ &valid, &invalid };
break :g &aggregate[0];
};
// error
//
// :1:27: error: global variable contains reference to comptime var
// :6:30: error: global variable contains reference to comptime var
// :11:30: error: global variable contains reference to comptime var
// :16:33: error: global variable contains reference to comptime var
// :22:24: error: global variable contains reference to comptime var
// :28:33: error: global variable contains reference to comptime var
// :34:40: error: global variable contains reference to comptime var

View File

@ -23,10 +23,13 @@ comptime {
pub fn main() !void {}
// TODO: the output here has been regressed by #19414.
// Restoring useful output here will require providing a Sema to TypedValue.print.
// error
//
// :20:5: error: found compile log statement
//
// Compile Log Output:
// @as([]i32, .{ 1, 2 })
// @as([]i32, .{ 3, 4 })
// @as([]i32, .{ (reinterpreted data) })
// @as([]i32, .{ (reinterpreted data) })