Merge pull request #17276 from ziglang/anon-decls

compiler: start handling anonymous decls differently

commit 398db54434
@@ -3499,6 +3499,7 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
.is_naked_fn = false,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = .{},
.anon_decl_deps = .{},
};
defer {
dg.ctypes.deinit(gpa);

@@ -1074,6 +1074,7 @@ pub const Key = union(enum) {

decl: Module.Decl.Index,
mut_decl: MutDecl,
anon_decl: Index,
comptime_field: Index,
int: Index,
eu_payload: Index,
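The hunk above adds an `anon_decl` variant to `Key.Ptr.Addr` next to `decl` and `mut_decl`: a constant pointer can now be keyed directly by the interned value it points at, with no `Module.Decl` behind it. The standalone sketch below is not part of this commit; `ValueIndex`, `DeclIndex`, and `Addr` are simplified stand-ins for the real InternPool types, meant only to illustrate the shape of the union and why only the decl-backed variants still need a named declaration.

const std = @import("std");

// Simplified stand-ins for the real InternPool handles; in the compiler these
// are dense indices into interned storage.
const ValueIndex = enum(u32) { _ };
const DeclIndex = enum(u32) { _ };

// Rough model of the Key.Ptr.Addr shape after this change: a pointer may be
// backed directly by an interned value (`anon_decl`) instead of a Decl.
const Addr = union(enum) {
    decl: DeclIndex,
    mut_decl: struct { decl: DeclIndex, runtime_index: u32 },
    anon_decl: ValueIndex,
    int: ValueIndex,
};

fn needsNamedDecl(addr: Addr) bool {
    return switch (addr) {
        .decl, .mut_decl => true, // still routed through a Module.Decl
        .anon_decl, .int => false, // fully described by the interned value
    };
}

pub fn main() void {
    const a: Addr = .{ .anon_decl = @as(ValueIndex, @enumFromInt(42)) };
    std.debug.print("needs a named Decl: {}\n", .{needsNamedDecl(a)});
}
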
@@ -1230,10 +1231,12 @@ pub const Key = union(enum) {
asBytes(&x.decl) ++ asBytes(&x.runtime_index),
),

.int, .eu_payload, .opt_payload, .comptime_field => |int| Hash.hash(
seed2,
asBytes(&int),
),
.anon_decl,
.int,
.eu_payload,
.opt_payload,
.comptime_field,
=> |int| Hash.hash(seed2, asBytes(&int)),

.elem, .field => |x| Hash.hash(
seed2,
@@ -1497,6 +1500,7 @@ pub const Key = union(enum) {
return switch (a_info.addr) {
.decl => |a_decl| a_decl == b_info.addr.decl,
.mut_decl => |a_mut_decl| std.meta.eql(a_mut_decl, b_info.addr.mut_decl),
.anon_decl => |a_decl| a_decl == b_info.addr.anon_decl,
.int => |a_int| a_int == b_info.addr.int,
.eu_payload => |a_eu_payload| a_eu_payload == b_info.addr.eu_payload,
.opt_payload => |a_opt_payload| a_opt_payload == b_info.addr.opt_payload,
@@ -2123,6 +2127,7 @@ pub const Index = enum(u32) {
simple_value: struct { data: SimpleValue },
ptr_decl: struct { data: *PtrDecl },
ptr_mut_decl: struct { data: *PtrMutDecl },
ptr_anon_decl: struct { data: *PtrAnonDecl },
ptr_comptime_field: struct { data: *PtrComptimeField },
ptr_int: struct { data: *PtrBase },
ptr_eu_payload: struct { data: *PtrBase },
@@ -2572,6 +2577,9 @@ pub const Tag = enum(u8) {
/// A pointer to a decl that can be mutated at comptime.
/// data is extra index of `PtrMutDecl`, which contains the type and address.
ptr_mut_decl,
/// A pointer to an anonymous decl.
/// data is extra index of `PtrAnonDecl`, which contains the type and decl value.
ptr_anon_decl,
/// data is extra index of `PtrComptimeField`, which contains the pointer type and field value.
ptr_comptime_field,
/// A pointer with an integer value.
@@ -2767,6 +2775,7 @@ pub const Tag = enum(u8) {
.simple_value => unreachable,
.ptr_decl => PtrDecl,
.ptr_mut_decl => PtrMutDecl,
.ptr_anon_decl => PtrAnonDecl,
.ptr_comptime_field => PtrComptimeField,
.ptr_int => PtrBase,
.ptr_eu_payload => PtrBase,
@@ -3364,6 +3373,11 @@ pub const PtrDecl = struct {
decl: Module.Decl.Index,
};

pub const PtrAnonDecl = struct {
ty: Index,
val: Index,
};

pub const PtrMutDecl = struct {
ty: Index,
decl: Module.Decl.Index,
@@ -3713,6 +3727,13 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
} },
} };
},
.ptr_anon_decl => {
const info = ip.extraData(PtrAnonDecl, data);
return .{ .ptr = .{
.ty = info.ty,
.addr = .{ .anon_decl = info.val },
} };
},
.ptr_comptime_field => {
const info = ip.extraData(PtrComptimeField, data);
return .{ .ptr = .{
@@ -3790,6 +3811,9 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
.runtime_index = sub_info.runtime_index,
} };
},
.ptr_anon_decl => .{
.anon_decl = ip.extraData(PtrAnonDecl, ptr_item.data).val,
},
.ptr_comptime_field => .{
.comptime_field = ip.extraData(PtrComptimeField, ptr_item.data).field_val,
},
@@ -4542,6 +4566,13 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
.runtime_index = mut_decl.runtime_index,
}),
}),
.anon_decl => |anon_decl| ip.items.appendAssumeCapacity(.{
.tag = .ptr_anon_decl,
.data = try ip.addExtra(gpa, PtrAnonDecl{
.ty = ptr.ty,
.val = anon_decl,
}),
}),
.comptime_field => |field_val| {
assert(field_val != .none);
ip.items.appendAssumeCapacity(.{
@@ -7147,6 +7178,7 @@ fn dumpStatsFallible(ip: *const InternPool, arena: Allocator) anyerror!void {
.simple_value => 0,
.ptr_decl => @sizeOf(PtrDecl),
.ptr_mut_decl => @sizeOf(PtrMutDecl),
.ptr_anon_decl => @sizeOf(PtrAnonDecl),
.ptr_comptime_field => @sizeOf(PtrComptimeField),
.ptr_int => @sizeOf(PtrBase),
.ptr_eu_payload => @sizeOf(PtrBase),
@@ -7276,6 +7308,7 @@ fn dumpAllFallible(ip: *const InternPool) anyerror!void {
.runtime_value,
.ptr_decl,
.ptr_mut_decl,
.ptr_anon_decl,
.ptr_comptime_field,
.ptr_int,
.ptr_eu_payload,
@@ -7656,6 +7689,7 @@ pub fn typeOf(ip: *const InternPool, index: Index) Index {

inline .ptr_decl,
.ptr_mut_decl,
.ptr_anon_decl,
.ptr_comptime_field,
.ptr_int,
.ptr_eu_payload,
@@ -7816,6 +7850,7 @@ pub fn getBackingAddrTag(ip: *const InternPool, val: Index) ?Key.Ptr.Addr.Tag {
switch (ip.items.items(.tag)[base]) {
.ptr_decl => return .decl,
.ptr_mut_decl => return .mut_decl,
.ptr_anon_decl => return .anon_decl,
.ptr_comptime_field => return .comptime_field,
.ptr_int => return .int,
inline .ptr_eu_payload,
@@ -7991,6 +8026,7 @@ pub fn zigTypeTagOrPoison(ip: *const InternPool, index: Index) error{GenericPois
.simple_value,
.ptr_decl,
.ptr_mut_decl,
.ptr_anon_decl,
.ptr_comptime_field,
.ptr_int,
.ptr_eu_payload,

@@ -109,9 +109,6 @@ comptime_capture_scopes: std.AutoArrayHashMapUnmanaged(CaptureScope.Key, InternP
/// This memory lives until the Module is destroyed.
tmp_hack_arena: std.heap.ArenaAllocator,

/// This is currently only used for string literals.
memoized_decls: std.AutoHashMapUnmanaged(InternPool.Index, Decl.Index) = .{},

/// We optimize memory usage for a compilation with no compile errors by storing the
/// error messages and mapping outside of `Decl`.
/// The ErrorMsg memory is owned by the decl, using Module's general purpose allocator.
@@ -2627,7 +2624,6 @@ pub fn deinit(mod: *Module) void {
mod.global_assembly.deinit(gpa);
mod.reference_table.deinit(gpa);

mod.memoized_decls.deinit(gpa);
mod.intern_pool.deinit(gpa);
mod.tmp_hack_arena.deinit();

@@ -5814,6 +5810,7 @@ pub fn markReferencedDeclsAlive(mod: *Module, val: Value) Allocator.Error!void {
.ptr => |ptr| {
switch (ptr.addr) {
.decl => |decl| try mod.markDeclIndexAlive(decl),
.anon_decl => {},
.mut_decl => |mut_decl| try mod.markDeclIndexAlive(mut_decl.decl),
.int, .comptime_field => {},
.eu_payload, .opt_payload => |parent| try mod.markReferencedDeclsAlive(parent.toValue()),

src/Sema.zig
@@ -1091,7 +1091,7 @@ fn analyzeBodyInner(
.slice_sentinel => try sema.zirSliceSentinel(block, inst),
.slice_start => try sema.zirSliceStart(block, inst),
.slice_length => try sema.zirSliceLength(block, inst),
.str => try sema.zirStr(block, inst),
.str => try sema.zirStr(inst),
.switch_block => try sema.zirSwitchBlock(block, inst, false),
.switch_block_ref => try sema.zirSwitchBlock(block, inst, true),
.type_info => try sema.zirTypeInfo(block, inst),
@@ -2185,7 +2185,7 @@ fn resolveMaybeUndefValIntable(sema: *Sema, inst: Air.Inst.Ref) CompileError!?Va
if (val.ip_index == .none) return val;
if (sema.mod.intern_pool.isVariable(val.toIntern())) return null;
if (sema.mod.intern_pool.getBackingAddrTag(val.toIntern())) |addr| switch (addr) {
.decl, .mut_decl, .comptime_field => return null,
.decl, .anon_decl, .mut_decl, .comptime_field => return null,
.int => {},
.eu_payload, .opt_payload, .elem, .field => unreachable,
};
@@ -5501,38 +5501,40 @@ fn zirStoreNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!v
return sema.storePtr2(block, src, ptr, ptr_src, operand, operand_src, air_tag);
}

fn zirStr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const tracy = trace(@src());
defer tracy.end();

fn zirStr(sema: *Sema, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const bytes = sema.code.instructions.items(.data)[inst].str.get(sema.code);
return sema.addStrLit(block, bytes);
return sema.addStrLitNoAlias(bytes);
}

fn addStrLit(sema: *Sema, block: *Block, bytes: []const u8) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const gpa = sema.gpa;
// TODO: write something like getCoercedInts to avoid needing to dupe
fn addStrLit(sema: *Sema, bytes: []const u8) CompileError!Air.Inst.Ref {
const duped_bytes = try sema.arena.dupe(u8, bytes);
const ty = try mod.arrayType(.{
return addStrLitNoAlias(sema, duped_bytes);
}

/// Safe to call when `bytes` does not point into `InternPool`.
fn addStrLitNoAlias(sema: *Sema, bytes: []const u8) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const array_ty = try mod.arrayType(.{
.len = bytes.len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const val = try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
.storage = .{ .bytes = duped_bytes },
.ty = array_ty.toIntern(),
.storage = .{ .bytes = bytes },
} });
const gop = try mod.memoized_decls.getOrPut(gpa, val);
if (!gop.found_existing) {
const new_decl_index = try mod.createAnonymousDecl(block, .{
.ty = ty,
.val = val.toValue(),
});
gop.value_ptr.* = new_decl_index;
try mod.finalizeAnonDecl(new_decl_index);
}
return sema.analyzeDeclRef(gop.value_ptr.*);
const ptr_ty = try sema.ptrType(.{
.child = array_ty.toIntern(),
.flags = .{
.alignment = .none,
.is_const = true,
.address_space = .generic,
},
});
return Air.internedToRef((try mod.intern(.{ .ptr = .{
.ty = ptr_ty.toIntern(),
.addr = .{ .anon_decl = val },
} })));
}

fn zirInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
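With this hunk, `zirStr` no longer needs a block: `addStrLitNoAlias` interns the bytes as an array aggregate and returns a constant pointer whose address is `.anon_decl = val`, instead of consulting `mod.memoized_decls` and creating an anonymous `Decl`. As a small user-level illustration (ordinary Zig, not compiler code) of what such a pointer represents, a string literal is already a comptime-known pointer to anonymous, sentinel-terminated constant data:

const std = @import("std");

pub fn main() void {
    // A string literal is a pointer to an anonymous constant array with a 0
    // sentinel; after this change the compiler models that pointer with
    // `.addr = .anon_decl` over the interned bytes rather than a memoized Decl.
    const greeting = "hello";
    comptime std.debug.assert(@TypeOf(greeting) == *const [5:0]u8);
    std.debug.print("{s} has length {d}\n", .{ greeting, greeting.len });
}
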
@@ -12907,7 +12909,7 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op
try sema.zirSaveErrRetIndex(block, inst);
continue;
},
.str => try sema.zirStr(block, inst),
.str => try sema.zirStr(inst),
.as_node => try sema.zirAsNode(block, inst),
.field_val => try sema.zirFieldVal(block, inst),
.@"unreachable" => {
@@ -20170,7 +20172,7 @@ fn zirErrorName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A

if (try sema.resolveDefinedValue(block, operand_src, operand)) |val| {
const err_name = sema.mod.intern_pool.indexToKey(val.toIntern()).err.name;
return sema.addStrLit(block, sema.mod.intern_pool.stringToSlice(err_name));
return sema.addStrLit(sema.mod.intern_pool.stringToSlice(err_name));
}

// Similar to zirTagName, we have special AIR instruction for the error name in case an optimimzation pass
@@ -20288,7 +20290,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
.EnumLiteral => {
const val = try sema.resolveConstValue(block, .unneeded, operand, undefined);
const tag_name = ip.indexToKey(val.toIntern()).enum_literal;
return sema.addStrLit(block, ip.stringToSlice(tag_name));
return sema.addStrLit(ip.stringToSlice(tag_name));
},
.Enum => operand_ty,
.Union => operand_ty.unionTagType(mod) orelse {
@@ -20330,7 +20332,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
};
// TODO: write something like getCoercedInts to avoid needing to dupe
const field_name = enum_ty.enumFieldName(field_index, mod);
return sema.addStrLit(block, ip.stringToSlice(field_name));
return sema.addStrLit(ip.stringToSlice(field_name));
}
try sema.requireRuntimeBlock(block, src, operand_src);
if (block.wantSafety() and sema.mod.backendSupportsFeature(.is_named_enum_value)) {
@@ -29859,7 +29861,7 @@ fn beginComptimePtrMutation(
const mod = sema.mod;
const ptr = mod.intern_pool.indexToKey(ptr_val.toIntern()).ptr;
switch (ptr.addr) {
.decl, .int => unreachable, // isComptimeMutablePtr has been checked already
.decl, .anon_decl, .int => unreachable, // isComptimeMutablePtr has been checked already
.mut_decl => |mut_decl| {
const decl = mod.declPtr(mut_decl.decl);
return sema.beginComptimePtrMutationInner(block, src, decl.ty, &decl.val, ptr_elem_ty, mut_decl);
@@ -30455,9 +30457,10 @@ fn beginComptimePtrLoad(
maybe_array_ty: ?Type,
) ComptimePtrLoadError!ComptimePtrLoadKit {
const mod = sema.mod;
const ip = &mod.intern_pool;
const target = mod.getTarget();

var deref: ComptimePtrLoadKit = switch (mod.intern_pool.indexToKey(ptr_val.toIntern())) {
var deref: ComptimePtrLoadKit = switch (ip.indexToKey(ptr_val.toIntern())) {
.ptr => |ptr| switch (ptr.addr) {
.decl, .mut_decl => blk: {
const decl_index = switch (ptr.addr) {
@@ -30478,9 +30481,21 @@ fn beginComptimePtrLoad(
.ty_without_well_defined_layout = if (!layout_defined) decl.ty else null,
};
},
.anon_decl => |decl_val| blk: {
if (decl_val.toValue().getVariable(mod) != null) return error.RuntimeLoad;
const decl_ty = ip.typeOf(decl_val).toType();
const decl_tv: TypedValue = .{ .ty = decl_ty, .val = decl_val.toValue() };
const layout_defined = decl_ty.hasWellDefinedLayout(mod);
break :blk ComptimePtrLoadKit{
.parent = if (layout_defined) .{ .tv = decl_tv, .byte_offset = 0 } else null,
.pointee = decl_tv,
.is_mutable = false,
.ty_without_well_defined_layout = if (!layout_defined) decl_ty else null,
};
},
.int => return error.RuntimeLoad,
.eu_payload, .opt_payload => |container_ptr| blk: {
const container_ty = mod.intern_pool.typeOf(container_ptr).toType().childType(mod);
const container_ty = ip.typeOf(container_ptr).toType().childType(mod);
const payload_ty = switch (ptr.addr) {
.eu_payload => container_ty.errorUnionPayload(mod),
.opt_payload => container_ty.optionalChild(mod),
@@ -30502,13 +30517,13 @@ fn beginComptimePtrLoad(
const payload_val = switch (tv.val.ip_index) {
.none => tv.val.cast(Value.Payload.SubValue).?.data,
.null_value => return sema.fail(block, src, "attempt to use null value", .{}),
else => switch (mod.intern_pool.indexToKey(tv.val.toIntern())) {
else => switch (ip.indexToKey(tv.val.toIntern())) {
.error_union => |error_union| switch (error_union.val) {
.err_name => |err_name| return sema.fail(
block,
src,
"attempt to unwrap error: {}",
.{err_name.fmt(&mod.intern_pool)},
.{err_name.fmt(ip)},
),
.payload => |payload| payload,
},
@@ -30527,7 +30542,7 @@ fn beginComptimePtrLoad(
break :blk deref;
},
.comptime_field => |comptime_field| blk: {
const field_ty = mod.intern_pool.typeOf(comptime_field).toType();
const field_ty = ip.typeOf(comptime_field).toType();
break :blk ComptimePtrLoadKit{
.parent = null,
.pointee = .{ .ty = field_ty, .val = comptime_field.toValue() },
@@ -30536,15 +30551,15 @@ fn beginComptimePtrLoad(
};
},
.elem => |elem_ptr| blk: {
const elem_ty = mod.intern_pool.typeOf(elem_ptr.base).toType().elemType2(mod);
const elem_ty = ip.typeOf(elem_ptr.base).toType().elemType2(mod);
var deref = try sema.beginComptimePtrLoad(block, src, elem_ptr.base.toValue(), null);

// This code assumes that elem_ptrs have been "flattened" in order for direct dereference
// to succeed, meaning that elem ptrs of the same elem_ty are coalesced. Here we check that
// our parent is not an elem_ptr with the same elem_ty, since that would be "unflattened"
switch (mod.intern_pool.indexToKey(elem_ptr.base)) {
switch (ip.indexToKey(elem_ptr.base)) {
.ptr => |base_ptr| switch (base_ptr.addr) {
.elem => |base_elem| assert(!mod.intern_pool.typeOf(base_elem.base).toType().elemType2(mod).eql(elem_ty, mod)),
.elem => |base_elem| assert(!ip.typeOf(base_elem.base).toType().elemType2(mod).eql(elem_ty, mod)),
else => {},
},
else => {},
@@ -30616,7 +30631,7 @@ fn beginComptimePtrLoad(
},
.field => |field_ptr| blk: {
const field_index: u32 = @intCast(field_ptr.index);
const container_ty = mod.intern_pool.typeOf(field_ptr.base).toType().childType(mod);
const container_ty = ip.typeOf(field_ptr.base).toType().childType(mod);
var deref = try sema.beginComptimePtrLoad(block, src, field_ptr.base.toValue(), container_ty);

if (container_ty.hasWellDefinedLayout(mod)) {
@@ -30655,7 +30670,7 @@ fn beginComptimePtrLoad(
},
Value.slice_len_index => TypedValue{
.ty = Type.usize,
.val = mod.intern_pool.indexToKey(try tv.val.intern(tv.ty, mod)).ptr.len.toValue(),
.val = ip.indexToKey(try tv.val.intern(tv.ty, mod)).ptr.len.toValue(),
},
else => unreachable,
};
@@ -34529,7 +34544,7 @@ fn resolveLazyValue(sema: *Sema, val: Value) CompileError!Value {
else => (try sema.resolveLazyValue(ptr.len.toValue())).toIntern(),
};
switch (ptr.addr) {
.decl, .mut_decl => return if (resolved_len == ptr.len)
.decl, .mut_decl, .anon_decl => return if (resolved_len == ptr.len)
val
else
(try mod.intern(.{ .ptr = .{
@@ -34537,6 +34552,7 @@ fn resolveLazyValue(sema: *Sema, val: Value) CompileError!Value {
.addr = switch (ptr.addr) {
.decl => |decl| .{ .decl = decl },
.mut_decl => |mut_decl| .{ .mut_decl = mut_decl },
.anon_decl => |anon_decl| .{ .anon_decl = anon_decl },
else => unreachable,
},
.len = resolved_len,
@@ -36568,6 +36584,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
.runtime_value,
.simple_value,
.ptr_decl,
.ptr_anon_decl,
.ptr_mut_decl,
.ptr_comptime_field,
.ptr_int,

@@ -321,6 +321,15 @@ pub fn print(
.val = decl.val,
}, writer, level - 1, mod);
},
.anon_decl => |decl_val| {
if (level == 0) return writer.print("(anon decl '{d}')", .{
@intFromEnum(decl_val),
});
return print(.{
.ty = ip.typeOf(decl_val).toType(),
.val = decl_val.toValue(),
}, writer, level - 1, mod);
},
.mut_decl => |mut_decl| {
const decl = mod.declPtr(mut_decl.decl);
if (level == 0) return writer.print("(mut decl '{}')", .{decl.name.fmt(ip)});

@@ -3075,6 +3075,7 @@ fn lowerParentPtr(func: *CodeGen, ptr_val: Value, offset: u32) InnerError!WValue
.decl => |decl_index| {
return func.lowerParentPtrDecl(ptr_val, decl_index, offset);
},
.anon_decl => |ad| return func.lowerAnonDeclRef(ad, offset),
.mut_decl => |mut_decl| {
const decl_index = mut_decl.decl;
return func.lowerParentPtrDecl(ptr_val, decl_index, offset);
@@ -3138,6 +3139,32 @@ fn lowerParentPtrDecl(func: *CodeGen, ptr_val: Value, decl_index: Module.Decl.In
return func.lowerDeclRefValue(.{ .ty = ptr_ty, .val = ptr_val }, decl_index, offset);
}

fn lowerAnonDeclRef(func: *CodeGen, anon_decl: InternPool.Index, offset: u32) InnerError!WValue {
const mod = func.bin_file.base.options.module.?;
const ty = mod.intern_pool.typeOf(anon_decl).toType();

const is_fn_body = ty.zigTypeTag(mod) == .Fn;
if (!is_fn_body and !ty.hasRuntimeBitsIgnoreComptime(mod)) {
return WValue{ .imm32 = 0xaaaaaaaa };
}

const res = try func.bin_file.lowerAnonDecl(anon_decl, func.decl.srcLoc(mod));
switch (res) {
.ok => {},
.fail => |em| {
func.err_msg = em;
return error.CodegenFail;
},
}
const target_atom_index = func.bin_file.anon_decls.get(anon_decl).?;
const target_sym_index = func.bin_file.getAtom(target_atom_index).getSymbolIndex().?;
if (is_fn_body) {
return WValue{ .function_index = target_sym_index };
} else if (offset == 0) {
return WValue{ .memory = target_sym_index };
} else return WValue{ .memory_offset = .{ .pointer = target_sym_index, .offset = offset } };
}

fn lowerDeclRefValue(func: *CodeGen, tv: TypedValue, decl_index: Module.Decl.Index, offset: u32) InnerError!WValue {
const mod = func.bin_file.base.options.module.?;
if (tv.ty.isSlice(mod)) {
@@ -3305,6 +3332,7 @@ fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue {
.mut_decl => |mut_decl| return func.lowerDeclRefValue(.{ .ty = ty, .val = val }, mut_decl.decl, 0),
.int => |int| return func.lowerConstant(int.toValue(), ip.typeOf(int).toType()),
.opt_payload, .elem, .field => return func.lowerParentPtr(val, 0),
.anon_decl => |ad| return func.lowerAnonDeclRef(ad, 0),
else => return func.fail("Wasm TODO: lowerConstant for other const addr tag {}", .{ptr.addr}),
},
.opt => if (ty.optionalReprIsPayload(mod)) {

@@ -643,18 +643,9 @@ fn lowerParentPtr(
const ptr = mod.intern_pool.indexToKey(parent_ptr).ptr;
assert(ptr.len == .none);
return switch (ptr.addr) {
.decl, .mut_decl => try lowerDeclRef(
bin_file,
src_loc,
switch (ptr.addr) {
.decl => |decl| decl,
.mut_decl => |mut_decl| mut_decl.decl,
else => unreachable,
},
code,
debug_output,
reloc_info,
),
.decl => |decl| try lowerDeclRef(bin_file, src_loc, decl, code, debug_output, reloc_info),
.mut_decl => |md| try lowerDeclRef(bin_file, src_loc, md.decl, code, debug_output, reloc_info),
.anon_decl => |ad| try lowerAnonDeclRef(bin_file, src_loc, ad, code, debug_output, reloc_info),
.int => |int| try generateSymbol(bin_file, src_loc, .{
.ty = Type.usize,
.val = int.toValue(),
@@ -740,6 +731,48 @@ const RelocInfo = struct {
}
};

fn lowerAnonDeclRef(
bin_file: *link.File,
src_loc: Module.SrcLoc,
decl_val: InternPool.Index,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
reloc_info: RelocInfo,
) CodeGenError!Result {
_ = debug_output;
const target = bin_file.options.target;
const mod = bin_file.options.module.?;

const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
const decl_ty = mod.intern_pool.typeOf(decl_val).toType();
const is_fn_body = decl_ty.zigTypeTag(mod) == .Fn;
if (!is_fn_body and !decl_ty.hasRuntimeBits(mod)) {
try code.appendNTimes(0xaa, ptr_width_bytes);
return Result.ok;
}

const res = try bin_file.lowerAnonDecl(decl_val, src_loc);
switch (res) {
.ok => {},
.fail => |em| return .{ .fail = em },
}

const vaddr = try bin_file.getAnonDeclVAddr(decl_val, .{
.parent_atom_index = reloc_info.parent_atom_index,
.offset = code.items.len,
.addend = reloc_info.addend orelse 0,
});
const endian = target.cpu.arch.endian();
switch (ptr_width_bytes) {
2 => mem.writeInt(u16, try code.addManyAsArray(2), @intCast(vaddr), endian),
4 => mem.writeInt(u32, try code.addManyAsArray(4), @intCast(vaddr), endian),
8 => mem.writeInt(u64, try code.addManyAsArray(8), vaddr, endian),
else => unreachable,
}

return Result.ok;
}

fn lowerDeclRef(
bin_file: *link.File,
src_loc: Module.SrcLoc,

@@ -528,6 +528,9 @@ pub const DeclGen = struct {
fwd_decl: std.ArrayList(u8),
error_msg: ?*Module.ErrorMsg,
ctypes: CType.Store,
/// Keeps track of anonymous decls that need to be rendered before this
/// (named) Decl in the output C code.
anon_decl_deps: std.AutoArrayHashMapUnmanaged(InternPool.Index, C.DeclBlock),

fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
@setCold(true);
@@ -540,6 +543,58 @@ pub const DeclGen = struct {
return error.AnalysisFail;
}

fn renderAnonDeclValue(
dg: *DeclGen,
writer: anytype,
ty: Type,
ptr_val: Value,
decl_val: InternPool.Index,
location: ValueRenderLocation,
) error{ OutOfMemory, AnalysisFail }!void {
const mod = dg.module;
const ip = &mod.intern_pool;
const decl_ty = ip.typeOf(decl_val).toType();

// Render an undefined pointer if we have a pointer to a zero-bit or comptime type.
if (ty.isPtrAtRuntime(mod) and !decl_ty.isFnOrHasRuntimeBits(mod)) {
return dg.writeCValue(writer, .{ .undef = ty });
}

// Chase function values in order to be able to reference the original function.
if (decl_val.toValue().getFunction(mod)) |func| {
_ = func;
_ = ptr_val;
_ = location;
@panic("TODO");
}
if (decl_val.toValue().getExternFunc(mod)) |extern_func| {
_ = extern_func;
_ = ptr_val;
_ = location;
@panic("TODO");
}

assert(decl_val.toValue().getVariable(mod) == null);

// We shouldn't cast C function pointers as this is UB (when you call
// them). The analysis until now should ensure that the C function
// pointers are compatible. If they are not, then there is a bug
// somewhere and we should let the C compiler tell us about it.
const need_typecast = if (ty.castPtrToFn(mod)) |_| false else !ty.childType(mod).eql(decl_ty, mod);
if (need_typecast) {
try writer.writeAll("((");
try dg.renderType(writer, ty);
try writer.writeByte(')');
}
try writer.writeByte('&');
try renderAnonDeclName(writer, decl_val);
if (need_typecast) try writer.writeByte(')');

// Indicate that the anon decl should be rendered to the output so that
// our reference above is not undefined.
_ = try dg.anon_decl_deps.getOrPut(dg.gpa, decl_val);
}

fn renderDeclValue(
dg: *DeclGen,
writer: anytype,
@@ -593,17 +648,9 @@ pub const DeclGen = struct {
const ptr_cty = try dg.typeToIndex(ptr_ty, .complete);
const ptr = mod.intern_pool.indexToKey(ptr_val).ptr;
switch (ptr.addr) {
.decl, .mut_decl => try dg.renderDeclValue(
writer,
ptr_ty,
ptr_val.toValue(),
switch (ptr.addr) {
.decl => |decl| decl,
.mut_decl => |mut_decl| mut_decl.decl,
else => unreachable,
},
location,
),
.decl => |d| try dg.renderDeclValue(writer, ptr_ty, ptr_val.toValue(), d, location),
.mut_decl => |md| try dg.renderDeclValue(writer, ptr_ty, ptr_val.toValue(), md.decl, location),
.anon_decl => |decl_val| try dg.renderAnonDeclValue(writer, ptr_ty, ptr_val.toValue(), decl_val, location),
.int => |int| {
try writer.writeByte('(');
try dg.renderCType(writer, ptr_cty);
@@ -1144,17 +1191,9 @@ pub const DeclGen = struct {
else => val.slicePtr(mod),
};
switch (ptr.addr) {
.decl, .mut_decl => try dg.renderDeclValue(
writer,
ptr_ty,
ptr_val,
switch (ptr.addr) {
.decl => |decl| decl,
.mut_decl => |mut_decl| mut_decl.decl,
else => unreachable,
},
ptr_location,
),
.decl => |d| try dg.renderDeclValue(writer, ptr_ty, ptr_val, d, ptr_location),
.mut_decl => |md| try dg.renderDeclValue(writer, ptr_ty, ptr_val, md.decl, ptr_location),
.anon_decl => |decl_val| try dg.renderAnonDeclValue(writer, ptr_ty, ptr_val, decl_val, ptr_location),
.int => |int| {
try writer.writeAll("((");
try dg.renderType(writer, ptr_ty);
@@ -1768,7 +1807,7 @@ pub const DeclGen = struct {
.none => unreachable,
.local, .new_local => |i| return w.print("t{d}", .{i}),
.local_ref => |i| return w.print("&t{d}", .{i}),
.constant => unreachable,
.constant => |val| return renderAnonDeclName(w, val),
.arg => |i| return w.print("a{d}", .{i}),
.arg_array => |i| return dg.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" }),
.field => |i| return w.print("f{d}", .{i}),
@@ -1886,6 +1925,10 @@ pub const DeclGen = struct {
}
}

fn renderAnonDeclName(writer: anytype, anon_decl_val: InternPool.Index) !void {
return writer.print("__anon_{d}", .{@intFromEnum(anon_decl_val)});
}

fn renderTypeForBuiltinFnName(dg: *DeclGen, writer: anytype, ty: Type) !void {
try dg.renderCTypeForBuiltinFnName(writer, try dg.typeToCType(ty, .complete));
}
@@ -2723,7 +2766,6 @@ pub fn genDecl(o: *Object) !void {

const mod = o.dg.module;
const decl_index = o.dg.decl_index.unwrap().?;
const decl_c_value = .{ .decl = decl_index };
const decl = mod.declPtr(decl_index);
const tv: TypedValue = .{ .ty = decl.ty, .val = (try decl.internValue(mod)).toValue() };

@@ -2747,6 +2789,7 @@ pub fn genDecl(o: *Object) !void {
if (variable.is_threadlocal) try w.writeAll("zig_threadlocal ");
if (mod.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s|
try w.print("zig_linksection(\"{s}\", ", .{s});
const decl_c_value = .{ .decl = decl_index };
try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, .{}, decl.alignment, .complete);
if (decl.@"linksection" != .none) try w.writeAll(", read, write)");
try w.writeAll(" = ");
@@ -2755,24 +2798,37 @@ pub fn genDecl(o: *Object) !void {
try o.indent_writer.insertNewline();
} else {
const is_global = o.dg.module.decl_exports.contains(decl_index);
const fwd_decl_writer = o.dg.fwd_decl.writer();

try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
try o.dg.renderTypeAndName(fwd_decl_writer, tv.ty, decl_c_value, Const, decl.alignment, .complete);
try fwd_decl_writer.writeAll(";\n");

const w = o.writer();
if (!is_global) try w.writeAll("static ");
if (mod.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s|
try w.print("zig_linksection(\"{s}\", ", .{s});
try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, Const, decl.alignment, .complete);
if (decl.@"linksection" != .none) try w.writeAll(", read)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, tv.val, .StaticInitializer);
try w.writeAll(";\n");
const decl_c_value = .{ .decl = decl_index };
return genDeclValue(o, tv, is_global, decl_c_value, decl.alignment, decl.@"linksection");
}
}

pub fn genDeclValue(
o: *Object,
tv: TypedValue,
is_global: bool,
decl_c_value: CValue,
alignment: Alignment,
link_section: InternPool.OptionalNullTerminatedString,
) !void {
const fwd_decl_writer = o.dg.fwd_decl.writer();

try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
try o.dg.renderTypeAndName(fwd_decl_writer, tv.ty, decl_c_value, Const, alignment, .complete);
try fwd_decl_writer.writeAll(";\n");

const mod = o.dg.module;
const w = o.writer();
if (!is_global) try w.writeAll("static ");
if (mod.intern_pool.stringToSliceUnwrap(link_section)) |s|
try w.print("zig_linksection(\"{s}\", ", .{s});
try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, Const, alignment, .complete);
if (link_section != .none) try w.writeAll(", read)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, tv.val, .StaticInitializer);
try w.writeAll(";\n");
}

pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void {
const tracy = trace(@src());
defer tracy.end();

@@ -810,6 +810,8 @@ pub const Object = struct {
/// * it works for functions not all globals.
/// Therefore, this table keeps track of the mapping.
decl_map: std.AutoHashMapUnmanaged(Module.Decl.Index, Builder.Global.Index),
/// Same deal as `decl_map` but for anonymous declarations, which are always global constants.
anon_decl_map: std.AutoHashMapUnmanaged(InternPool.Index, Builder.Global.Index),
/// Serves the same purpose as `decl_map` but only used for the `is_named_enum_value` instruction.
named_enum_map: std.AutoHashMapUnmanaged(Module.Decl.Index, Builder.Function.Index),
/// Maps Zig types to LLVM types. The table memory is backed by the GPA of
@@ -993,6 +995,7 @@ pub const Object = struct {
.target_data = target_data,
.target = options.target,
.decl_map = .{},
.anon_decl_map = .{},
.named_enum_map = .{},
.type_map = .{},
.di_type_map = .{},
@@ -1011,6 +1014,7 @@ pub const Object = struct {
self.target_machine.dispose();
}
self.decl_map.deinit(gpa);
self.anon_decl_map.deinit(gpa);
self.named_enum_map.deinit(gpa);
self.type_map.deinit(gpa);
self.extern_collisions.deinit(gpa);
@@ -3038,6 +3042,31 @@ pub const Object = struct {
}
}

fn resolveGlobalAnonDecl(
o: *Object,
decl_val: InternPool.Index,
llvm_addr_space: Builder.AddrSpace,
) Error!Builder.Variable.Index {
const gop = try o.anon_decl_map.getOrPut(o.gpa, decl_val);
if (gop.found_existing) return gop.value_ptr.ptr(&o.builder).kind.variable;
errdefer assert(o.anon_decl_map.remove(decl_val));

const mod = o.module;
const decl_ty = mod.intern_pool.typeOf(decl_val);

const variable_index = try o.builder.addVariable(
try o.builder.fmt("__anon_{d}", .{@intFromEnum(decl_val)}),
try o.lowerType(decl_ty.toType()),
llvm_addr_space,
);
gop.value_ptr.* = variable_index.ptrConst(&o.builder).global;

try variable_index.setInitializer(try o.lowerValue(decl_val), &o.builder);
variable_index.setLinkage(.internal, &o.builder);
variable_index.setUnnamedAddr(.unnamed_addr, &o.builder);
return variable_index;
}

fn resolveGlobalDecl(
o: *Object,
decl_index: Module.Decl.Index,
@@ -3764,6 +3793,7 @@ pub const Object = struct {
const ptr_val = switch (ptr.addr) {
.decl => |decl| try o.lowerDeclRefValue(ptr_ty, decl),
.mut_decl => |mut_decl| try o.lowerDeclRefValue(ptr_ty, mut_decl.decl),
.anon_decl => |anon_decl| try o.lowerAnonDeclRef(ptr_ty, anon_decl),
.int => |int| try o.lowerIntAsPtr(int),
.eu_payload,
.opt_payload,
@@ -4216,10 +4246,12 @@ pub const Object = struct {
return o.builder.bigIntConst(try o.builder.intType(ty.intInfo(mod).bits), bigint);
}

const ParentPtr = struct {
ty: Type,
llvm_ptr: Builder.Value,
};
fn lowerParentPtrAnonDecl(o: *Object, decl_val: InternPool.Index) Error!Builder.Constant {
const mod = o.module;
const decl_ty = mod.intern_pool.typeOf(decl_val).toType();
const ptr_ty = try mod.singleMutPtrType(decl_ty);
return o.lowerAnonDeclRef(ptr_ty, decl_val);
}

fn lowerParentPtrDecl(o: *Object, decl_index: Module.Decl.Index) Allocator.Error!Builder.Constant {
const mod = o.module;
@@ -4229,13 +4261,14 @@ pub const Object = struct {
return o.lowerDeclRefValue(ptr_ty, decl_index);
}

fn lowerParentPtr(o: *Object, ptr_val: Value) Allocator.Error!Builder.Constant {
fn lowerParentPtr(o: *Object, ptr_val: Value) Error!Builder.Constant {
const mod = o.module;
const ip = &mod.intern_pool;
const ptr = ip.indexToKey(ptr_val.toIntern()).ptr;
return switch (ptr.addr) {
.decl => |decl| o.lowerParentPtrDecl(decl),
.mut_decl => |mut_decl| o.lowerParentPtrDecl(mut_decl.decl),
.decl => |decl| try o.lowerParentPtrDecl(decl),
.mut_decl => |mut_decl| try o.lowerParentPtrDecl(mut_decl.decl),
.anon_decl => |anon_decl| try o.lowerParentPtrAnonDecl(anon_decl),
.int => |int| try o.lowerIntAsPtr(int),
.eu_payload => |eu_ptr| {
const parent_ptr = try o.lowerParentPtr(eu_ptr.toValue());
@@ -4349,6 +4382,49 @@ pub const Object = struct {
};
}

/// This logic is very similar to `lowerDeclRefValue` but for anonymous declarations.
/// Maybe the logic could be unified.
fn lowerAnonDeclRef(
o: *Object,
ptr_ty: Type,
decl_val: InternPool.Index,
) Error!Builder.Constant {
const mod = o.module;
const ip = &mod.intern_pool;
const decl_ty = ip.typeOf(decl_val).toType();
const target = mod.getTarget();

if (decl_val.toValue().getFunction(mod)) |func| {
_ = func;
@panic("TODO");
} else if (decl_val.toValue().getExternFunc(mod)) |func| {
_ = func;
@panic("TODO");
}

const is_fn_body = decl_ty.zigTypeTag(mod) == .Fn;
if ((!is_fn_body and !decl_ty.hasRuntimeBits(mod)) or
(is_fn_body and mod.typeToFunc(decl_ty).?.is_generic)) return o.lowerPtrToVoid(ptr_ty);

if (is_fn_body)
@panic("TODO");

const addr_space = target_util.defaultAddressSpace(target, .global_constant);
const llvm_addr_space = toLlvmAddressSpace(addr_space, target);
const llvm_global = (try o.resolveGlobalAnonDecl(decl_val, llvm_addr_space)).ptrConst(&o.builder).global;

const llvm_val = try o.builder.convConst(
.unneeded,
llvm_global.toConst(),
try o.builder.ptrType(llvm_addr_space),
);

return o.builder.convConst(if (ptr_ty.isAbiInt(mod)) switch (ptr_ty.intInfo(mod).signedness) {
.signed => .signed,
.unsigned => .unsigned,
} else .unneeded, llvm_val, try o.lowerType(ptr_ty));
}

fn lowerDeclRefValue(o: *Object, ty: Type, decl_index: Module.Decl.Index) Allocator.Error!Builder.Constant {
const mod = o.module;

@@ -818,6 +818,7 @@ pub const DeclGen = struct {
const mod = self.module;
switch (mod.intern_pool.indexToKey(ptr_val.toIntern()).ptr.addr) {
.decl => |decl| return try self.constructDeclRef(ptr_ty, decl),
.anon_decl => @panic("TODO"),
.mut_decl => |decl_mut| return try self.constructDeclRef(ptr_ty, decl_mut.decl),
.int => |int| {
const ptr_id = self.spv.allocId();

src/link.zig
@@ -937,6 +937,36 @@ pub const File = struct {
}
}

pub const LowerResult = @import("codegen.zig").Result;

pub fn lowerAnonDecl(base: *File, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !LowerResult {
if (build_options.only_c) unreachable;
switch (base.tag) {
.coff => return @fieldParentPtr(Coff, "base", base).lowerAnonDecl(decl_val, src_loc),
.elf => return @fieldParentPtr(Elf, "base", base).lowerAnonDecl(decl_val, src_loc),
.macho => return @fieldParentPtr(MachO, "base", base).lowerAnonDecl(decl_val, src_loc),
.plan9 => return @fieldParentPtr(Plan9, "base", base).lowerAnonDecl(decl_val, src_loc),
.c => unreachable,
.wasm => return @fieldParentPtr(Wasm, "base", base).lowerAnonDecl(decl_val, src_loc),
.spirv => unreachable,
.nvptx => unreachable,
}
}

pub fn getAnonDeclVAddr(base: *File, decl_val: InternPool.Index, reloc_info: RelocInfo) !u64 {
if (build_options.only_c) unreachable;
switch (base.tag) {
.coff => return @fieldParentPtr(Coff, "base", base).getAnonDeclVAddr(decl_val, reloc_info),
.elf => return @fieldParentPtr(Elf, "base", base).getAnonDeclVAddr(decl_val, reloc_info),
.macho => return @fieldParentPtr(MachO, "base", base).getAnonDeclVAddr(decl_val, reloc_info),
.plan9 => return @fieldParentPtr(Plan9, "base", base).getAnonDeclVAddr(decl_val, reloc_info),
.c => unreachable,
.wasm => return @fieldParentPtr(Wasm, "base", base).getAnonDeclVAddr(decl_val, reloc_info),
.spirv => unreachable,
.nvptx => unreachable,
}
}

/// This function is called by the frontend before flush(). It communicates that
/// `options.bin_file.emit` directory needs to be renamed from
/// `[zig-cache]/tmp/[random]` to `[zig-cache]/o/[digest]`.

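These two dispatchers give every backend the same two-step pattern that the wasm and codegen.zig changes above follow: `lowerAnonDecl` first materializes the interned value as an atom in the output file (and may be called repeatedly for the same value), then `getAnonDeclVAddr` is queried when a relocation to it must be emitted. The toy model below is a sketch only; `ToyLinker` is a hypothetical type, not the real `link.File` API, and it exists just to show the call order and dedup-by-`InternPool.Index` behavior.

const std = @import("std");

const Index = u32; // stand-in for InternPool.Index

const ToyLinker = struct {
    atoms: std.AutoHashMap(Index, u64),

    // Idempotent: repeated calls for the same interned value reuse the atom.
    fn lowerAnonDecl(self: *ToyLinker, decl_val: Index) !void {
        const gop = try self.atoms.getOrPut(decl_val);
        if (!gop.found_existing) gop.value_ptr.* = 0x1000 + 0x10 * @as(u64, decl_val);
    }

    // Queried later, when a pointer to the anon decl must be written out.
    fn getAnonDeclVAddr(self: *ToyLinker, decl_val: Index) u64 {
        return self.atoms.get(decl_val).?;
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var linker = ToyLinker{ .atoms = std.AutoHashMap(Index, u64).init(gpa.allocator()) };
    defer linker.atoms.deinit();

    const anon_decl: Index = 7;
    try linker.lowerAnonDecl(anon_decl);
    try linker.lowerAnonDecl(anon_decl); // second call hits the existing atom
    std.debug.print("vaddr: 0x{x}\n", .{linker.getAnonDeclVAddr(anon_decl)});
}
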
src/link/C.zig
@@ -27,6 +27,9 @@ decl_table: std.AutoArrayHashMapUnmanaged(Module.Decl.Index, DeclBlock) = .{},
/// While in progress, a separate buffer is used, and then when finished, the
/// buffer is copied into this one.
string_bytes: std.ArrayListUnmanaged(u8) = .{},
/// Tracks all the anonymous decls that are used by all the decls so they can
/// be rendered during flush().
anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, DeclBlock) = .{},

/// Optimization, `updateDecl` reuses this buffer rather than creating a new
/// one with every call.
@@ -42,7 +45,7 @@ lazy_fwd_decl_buf: std.ArrayListUnmanaged(u8) = .{},
lazy_code_buf: std.ArrayListUnmanaged(u8) = .{},

/// A reference into `string_bytes`.
const String = struct {
const String = extern struct {
start: u32,
len: u32,

@@ -53,7 +56,7 @@ const String = struct {
};

/// Per-declaration data.
const DeclBlock = struct {
pub const DeclBlock = struct {
code: String = String.empty,
fwd_decl: String = String.empty,
/// Each `Decl` stores a set of used `CType`s. In `flush()`, we iterate
@@ -98,7 +101,7 @@ pub fn openPath(gpa: Allocator, sub_path: []const u8, options: link.Options) !*C
var c_file = try gpa.create(C);
errdefer gpa.destroy(c_file);

c_file.* = C{
c_file.* = .{
.base = .{
.tag = .c,
.options = options,
@@ -118,6 +121,11 @@ pub fn deinit(self: *C) void {
}
self.decl_table.deinit(gpa);

for (self.anon_decls.values()) |*db| {
db.deinit(gpa);
}
self.anon_decls.deinit(gpa);

self.string_bytes.deinit(gpa);
self.fwd_decl_buf.deinit(gpa);
self.code_buf.deinit(gpa);
@@ -131,10 +139,13 @@ pub fn freeDecl(self: *C, decl_index: Module.Decl.Index) void {
}
}

pub fn updateFunc(self: *C, module: *Module, func_index: InternPool.Index, air: Air, liveness: Liveness) !void {
const tracy = trace(@src());
defer tracy.end();

pub fn updateFunc(
self: *C,
module: *Module,
func_index: InternPool.Index,
air: Air,
liveness: Liveness,
) !void {
const gpa = self.base.allocator;

const func = module.funcInfo(func_index);
@@ -167,6 +178,7 @@ pub fn updateFunc(self: *C, module: *Module, func_index: InternPool.Index, air:
.is_naked_fn = decl.ty.fnCallingConvention(module) == .Naked,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = self.anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@@ -176,6 +188,7 @@ pub fn updateFunc(self: *C, module: *Module, func_index: InternPool.Index, air:

function.object.indent_writer = .{ .underlying_writer = function.object.code.writer() };
defer {
self.anon_decls = function.object.dg.anon_decl_deps;
fwd_decl.* = function.object.dg.fwd_decl.moveToUnmanaged();
code.* = function.object.code.moveToUnmanaged();
function.deinit();
@@ -200,6 +213,62 @@ pub fn updateFunc(self: *C, module: *Module, func_index: InternPool.Index, air:
gop.value_ptr.fwd_decl = try self.addString(function.object.dg.fwd_decl.items);
}

fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
const gpa = self.base.allocator;
const anon_decl = self.anon_decls.keys()[i];

const fwd_decl = &self.fwd_decl_buf;
const code = &self.code_buf;
fwd_decl.clearRetainingCapacity();
code.clearRetainingCapacity();

var object: codegen.Object = .{
.dg = .{
.gpa = gpa,
.module = module,
.error_msg = null,
.decl_index = .none,
.is_naked_fn = false,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = .{},
.anon_decl_deps = self.anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
object.indent_writer = .{ .underlying_writer = object.code.writer() };

defer {
self.anon_decls = object.dg.anon_decl_deps;
object.dg.ctypes.deinit(object.dg.gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();
}

const tv: @import("../TypedValue.zig") = .{
.ty = module.intern_pool.typeOf(anon_decl).toType(),
.val = anon_decl.toValue(),
};
const c_value: codegen.CValue = .{ .constant = anon_decl };
codegen.genDeclValue(&object, tv, false, c_value, .none, .none) catch |err| switch (err) {
error.AnalysisFail => {
@panic("TODO: C backend AnalysisFail on anonymous decl");
//try module.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
//return;
},
else => |e| return e,
};

// Free excess allocated memory for this Decl.
object.dg.ctypes.shrinkAndFree(gpa, object.dg.ctypes.count());

object.dg.anon_decl_deps.values()[i] = .{
.code = try self.addString(object.code.items),
.fwd_decl = try self.addString(object.dg.fwd_decl.items),
.ctypes = object.dg.ctypes.move(),
};
}

pub fn updateDecl(self: *C, module: *Module, decl_index: Module.Decl.Index) !void {
const tracy = trace(@src());
defer tracy.end();
@@ -226,12 +295,14 @@ pub fn updateDecl(self: *C, module: *Module, decl_index: Module.Decl.Index) !voi
.is_naked_fn = false,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = self.anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
self.anon_decls = object.dg.anon_decl_deps;
object.dg.ctypes.deinit(object.dg.gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();
@@ -289,6 +360,13 @@ pub fn flushModule(self: *C, _: *Compilation, prog_node: *std.Progress.Node) !vo
const gpa = self.base.allocator;
const module = self.base.options.module.?;

{
var i: usize = 0;
while (i < self.anon_decls.count()) : (i += 1) {
try updateAnonDecl(self, module, i);
}
}

// This code path happens exclusively with -ofmt=c. The flush logic for
// emit-h is in `flushEmitH` below.

@@ -331,10 +409,15 @@ pub fn flushModule(self: *C, _: *Compilation, prog_node: *std.Progress.Node) !vo
for (module.decl_exports.values()) |exports| for (exports.items) |@"export"|
try export_names.put(gpa, @"export".opts.name, {});

const decl_keys = self.decl_table.keys();
for (decl_keys) |decl_index| {
for (self.anon_decls.values()) |*decl_block| {
try self.flushDeclBlock(&f, decl_block, export_names, .none);
}

for (self.decl_table.keys(), self.decl_table.values()) |decl_index, *decl_block| {
assert(module.declPtr(decl_index).has_tv);
try self.flushDecl(&f, decl_index, export_names);
const decl = module.declPtr(decl_index);
const extern_symbol_name = if (decl.isExtern(module)) decl.name.toOptional() else .none;
try self.flushDeclBlock(&f, decl_block, export_names, extern_symbol_name);
}
}

@@ -344,8 +427,12 @@ pub fn flushModule(self: *C, _: *Compilation, prog_node: *std.Progress.Node) !vo
assert(f.ctypes.count() == 0);
try self.flushCTypes(&f, .none, f.lazy_ctypes);

for (self.decl_table.keys(), self.decl_table.values()) |decl_index, db| {
try self.flushCTypes(&f, decl_index.toOptional(), db.ctypes);
for (self.anon_decls.values()) |decl_block| {
try self.flushCTypes(&f, .none, decl_block.ctypes);
}

for (self.decl_table.keys(), self.decl_table.values()) |decl_index, decl_block| {
try self.flushCTypes(&f, decl_index.toOptional(), decl_block.ctypes);
}
}

@@ -363,10 +450,12 @@ pub fn flushModule(self: *C, _: *Compilation, prog_node: *std.Progress.Node) !vo
f.file_size += lazy_fwd_decl_len;

// Now the code.
const anon_decl_values = self.anon_decls.values();
const decl_values = self.decl_table.values();
try f.all_buffers.ensureUnusedCapacity(gpa, 1 + decl_values.len);
try f.all_buffers.ensureUnusedCapacity(gpa, 1 + anon_decl_values.len + decl_values.len);
f.appendBufAssumeCapacity(self.lazy_code_buf.items);
for (decl_values) |decl| f.appendBufAssumeCapacity(self.getString(decl.code));
for (anon_decl_values) |db| f.appendBufAssumeCapacity(self.getString(db.code));
for (decl_values) |db| f.appendBufAssumeCapacity(self.getString(db.code));

const file = self.base.file.?;
try file.setEndPos(f.file_size);
@@ -512,12 +601,14 @@ fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
.is_naked_fn = false,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = self.anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
self.anon_decls = object.dg.anon_decl_deps;
object.dg.ctypes.deinit(gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();
@@ -531,7 +622,11 @@ fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
ctypes.* = object.dg.ctypes.move();
}

fn flushLazyFn(self: *C, ctypes: *codegen.CType.Store, lazy_fn: codegen.LazyFnMap.Entry) FlushDeclError!void {
fn flushLazyFn(
self: *C,
ctypes: *codegen.CType.Store,
lazy_fn: codegen.LazyFnMap.Entry,
) FlushDeclError!void {
const gpa = self.base.allocator;

const fwd_decl = &self.lazy_fwd_decl_buf;
@@ -546,12 +641,16 @@ fn flushLazyFn(self: *C, ctypes: *codegen.CType.Store, lazy_fn: codegen.LazyFnMa
.is_naked_fn = false,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = .{},
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
// If this assert trips just handle the anon_decl_deps the same as
// `updateFunc()` does.
assert(object.dg.anon_decl_deps.count() == 0);
object.dg.ctypes.deinit(gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();
@@ -578,22 +677,22 @@ fn flushLazyFns(self: *C, f: *Flush, lazy_fns: codegen.LazyFnMap) FlushDeclError
}
}

fn flushDecl(
fn flushDeclBlock(
self: *C,
f: *Flush,
decl_index: Module.Decl.Index,
decl_block: *DeclBlock,
export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void),
extern_symbol_name: InternPool.OptionalNullTerminatedString,
) FlushDeclError!void {
const gpa = self.base.allocator;
const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);

const decl_block = self.decl_table.getPtr(decl_index).?;

try self.flushLazyFns(f, decl_block.lazy_fns);
try f.all_buffers.ensureUnusedCapacity(gpa, 1);
if (!(decl.isExtern(mod) and export_names.contains(decl.name)))
fwd_decl: {
if (extern_symbol_name.unwrap()) |name| {
if (export_names.contains(name)) break :fwd_decl;
}
f.appendBufAssumeCapacity(self.getString(decl_block.fwd_decl));
}
}

pub fn flushEmitH(module: *Module) !void {

@@ -82,6 +82,7 @@ atom_by_index_table: std.AutoHashMapUnmanaged(u32, Atom.Index) = .{},
/// value assigned to label `foo` is an unnamed constant belonging/associated
/// with `Decl` `main`, and lives as long as that `Decl`.
unnamed_const_atoms: UnnamedConstTable = .{},
anon_decls: AnonDeclTable = .{},

/// A table of relocations indexed by the owning them `Atom`.
/// Note that once we refactor `Atom`'s lifetime and ownership rules,
@@ -107,6 +108,7 @@ const HotUpdateState = struct {
loaded_base_address: ?std.os.windows.HMODULE = null,
};

const AnonDeclTable = std.AutoHashMapUnmanaged(InternPool.Index, Atom.Index);
const RelocTable = std.AutoArrayHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(Relocation));
const BaseRelocationTable = std.AutoArrayHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(u32));
const UnnamedConstTable = std.AutoArrayHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(Atom.Index));
@@ -323,6 +325,7 @@ pub fn deinit(self: *Coff) void {
atoms.deinit(gpa);
}
self.unnamed_const_atoms.deinit(gpa);
self.anon_decls.deinit(gpa);

for (self.relocs.values()) |*relocs| {
relocs.deinit(gpa);
@@ -1077,39 +1080,19 @@ pub fn updateFunc(self: *Coff, mod: *Module, func_index: InternPool.Index, air:

pub fn lowerUnnamedConst(self: *Coff, tv: TypedValue, decl_index: Module.Decl.Index) !u32 {
const gpa = self.base.allocator;
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();

const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);

const gop = try self.unnamed_const_atoms.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
}
const unnamed_consts = gop.value_ptr;

const atom_index = try self.createAtom();

const sym_name = blk: {
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));

const index = unnamed_consts.items.len;
break :blk try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
};
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const index = unnamed_consts.items.len;
const sym_name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(sym_name);
{
const atom = self.getAtom(atom_index);
const sym = atom.getSymbolPtr(self);
try self.setSymbolName(sym, sym_name);
sym.section_number = @as(coff.SectionNumber, @enumFromInt(self.rdata_section_index.? + 1));
}

const res = try codegen.generateSymbol(&self.base, decl.srcLoc(mod), tv, &code_buffer, .none, .{
.parent_atom_index = self.getAtom(atom_index).getSymbolIndex().?,
});
var code = switch (res) {
.ok => code_buffer.items,
const atom_index = switch (try self.lowerConst(sym_name, tv, self.rdata_section_index.?, decl.srcLoc(mod))) {
.ok => |atom_index| atom_index,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
@@ -1117,6 +1100,34 @@ pub fn lowerUnnamedConst(self: *Coff, tv: TypedValue, decl_index: Module.Decl.In
return error.CodegenFail;
},
};
try unnamed_consts.append(gpa, atom_index);
return self.getAtom(atom_index).getSymbolIndex().?;
}

const LowerConstResult = union(enum) {
|
||||
ok: Atom.Index,
|
||||
fail: *Module.ErrorMsg,
|
||||
};
|
||||
|
||||
fn lowerConst(self: *Coff, name: []const u8, tv: TypedValue, sect_id: u16, src_loc: Module.SrcLoc) !LowerConstResult {
|
||||
const gpa = self.base.allocator;
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
||||
const mod = self.base.options.module.?;
|
||||
const atom_index = try self.createAtom();
|
||||
const sym = self.getAtom(atom_index).getSymbolPtr(self);
|
||||
try self.setSymbolName(sym, name);
|
||||
sym.section_number = @as(coff.SectionNumber, @enumFromInt(sect_id + 1));
|
||||
|
||||
const res = try codegen.generateSymbol(&self.base, src_loc, tv, &code_buffer, .none, .{
|
||||
.parent_atom_index = self.getAtom(atom_index).getSymbolIndex().?,
|
||||
});
|
||||
var code = switch (res) {
|
||||
.ok => code_buffer.items,
|
||||
.fail => |em| return .{ .fail = em },
|
||||
};
|
||||
|
||||
const required_alignment: u32 = @intCast(tv.ty.abiAlignment(mod).toByteUnits(0));
|
||||
const atom = self.getAtomPtr(atom_index);
|
||||
@ -1124,14 +1135,12 @@ pub fn lowerUnnamedConst(self: *Coff, tv: TypedValue, decl_index: Module.Decl.In
|
||||
atom.getSymbolPtr(self).value = try self.allocateAtom(atom_index, atom.size, required_alignment);
|
||||
errdefer self.freeAtom(atom_index);
|
||||
|
||||
try unnamed_consts.append(gpa, atom_index);
|
||||
|
||||
log.debug("allocated atom for {s} at 0x{x}", .{ sym_name, atom.getSymbol(self).value });
|
||||
log.debug("allocated atom for {s} at 0x{x}", .{ name, atom.getSymbol(self).value });
|
||||
log.debug(" (required alignment 0x{x})", .{required_alignment});
|
||||
|
||||
try self.writeAtom(atom_index, code);
|
||||
|
||||
return atom.getSymbolIndex().?;
|
||||
return .{ .ok = atom_index };
|
||||
}
|
||||
|
||||
pub fn updateDecl(
@ -1727,6 +1736,63 @@ pub fn getDeclVAddr(self: *Coff, decl_index: Module.Decl.Index, reloc_info: link
return 0;
}

pub fn lowerAnonDecl(self: *Coff, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
// This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
// const val = decl_val.toValue();
// The symbol name can be something like `__anon_{d}` with `@intFromEnum(decl_val)`.
// It doesn't have an owner decl because it's just an unnamed constant that might
// be used by more than one function; however, its address is taken, so we need
// to put it in some location.
// ...
const gpa = self.base.allocator;
const gop = try self.anon_decls.getOrPut(gpa, decl_val);
if (!gop.found_existing) {
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const val = decl_val.toValue();
const tv = TypedValue{ .ty = ty, .val = val };
const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});
defer gpa.free(name);
const res = self.lowerConst(name, tv, self.rdata_section_index.?, src_loc) catch |err| switch (err) {
else => {
// TODO improve error message
const em = try Module.ErrorMsg.create(gpa, src_loc, "lowerAnonDecl failed with error: {s}", .{
@errorName(err),
});
return .{ .fail = em };
},
};
const atom_index = switch (res) {
.ok => |atom_index| atom_index,
.fail => |em| return .{ .fail = em },
};
gop.value_ptr.* = atom_index;
}
return .ok;
}

pub fn getAnonDeclVAddr(self: *Coff, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);

const this_atom_index = self.anon_decls.get(decl_val).?;
const sym_index = self.getAtom(this_atom_index).getSymbolIndex().?;
const atom_index = self.getAtomIndexForSymbol(.{ .sym_index = reloc_info.parent_atom_index, .file = null }).?;
const target = SymbolWithLoc{ .sym_index = sym_index, .file = null };
try Atom.addRelocation(self, atom_index, .{
.type = .direct,
.target = target,
.offset = @as(u32, @intCast(reloc_info.offset)),
.addend = reloc_info.addend,
.pcrel = false,
.length = 3,
});
try Atom.addBaseRelocation(self, atom_index, @as(u32, @intCast(reloc_info.offset)));

return 0;
}
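These two functions are the COFF side of the new interface: `lowerAnonDecl` emits a comptime-known value under a generated `__anon_{d}` symbol in `.rdata`, and `getAnonDeclVAddr` records a relocation against it. For intuition only (this is not part of the diff, and the variable names below are made up), a minimal standalone Zig snippet of the kind of owner-less, comptime-known constant this machinery is presumably there to place:

const std = @import("std");

// Address of an anonymous comptime array: no named decl owns these bytes,
// yet they still need a spot in the object file's read-only data.
const bytes: *const [4]u8 = &[_]u8{ 1, 2, 3, 4 };

pub fn main() void {
    std.debug.print("{any}\n", .{bytes.*});
}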

pub fn getGlobalSymbol(self: *Coff, name: []const u8, lib_name_name: ?[]const u8) !u32 {
const gop = try self.getOrPutGlobalPtr(name);
const global_index = self.getGlobalIndex(name).?;

src/link/Elf.zig
@ -155,12 +155,14 @@ last_atom_and_free_list_table: std.AutoArrayHashMapUnmanaged(u16, LastAtomAndFre
/// value assigned to label `foo` is an unnamed constant belonging/associated
/// with `Decl` `main`, and lives as long as that `Decl`.
unnamed_consts: UnnamedConstTable = .{},
anon_decls: AnonDeclTable = .{},

comdat_groups: std.ArrayListUnmanaged(ComdatGroup) = .{},
comdat_groups_owners: std.ArrayListUnmanaged(ComdatGroupOwner) = .{},
comdat_groups_table: std.AutoHashMapUnmanaged(u32, ComdatGroupOwner.Index) = .{},

const UnnamedConstTable = std.AutoHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(Symbol.Index));
const AnonDeclTable = std.AutoHashMapUnmanaged(InternPool.Index, Symbol.Index);
const LazySymbolTable = std.AutoArrayHashMapUnmanaged(Module.Decl.OptionalIndex, LazySymbolMetadata);

/// When allocating, the ideal_capacity is calculated by
@ -321,6 +323,7 @@ pub fn deinit(self: *Elf) void {
}
self.unnamed_consts.deinit(gpa);
}
self.anon_decls.deinit(gpa);

if (self.dwarf) |*dw| {
dw.deinit();
@ -334,7 +337,6 @@ pub fn deinit(self: *Elf) void {

pub fn getDeclVAddr(self: *Elf, decl_index: Module.Decl.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);

const this_sym_index = try self.getOrCreateMetadataForDecl(decl_index);
const this_sym = self.symbol(this_sym_index);
const vaddr = this_sym.value;
@ -344,7 +346,57 @@ pub fn getDeclVAddr(self: *Elf, decl_index: Module.Decl.Index, reloc_info: link.
.r_info = (@as(u64, @intCast(this_sym.esym_index)) << 32) | elf.R_X86_64_64,
.r_addend = reloc_info.addend,
});
return vaddr;
}

pub fn lowerAnonDecl(self: *Elf, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
// This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
// const val = decl_val.toValue();
// The symbol name can be something like `__anon_{d}` with `@intFromEnum(decl_val)`.
// It doesn't have an owner decl because it's just an unnamed constant that might
// be used by more than one function; however, its address is taken, so we need
// to put it in some location.
// ...
const gpa = self.base.allocator;
const gop = try self.anon_decls.getOrPut(gpa, decl_val);
if (!gop.found_existing) {
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const val = decl_val.toValue();
const tv = TypedValue{ .ty = ty, .val = val };
const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});
defer gpa.free(name);
const res = self.lowerConst(name, tv, self.rodata_section_index.?, src_loc) catch |err| switch (err) {
else => {
// TODO improve error message
const em = try Module.ErrorMsg.create(gpa, src_loc, "lowerAnonDecl failed with error: {s}", .{
@errorName(err),
});
return .{ .fail = em };
},
};
const sym_index = switch (res) {
.ok => |sym_index| sym_index,
.fail => |em| return .{ .fail = em },
};
gop.value_ptr.* = sym_index;
}
return .ok;
}

pub fn getAnonDeclVAddr(self: *Elf, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);
const sym_index = self.anon_decls.get(decl_val).?;
const sym = self.symbol(sym_index);
const vaddr = sym.value;
const parent_atom = self.symbol(reloc_info.parent_atom_index).atom(self).?;
try parent_atom.addReloc(self, .{
.r_offset = reloc_info.offset,
.r_info = (@as(u64, @intCast(sym.esym_index)) << 32) | elf.R_X86_64_64,
.r_addend = reloc_info.addend,
});
return vaddr;
}

@ -3105,36 +3157,19 @@ fn updateLazySymbol(self: *Elf, sym: link.File.LazySymbol, symbol_index: Symbol.

pub fn lowerUnnamedConst(self: *Elf, typed_value: TypedValue, decl_index: Module.Decl.Index) !u32 {
const gpa = self.base.allocator;

var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();

const mod = self.base.options.module.?;
const gop = try self.unnamed_consts.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
}
const unnamed_consts = gop.value_ptr;

const decl = mod.declPtr(decl_index);
const name_str_index = blk: {
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const index = unnamed_consts.items.len;
const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(name);
break :blk try self.strtab.insert(gpa, name);
};

const zig_module = self.file(self.zig_module_index.?).?.zig_module;
const sym_index = try zig_module.addAtom(self);

const res = try codegen.generateSymbol(&self.base, decl.srcLoc(mod), typed_value, &code_buffer, .{
.none = {},
}, .{
.parent_atom_index = sym_index,
});
const code = switch (res) {
.ok => code_buffer.items,
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const index = unnamed_consts.items.len;
const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(name);
const sym_index = switch (try self.lowerConst(name, typed_value, self.rodata_section_index.?, decl.srcLoc(mod))) {
.ok => |sym_index| sym_index,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
@ -3142,13 +3177,48 @@ pub fn lowerUnnamedConst(self: *Elf, typed_value: TypedValue, decl_index: Module
return error.CodegenFail;
},
};
const sym = self.symbol(sym_index);
try unnamed_consts.append(gpa, sym.atom_index);
return sym_index;
}

const required_alignment = typed_value.ty.abiAlignment(mod);
const shdr_index = self.rodata_section_index.?;
const phdr_index = self.phdr_to_shdr_table.get(shdr_index).?;
const LowerConstResult = union(enum) {
ok: Symbol.Index,
fail: *Module.ErrorMsg,
};

fn lowerConst(
self: *Elf,
name: []const u8,
tv: TypedValue,
output_section_index: u16,
src_loc: Module.SrcLoc,
) !LowerConstResult {
const gpa = self.base.allocator;

var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();

const mod = self.base.options.module.?;
const zig_module = self.file(self.zig_module_index.?).?.zig_module;
const sym_index = try zig_module.addAtom(self);

const res = try codegen.generateSymbol(&self.base, src_loc, tv, &code_buffer, .{
.none = {},
}, .{
.parent_atom_index = sym_index,
});
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| return .{ .fail = em },
};

const required_alignment = tv.ty.abiAlignment(mod);
const phdr_index = self.phdr_to_shdr_table.get(output_section_index).?;
const local_sym = self.symbol(sym_index);
const name_str_index = try self.strtab.insert(gpa, name);
local_sym.name_offset = name_str_index;
local_sym.output_section_index = self.rodata_section_index.?;
local_sym.output_section_index = output_section_index;
const local_esym = &zig_module.local_esyms.items[local_sym.esym_index];
local_esym.st_name = name_str_index;
local_esym.st_info |= elf.STT_OBJECT;
@ -3158,21 +3228,20 @@ pub fn lowerUnnamedConst(self: *Elf, typed_value: TypedValue, decl_index: Module
atom_ptr.name_offset = name_str_index;
atom_ptr.alignment = required_alignment;
atom_ptr.size = code.len;
atom_ptr.output_section_index = self.rodata_section_index.?;
atom_ptr.output_section_index = output_section_index;

try atom_ptr.allocate(self);
// TODO rename and re-audit this method
errdefer self.freeDeclMetadata(sym_index);

local_sym.value = atom_ptr.value;
local_esym.st_value = atom_ptr.value;

try unnamed_consts.append(gpa, atom_ptr.atom_index);

const section_offset = atom_ptr.value - self.phdrs.items[phdr_index].p_vaddr;
const file_offset = self.shdrs.items[shdr_index].sh_offset + section_offset;
const file_offset = self.shdrs.items[output_section_index].sh_offset + section_offset;
try self.base.file.?.pwriteAll(code, file_offset);

return sym_index;
return .{ .ok = sym_index };
}
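As on COFF, the ELF `lowerConst` is now shared by `lowerUnnamedConst` and `lowerAnonDecl`; the per-caller choice is only the output section (`rodata_section_index` here, `rdata_section_index` on COFF, `data_const_section_index` on MachO, per the calls elsewhere in this diff). As a standalone, illustrative example (not from the commit), a string literal is another constant that is reachable only through a pointer and would presumably be lowered this way into read-only data:

const std = @import("std");

// A string literal is a comptime constant with no named owner; it is
// reachable only through this pointer and lives in read-only data.
const greeting: [*:0]const u8 = "hello, anon decl";

pub fn main() void {
    std.debug.print("{s}\n", .{greeting});
}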

pub fn updateDeclExports(

@ -109,6 +109,7 @@ atom_by_index_table: std.AutoHashMapUnmanaged(u32, Atom.Index) = .{},
/// value assigned to label `foo` is an unnamed constant belonging/associated
/// with `Decl` `main`, and lives as long as that `Decl`.
unnamed_const_atoms: UnnamedConstTable = .{},
anon_decls: AnonDeclTable = .{},

/// A table of relocations indexed by their owning `Atom`.
/// Note that once we refactor `Atom`'s lifetime and ownership rules,
@ -1899,6 +1900,7 @@ pub fn deinit(self: *MachO) void {
atoms.deinit(gpa);
}
self.unnamed_const_atoms.deinit(gpa);
self.anon_decls.deinit(gpa);

self.atom_by_index_table.deinit(gpa);

@ -2172,39 +2174,19 @@ pub fn updateFunc(self: *MachO, mod: *Module, func_index: InternPool.Index, air:

pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: Module.Decl.Index) !u32 {
const gpa = self.base.allocator;

var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();

const mod = self.base.options.module.?;
const gop = try self.unnamed_const_atoms.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
}
const unnamed_consts = gop.value_ptr;

const decl = mod.declPtr(decl_index);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));

const name_str_index = blk: {
const index = unnamed_consts.items.len;
const name = try std.fmt.allocPrint(gpa, "___unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(name);
break :blk try self.strtab.insert(gpa, name);
};
const name = self.strtab.get(name_str_index).?;

log.debug("allocating symbol indexes for {s}", .{name});

const sym_index = try self.allocateSymbol();
const atom_index = try self.createAtom(sym_index, .{});
try self.atom_by_index_table.putNoClobber(gpa, sym_index, atom_index);

const res = try codegen.generateSymbol(&self.base, decl.srcLoc(mod), typed_value, &code_buffer, .none, .{
.parent_atom_index = self.getAtom(atom_index).getSymbolIndex().?,
});
var code = switch (res) {
.ok => code_buffer.items,
const index = unnamed_consts.items.len;
const name = try std.fmt.allocPrint(gpa, "___unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(name);
const atom_index = switch (try self.lowerConst(name, typed_value, self.data_const_section_index.?, decl.srcLoc(mod))) {
.ok => |atom_index| atom_index,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
@ -2212,28 +2194,63 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: Modu
return error.CodegenFail;
},
};
try unnamed_consts.append(gpa, atom_index);
const atom = self.getAtomPtr(atom_index);
return atom.getSymbolIndex().?;
}

const required_alignment = typed_value.ty.abiAlignment(mod);
const LowerConstResult = union(enum) {
ok: Atom.Index,
fail: *Module.ErrorMsg,
};

fn lowerConst(
self: *MachO,
name: []const u8,
tv: TypedValue,
sect_id: u8,
src_loc: Module.SrcLoc,
) !LowerConstResult {
const gpa = self.base.allocator;

var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();

const mod = self.base.options.module.?;

log.debug("allocating symbol indexes for {s}", .{name});

const sym_index = try self.allocateSymbol();
const atom_index = try self.createAtom(sym_index, .{});
try self.atom_by_index_table.putNoClobber(gpa, sym_index, atom_index);

const res = try codegen.generateSymbol(&self.base, src_loc, tv, &code_buffer, .none, .{
.parent_atom_index = self.getAtom(atom_index).getSymbolIndex().?,
});
var code = switch (res) {
.ok => code_buffer.items,
.fail => |em| return .{ .fail = em },
};

const required_alignment = tv.ty.abiAlignment(mod);
const atom = self.getAtomPtr(atom_index);
atom.size = code.len;
// TODO: work out logic for disambiguating functions from function pointers
// const sect_id = self.getDeclOutputSection(decl_index);
const sect_id = self.data_const_section_index.?;
const symbol = atom.getSymbolPtr(self);
const name_str_index = try self.strtab.insert(gpa, name);
symbol.n_strx = name_str_index;
symbol.n_type = macho.N_SECT;
symbol.n_sect = sect_id + 1;
symbol.n_value = try self.allocateAtom(atom_index, code.len, required_alignment);
errdefer self.freeAtom(atom_index);

try unnamed_consts.append(gpa, atom_index);

log.debug("allocated atom for {s} at 0x{x}", .{ name, symbol.n_value });
log.debug(" (required alignment 0x{x})", .{required_alignment});

try self.writeAtom(atom_index, code);

return atom.getSymbolIndex().?;
return .{ .ok = atom_index };
}

pub fn updateDecl(self: *MachO, mod: *Module, decl_index: Module.Decl.Index) !void {
@ -2840,6 +2857,62 @@ pub fn getDeclVAddr(self: *MachO, decl_index: Module.Decl.Index, reloc_info: Fil
return 0;
}

pub fn lowerAnonDecl(self: *MachO, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
// This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
// const val = decl_val.toValue();
// The symbol name can be something like `__anon_{d}` with `@intFromEnum(decl_val)`.
// It doesn't have an owner decl because it's just an unnamed constant that might
// be used by more than one function; however, its address is taken, so we need
// to put it in some location.
// ...
const gpa = self.base.allocator;
const gop = try self.anon_decls.getOrPut(gpa, decl_val);
if (!gop.found_existing) {
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const val = decl_val.toValue();
const tv = TypedValue{ .ty = ty, .val = val };
const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});
defer gpa.free(name);
const res = self.lowerConst(name, tv, self.data_const_section_index.?, src_loc) catch |err| switch (err) {
else => {
// TODO improve error message
const em = try Module.ErrorMsg.create(gpa, src_loc, "lowerAnonDecl failed with error: {s}", .{
@errorName(err),
});
return .{ .fail = em };
},
};
const atom_index = switch (res) {
.ok => |atom_index| atom_index,
.fail => |em| return .{ .fail = em },
};
gop.value_ptr.* = atom_index;
}
return .ok;
}

pub fn getAnonDeclVAddr(self: *MachO, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);

const this_atom_index = self.anon_decls.get(decl_val).?;
const sym_index = self.getAtom(this_atom_index).getSymbolIndex().?;
const atom_index = self.getAtomIndexForSymbol(.{ .sym_index = reloc_info.parent_atom_index }).?;
try Atom.addRelocation(self, atom_index, .{
.type = .unsigned,
.target = .{ .sym_index = sym_index },
.offset = @as(u32, @intCast(reloc_info.offset)),
.addend = reloc_info.addend,
.pcrel = false,
.length = 3,
});
try Atom.addRebase(self, atom_index, @as(u32, @intCast(reloc_info.offset)));

return 0;
}

fn populateMissingMetadata(self: *MachO) !void {
assert(self.mode == .incremental);

@ -5389,6 +5462,7 @@ const DeclMetadata = struct {
}
};

const AnonDeclTable = std.AutoHashMapUnmanaged(InternPool.Index, Atom.Index);
const BindingTable = std.AutoArrayHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(Atom.Binding));
const UnnamedConstTable = std.AutoArrayHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(Atom.Index));
const RebaseTable = std.AutoArrayHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(u32));

@ -82,6 +82,8 @@ unnamed_const_atoms: UnnamedConstTable = .{},

lazy_syms: LazySymbolTable = .{},

anon_decls: std.AutoHashMapUnmanaged(InternPool.Index, Atom.Index) = .{},

relocs: std.AutoHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(Reloc)) = .{},
hdr: aout.ExecHdr = undefined,

@ -166,6 +168,9 @@ pub const Atom = struct {
code_len: usize,
decl_index: Module.Decl.Index,
},
fn fromSlice(slice: []u8) CodePtr {
return .{ .code_ptr = slice.ptr, .other = .{ .code_len = slice.len } };
}
fn getCode(self: CodePtr, plan9: *const Plan9) []u8 {
const mod = plan9.base.options.module.?;
return if (self.code_ptr) |p| p[0..self.other.code_len] else blk: {
@ -608,8 +613,9 @@ fn atomCount(self: *Plan9) usize {
while (it_lazy.next()) |kv| {
lazy_atom_count += kv.value_ptr.numberOfAtoms();
}
const anon_atom_count = self.anon_decls.count();
const extern_atom_count = self.externCount();
return data_decl_count + fn_decl_count + unnamed_const_count + lazy_atom_count + extern_atom_count;
return data_decl_count + fn_decl_count + unnamed_const_count + lazy_atom_count + extern_atom_count + anon_atom_count;
}

pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {
@ -804,6 +810,27 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
self.syms.items[atom.sym_index.?].value = off;
}
}
// the anon decls
{
var it_anon = self.anon_decls.iterator();
while (it_anon.next()) |kv| {
const atom = self.getAtomPtr(kv.value_ptr.*);
const code = atom.code.getOwnedCode().?;
log.debug("write anon decl: {s}", .{self.syms.items[atom.sym_index.?].name});
foff += code.len;
iovecs[iovecs_i] = .{ .iov_base = code.ptr, .iov_len = code.len };
iovecs_i += 1;
const off = self.getAddr(data_i, .d);
data_i += code.len;
atom.offset = off;
if (!self.sixtyfour_bit) {
mem.writeInt(u32, got_table[atom.got_index.? * 4 ..][0..4], @as(u32, @intCast(off)), self.base.options.target.cpu.arch.endian());
} else {
mem.writeInt(u64, got_table[atom.got_index.? * 8 ..][0..8], off, self.base.options.target.cpu.arch.endian());
}
self.syms.items[atom.sym_index.?].value = off;
}
}
// the lazy data symbols
var it_lazy = self.lazy_syms.iterator();
while (it_lazy.next()) |kv| {
@ -1196,6 +1223,11 @@ pub fn deinit(self: *Plan9) void {
while (itd.next()) |entry| {
gpa.free(entry.value_ptr.*);
}
var it_anon = self.anon_decls.iterator();
while (it_anon.next()) |entry| {
const sym_index = self.getAtom(entry.value_ptr.*).sym_index.?;
gpa.free(self.syms.items[sym_index].name);
}
self.data_decl_table.deinit(gpa);
self.syms.deinit(gpa);
self.got_index_free_list.deinit(gpa);
@ -1418,6 +1450,63 @@ pub fn getDeclVAddr(
return undefined;
}

pub fn lowerAnonDecl(self: *Plan9, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
// This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
// const val = decl_val.toValue();
// The symbol name can be something like `__anon_{d}` with `@intFromEnum(decl_val)`.
// It doesn't have an owner decl because it's just an unnamed constant that might
// be used by more than one function; however, its address is taken, so we need
// to put it in some location.
// ...
const gpa = self.base.allocator;
var gop = try self.anon_decls.getOrPut(gpa, decl_val);
const mod = self.base.options.module.?;
if (!gop.found_existing) {
const ty = mod.intern_pool.typeOf(decl_val).toType();
const val = decl_val.toValue();
const tv = TypedValue{ .ty = ty, .val = val };
const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});

const index = try self.createAtom();
const got_index = self.allocateGotIndex();
gop.value_ptr.* = index;
// we need to free name later
var code_buffer = std.ArrayList(u8).init(gpa);
const res = try codegen.generateSymbol(&self.base, src_loc, tv, &code_buffer, .{ .none = {} }, .{ .parent_atom_index = index });
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| return .{ .fail = em },
};
const atom_ptr = self.getAtomPtr(index);
atom_ptr.* = .{
.type = .d,
.offset = undefined,
.sym_index = null,
.got_index = got_index,
.code = Atom.CodePtr.fromSlice(code),
};
_ = try atom_ptr.getOrCreateSymbolTableEntry(self);
self.syms.items[atom_ptr.sym_index.?] = .{
.type = .d,
.value = undefined,
.name = name,
};
}
return .ok;
}

pub fn getAnonDeclVAddr(self: *Plan9, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
const atom_index = self.anon_decls.get(decl_val).?;
try self.addReloc(reloc_info.parent_atom_index, .{
.target = atom_index,
.offset = reloc_info.offset,
.addend = reloc_info.addend,
});
return undefined;
}

pub fn addReloc(self: *Plan9, parent_index: Atom.Index, reloc: Reloc) !void {
const gop = try self.relocs.getOrPut(self.base.allocator, parent_index);
if (!gop.found_existing) {

@ -187,6 +187,9 @@ debug_pubtypes_atom: ?Atom.Index = null,
/// rather than by the linker.
synthetic_functions: std.ArrayListUnmanaged(Atom.Index) = .{},

/// Map for storing anonymous declarations. Each anonymous decl maps to its Atom's index.
anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, Atom.Index) = .{},

pub const Alignment = types.Alignment;

pub const Segment = struct {
@ -1291,6 +1294,7 @@ pub fn deinit(wasm: *Wasm) void {
}

wasm.decls.deinit(gpa);
wasm.anon_decls.deinit(gpa);
wasm.atom_types.deinit(gpa);
wasm.symbols.deinit(gpa);
wasm.symbols_free_list.deinit(gpa);
@ -1548,17 +1552,38 @@ pub fn lowerUnnamedConst(wasm: *Wasm, tv: TypedValue, decl_index: Module.Decl.In
assert(tv.ty.zigTypeTag(mod) != .Fn); // cannot create local symbols for functions
const decl = mod.declPtr(decl_index);

// Create and initialize a new local symbol and atom
const atom_index = try wasm.createAtom();
const parent_atom_index = try wasm.getOrCreateAtomForDecl(decl_index);
const parent_atom = wasm.getAtomPtr(parent_atom_index);
const parent_atom = wasm.getAtom(parent_atom_index);
const local_index = parent_atom.locals.items.len;
try parent_atom.locals.append(wasm.base.allocator, atom_index);
const fqn = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
const name = try std.fmt.allocPrintZ(wasm.base.allocator, "__unnamed_{s}_{d}", .{
fqn, local_index,
});
defer wasm.base.allocator.free(name);

switch (try wasm.lowerConst(name, tv, decl.srcLoc(mod))) {
.ok => |atom_index| {
try wasm.getAtomPtr(parent_atom_index).locals.append(wasm.base.allocator, atom_index);
return wasm.getAtom(atom_index).getSymbolIndex().?;
},
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
return error.CodegenFail;
},
}
}

const LowerConstResult = union(enum) {
ok: Atom.Index,
fail: *Module.ErrorMsg,
};

fn lowerConst(wasm: *Wasm, name: []const u8, tv: TypedValue, src_loc: Module.SrcLoc) !LowerConstResult {
const mod = wasm.base.options.module.?;

// Create and initialize a new local symbol and atom
const atom_index = try wasm.createAtom();
var value_bytes = std.ArrayList(u8).init(wasm.base.allocator);
defer value_bytes.deinit();

@ -1576,7 +1601,7 @@ pub fn lowerUnnamedConst(wasm: *Wasm, tv: TypedValue, decl_index: Module.Decl.In

const result = try codegen.generateSymbol(
&wasm.base,
decl.srcLoc(mod),
src_loc,
tv,
&value_bytes,
.none,
@ -1588,17 +1613,15 @@ pub fn lowerUnnamedConst(wasm: *Wasm, tv: TypedValue, decl_index: Module.Decl.In
break :code switch (result) {
.ok => value_bytes.items,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
return error.CodegenFail;
return .{ .fail = em };
},
};
};

const atom = wasm.getAtomPtr(atom_index);
atom.size = @as(u32, @intCast(code.len));
atom.size = @intCast(code.len);
try atom.code.appendSlice(wasm.base.allocator, code);
return atom.sym_index;
return .{ .ok = atom_index };
}

/// Returns the symbol index of a symbol whose flag is set global,
@ -1679,6 +1702,63 @@ pub fn getDeclVAddr(
return target_symbol_index;
}

pub fn lowerAnonDecl(wasm: *Wasm, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
const gop = try wasm.anon_decls.getOrPut(wasm.base.allocator, decl_val);
if (gop.found_existing) {
return .ok;
}

const mod = wasm.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const tv: TypedValue = .{ .ty = ty, .val = decl_val.toValue() };
const name = try std.fmt.allocPrintZ(wasm.base.allocator, "__anon_{d}", .{@intFromEnum(decl_val)});
defer wasm.base.allocator.free(name);

switch (try wasm.lowerConst(name, tv, src_loc)) {
.ok => |atom_index| {
gop.value_ptr.* = atom_index;
return .ok;
},
.fail => |em| return .{ .fail = em },
}
}

pub fn getAnonDeclVAddr(wasm: *Wasm, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
const atom_index = wasm.anon_decls.get(decl_val).?;
const target_symbol_index = wasm.getAtom(atom_index).getSymbolIndex().?;

const parent_atom_index = wasm.symbol_atom.get(.{ .file = null, .index = reloc_info.parent_atom_index }).?;
const parent_atom = wasm.getAtomPtr(parent_atom_index);
const is_wasm32 = wasm.base.options.target.cpu.arch == .wasm32;
const mod = wasm.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
if (ty.zigTypeTag(mod) == .Fn) {
assert(reloc_info.addend == 0); // addend not allowed for function relocations
// We found a function pointer, so add it to our table,
// as function pointers are not allowed to be stored inside the data section.
// They are instead stored in a function table and called by index.
try wasm.addTableFunction(target_symbol_index);
try parent_atom.relocs.append(wasm.base.allocator, .{
.index = target_symbol_index,
.offset = @as(u32, @intCast(reloc_info.offset)),
.relocation_type = if (is_wasm32) .R_WASM_TABLE_INDEX_I32 else .R_WASM_TABLE_INDEX_I64,
});
} else {
try parent_atom.relocs.append(wasm.base.allocator, .{
.index = target_symbol_index,
.offset = @as(u32, @intCast(reloc_info.offset)),
.relocation_type = if (is_wasm32) .R_WASM_MEMORY_ADDR_I32 else .R_WASM_MEMORY_ADDR_I64,
.addend = @as(i32, @intCast(reloc_info.addend)),
});
}

// we do not know the final address at this point,
// as atom allocation will determine the address and relocations
// will calculate and rewrite this. Therefore, we simply return the symbol index
// that was targeted.
return target_symbol_index;
}
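Note the split in the wasm version above: a function pointer has no data address, so its target is added to the function table and relocated with `R_WASM_TABLE_INDEX_*`, while data pointers get `R_WASM_MEMORY_ADDR_*` relocations. A standalone, illustrative example (not from the commit; names are made up) of both kinds of comptime-known pointer:

const std = @import("std");

fn double(x: u32) u32 {
    return x * 2;
}

// Comptime-known function pointer: on wasm its "address" is a function
// table index rather than a memory address.
const fn_ptr: *const fn (u32) u32 = &double;

// Comptime-known data pointer: relocated as a memory address.
const data_ptr: *const u32 = &@as(u32, 42);

pub fn main() void {
    std.debug.print("{} {}\n", .{ fn_ptr(21), data_ptr.* });
}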

pub fn deleteDeclExport(wasm: *Wasm, decl_index: Module.Decl.Index) void {
if (wasm.llvm_object) |_| return;
const atom_index = wasm.decls.get(decl_index) orelse return;
@ -3442,6 +3522,15 @@ pub fn flushModule(wasm: *Wasm, comp: *Compilation, prog_node: *std.Progress.Nod
try wasm.parseAtom(local_atom_index, .{ .data = .read_only });
}
}
// parse anonymous declarations
for (wasm.anon_decls.keys(), wasm.anon_decls.values()) |decl_val, atom_index| {
const ty = mod.intern_pool.typeOf(decl_val).toType();
if (ty.zigTypeTag(mod) == .Fn) {
try wasm.parseAtom(atom_index, .function);
} else {
try wasm.parseAtom(atom_index, .{ .data = .read_only });
}
}

// also parse any backend-generated functions
for (wasm.synthetic_functions.items) |atom_index| {

@ -1565,12 +1565,14 @@ pub const Value = struct {
}

pub fn sliceLen(val: Value, mod: *Module) u64 {
const ptr = mod.intern_pool.indexToKey(val.toIntern()).ptr;
const ip = &mod.intern_pool;
const ptr = ip.indexToKey(val.toIntern()).ptr;
return switch (ptr.len) {
.none => switch (mod.intern_pool.indexToKey(switch (ptr.addr) {
.none => switch (ip.indexToKey(switch (ptr.addr) {
.decl => |decl| mod.declPtr(decl).ty.toIntern(),
.mut_decl => |mut_decl| mod.declPtr(mut_decl.decl).ty.toIntern(),
.comptime_field => |comptime_field| mod.intern_pool.typeOf(comptime_field),
.anon_decl => |anon_decl| ip.typeOf(anon_decl),
.comptime_field => |comptime_field| ip.typeOf(comptime_field),
else => unreachable,
})) {
.array_type => |array_type| array_type.len,
@ -1602,6 +1604,7 @@ pub const Value = struct {
})).toValue(),
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| mod.declPtr(decl).val.maybeElemValue(mod, index),
.anon_decl => |anon_decl| anon_decl.toValue().maybeElemValue(mod, index),
.mut_decl => |mut_decl| (try mod.declPtr(mut_decl.decl).internValue(mod))
.toValue().maybeElemValue(mod, index),
.int, .eu_payload => null,
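The `Value` changes above teach `sliceLen` and `maybeElemValue` to look through an `anon_decl` pointer: the type comes from `ip.typeOf(anon_decl)` and element reads go through `anon_decl.toValue()`. As a closing, standalone illustration (not part of the diff), user-level code of the kind that presumably exercises this path at comptime:

const std = @import("std");

// A slice of an anonymous comptime array: the compiler must be able to
// answer `slice.len` and `slice[i]` at comptime by looking through the
// anonymous decl to the underlying array value.
const slice: []const u8 = &[_]u8{ 10, 20, 30 };

comptime {
    std.debug.assert(slice.len == 3);
    std.debug.assert(slice[1] == 20);
}

pub fn main() void {
    std.debug.print("{d}\n", .{slice[2]});
}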