Merge pull request #17545 from ziglang/more-anon-decls

migrate make_ptr_const to new anonymous decl mechanism
This commit is contained in:
Andrew Kelley 2023-10-21 21:38:55 -04:00 committed by GitHub
commit 7d50634e0a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 337 additions and 153 deletions

View File

@ -782,10 +782,10 @@ pub const Elf32_Sym = extern struct {
st_shndx: Elf32_Section,
pub inline fn st_type(self: @This()) u4 {
return @as(u4, @truncate(self.st_info));
return @truncate(self.st_info);
}
pub inline fn st_bind(self: @This()) u4 {
return @as(u4, @truncate(self.st_info >> 4));
return @truncate(self.st_info >> 4);
}
};
pub const Elf64_Sym = extern struct {
@ -797,10 +797,10 @@ pub const Elf64_Sym = extern struct {
st_size: Elf64_Xword,
pub inline fn st_type(self: @This()) u4 {
return @as(u4, @truncate(self.st_info));
return @truncate(self.st_info);
}
pub inline fn st_bind(self: @This()) u4 {
return @as(u4, @truncate(self.st_info >> 4));
return @truncate(self.st_info >> 4);
}
};
pub const Elf32_Syminfo = extern struct {
@ -816,10 +816,10 @@ pub const Elf32_Rel = extern struct {
r_info: Elf32_Word,
pub inline fn r_sym(self: @This()) u24 {
return @as(u24, @truncate(self.r_info >> 8));
return @truncate(self.r_info >> 8);
}
pub inline fn r_type(self: @This()) u8 {
return @as(u8, @truncate(self.r_info));
return @truncate(self.r_info);
}
};
pub const Elf64_Rel = extern struct {
@ -827,10 +827,10 @@ pub const Elf64_Rel = extern struct {
r_info: Elf64_Xword,
pub inline fn r_sym(self: @This()) u32 {
return @as(u32, @truncate(self.r_info >> 32));
return @truncate(self.r_info >> 32);
}
pub inline fn r_type(self: @This()) u32 {
return @as(u32, @truncate(self.r_info));
return @truncate(self.r_info);
}
};
pub const Elf32_Rela = extern struct {
@ -839,10 +839,10 @@ pub const Elf32_Rela = extern struct {
r_addend: Elf32_Sword,
pub inline fn r_sym(self: @This()) u24 {
return @as(u24, @truncate(self.r_info >> 8));
return @truncate(self.r_info >> 8);
}
pub inline fn r_type(self: @This()) u8 {
return @as(u8, @truncate(self.r_info));
return @truncate(self.r_info);
}
};
pub const Elf64_Rela = extern struct {
@ -851,10 +851,10 @@ pub const Elf64_Rela = extern struct {
r_addend: Elf64_Sxword,
pub inline fn r_sym(self: @This()) u32 {
return @as(u32, @truncate(self.r_info >> 32));
return @truncate(self.r_info >> 32);
}
pub inline fn r_type(self: @This()) u32 {
return @as(u32, @truncate(self.r_info));
return @truncate(self.r_info);
}
};
pub const Elf32_Dyn = extern struct {

View File

@ -3123,10 +3123,10 @@ fn varDecl(
if (nodeMayAppendToErrorTrace(tree, var_decl.ast.init_node))
_ = try gz.addSaveErrRetIndex(.{ .if_of_error_type = init_inst });
if (resolve_inferred_alloc != .none) {
const const_ptr = if (resolve_inferred_alloc != .none) p: {
_ = try gz.addUnNode(.resolve_inferred_alloc, resolve_inferred_alloc, node);
}
const const_ptr = try gz.addUnNode(.make_ptr_const, var_ptr, node);
break :p var_ptr;
} else try gz.addUnNode(.make_ptr_const, var_ptr, node);
try gz.addDbgVar(.dbg_var_ptr, ident_name, const_ptr);
@ -3533,7 +3533,9 @@ fn assignDestructureMaybeDecls(
else => unreachable,
};
// If the alloc was const, make it const.
const var_ptr = if (is_const) make_const: {
const var_ptr = if (is_const and full.ast.type_node != 0) make_const: {
// Note that we don't do this if type_node == 0 since `resolve_inferred_alloc`
// handles it for us.
break :make_const try gz.addUnNode(.make_ptr_const, raw_ptr, node);
} else raw_ptr;
const name_token = full.ast.mut_token + 1;

View File

@ -3545,6 +3545,7 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = .{},
.anon_decl_deps = .{},
.aligned_anon_decls = .{},
};
defer {
dg.ctypes.deinit(gpa);

View File

@ -1074,7 +1074,7 @@ pub const Key = union(enum) {
decl: Module.Decl.Index,
mut_decl: MutDecl,
anon_decl: Index,
anon_decl: AnonDecl,
comptime_field: Index,
int: Index,
eu_payload: Index,
@ -1090,6 +1090,14 @@ pub const Key = union(enum) {
base: Index,
index: u64,
};
pub const AnonDecl = extern struct {
val: Index,
/// Contains the canonical pointer type of the anonymous
/// declaration. This may equal `ty` of the `Ptr` or it may be
/// different. Importantly, when lowering the anonymous decl,
/// the original pointer type alignment must be used.
orig_ty: Index,
};
};
};
@ -1231,7 +1239,8 @@ pub const Key = union(enum) {
common ++ asBytes(&x.decl) ++ asBytes(&x.runtime_index),
),
.anon_decl,
.anon_decl => |x| Hash.hash(seed2, common ++ asBytes(&x)),
.int,
.eu_payload,
.opt_payload,
@ -1500,7 +1509,8 @@ pub const Key = union(enum) {
return switch (a_info.addr) {
.decl => |a_decl| a_decl == b_info.addr.decl,
.mut_decl => |a_mut_decl| std.meta.eql(a_mut_decl, b_info.addr.mut_decl),
.anon_decl => |a_decl| a_decl == b_info.addr.anon_decl,
.anon_decl => |ad| ad.val == b_info.addr.anon_decl.val and
ad.orig_ty == b_info.addr.anon_decl.orig_ty,
.int => |a_int| a_int == b_info.addr.int,
.eu_payload => |a_eu_payload| a_eu_payload == b_info.addr.eu_payload,
.opt_payload => |a_opt_payload| a_opt_payload == b_info.addr.opt_payload,
@ -2133,6 +2143,7 @@ pub const Index = enum(u32) {
ptr_decl: struct { data: *PtrDecl },
ptr_mut_decl: struct { data: *PtrMutDecl },
ptr_anon_decl: struct { data: *PtrAnonDecl },
ptr_anon_decl_aligned: struct { data: *PtrAnonDeclAligned },
ptr_comptime_field: struct { data: *PtrComptimeField },
ptr_int: struct { data: *PtrBase },
ptr_eu_payload: struct { data: *PtrBase },
@ -2583,8 +2594,16 @@ pub const Tag = enum(u8) {
/// data is extra index of `PtrMutDecl`, which contains the type and address.
ptr_mut_decl,
/// A pointer to an anonymous decl.
/// data is extra index of `PtrAnonDecl`, which contains the type and decl value.
/// data is extra index of `PtrAnonDecl`, which contains the pointer type and decl value.
/// The alignment of the anonymous decl is communicated via the pointer type.
ptr_anon_decl,
/// A pointer to an anonymous decl.
/// data is extra index of `PtrAnonDeclAligned`, which contains the pointer
/// type and decl value.
/// The original pointer type is also provided, which will be different from `ty`.
/// This encoding is only used when a pointer to an anonymous decl is
/// coerced to a different pointer type with a different alignment.
ptr_anon_decl_aligned,
/// data is extra index of `PtrComptimeField`, which contains the pointer type and field value.
ptr_comptime_field,
/// A pointer with an integer value.
@ -2781,6 +2800,7 @@ pub const Tag = enum(u8) {
.ptr_decl => PtrDecl,
.ptr_mut_decl => PtrMutDecl,
.ptr_anon_decl => PtrAnonDecl,
.ptr_anon_decl_aligned => PtrAnonDeclAligned,
.ptr_comptime_field => PtrComptimeField,
.ptr_int => PtrBase,
.ptr_eu_payload => PtrBase,
@ -3383,6 +3403,13 @@ pub const PtrAnonDecl = struct {
val: Index,
};
pub const PtrAnonDeclAligned = struct {
ty: Index,
val: Index,
/// Must be nonequal to `ty`. Only the alignment from this value is important.
orig_ty: Index,
};
pub const PtrMutDecl = struct {
ty: Index,
decl: Module.Decl.Index,
@ -3736,7 +3763,20 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
const info = ip.extraData(PtrAnonDecl, data);
return .{ .ptr = .{
.ty = info.ty,
.addr = .{ .anon_decl = info.val },
.addr = .{ .anon_decl = .{
.val = info.val,
.orig_ty = info.ty,
} },
} };
},
.ptr_anon_decl_aligned => {
const info = ip.extraData(PtrAnonDeclAligned, data);
return .{ .ptr = .{
.ty = info.ty,
.addr = .{ .anon_decl = .{
.val = info.val,
.orig_ty = info.orig_ty,
} },
} };
},
.ptr_comptime_field => {
@ -3817,7 +3857,17 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
} };
},
.ptr_anon_decl => .{
.anon_decl = ip.extraData(PtrAnonDecl, ptr_item.data).val,
.anon_decl = .{
.val = ip.extraData(PtrAnonDecl, ptr_item.data).val,
.orig_ty = info.ty,
},
},
.ptr_anon_decl_aligned => b: {
const sub_info = ip.extraData(PtrAnonDeclAligned, ptr_item.data);
break :b .{ .anon_decl = .{
.val = sub_info.val,
.orig_ty = sub_info.orig_ty,
} };
},
.ptr_comptime_field => .{
.comptime_field = ip.extraData(PtrComptimeField, ptr_item.data).field_val,
@ -4571,13 +4621,22 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
.runtime_index = mut_decl.runtime_index,
}),
}),
.anon_decl => |anon_decl| ip.items.appendAssumeCapacity(.{
.tag = .ptr_anon_decl,
.data = try ip.addExtra(gpa, PtrAnonDecl{
.ty = ptr.ty,
.val = anon_decl,
}),
}),
.anon_decl => |anon_decl| ip.items.appendAssumeCapacity(
if (ptrsHaveSameAlignment(ip, ptr.ty, ptr_type, anon_decl.orig_ty)) .{
.tag = .ptr_anon_decl,
.data = try ip.addExtra(gpa, PtrAnonDecl{
.ty = ptr.ty,
.val = anon_decl.val,
}),
} else .{
.tag = .ptr_anon_decl_aligned,
.data = try ip.addExtra(gpa, PtrAnonDeclAligned{
.ty = ptr.ty,
.val = anon_decl.val,
.orig_ty = anon_decl.orig_ty,
}),
},
),
.comptime_field => |field_val| {
assert(field_val != .none);
ip.items.appendAssumeCapacity(.{
@ -7184,6 +7243,7 @@ fn dumpStatsFallible(ip: *const InternPool, arena: Allocator) anyerror!void {
.ptr_decl => @sizeOf(PtrDecl),
.ptr_mut_decl => @sizeOf(PtrMutDecl),
.ptr_anon_decl => @sizeOf(PtrAnonDecl),
.ptr_anon_decl_aligned => @sizeOf(PtrAnonDeclAligned),
.ptr_comptime_field => @sizeOf(PtrComptimeField),
.ptr_int => @sizeOf(PtrBase),
.ptr_eu_payload => @sizeOf(PtrBase),
@ -7314,6 +7374,7 @@ fn dumpAllFallible(ip: *const InternPool) anyerror!void {
.ptr_decl,
.ptr_mut_decl,
.ptr_anon_decl,
.ptr_anon_decl_aligned,
.ptr_comptime_field,
.ptr_int,
.ptr_eu_payload,
@ -7695,6 +7756,7 @@ pub fn typeOf(ip: *const InternPool, index: Index) Index {
inline .ptr_decl,
.ptr_mut_decl,
.ptr_anon_decl,
.ptr_anon_decl_aligned,
.ptr_comptime_field,
.ptr_int,
.ptr_eu_payload,
@ -7855,7 +7917,7 @@ pub fn getBackingAddrTag(ip: *const InternPool, val: Index) ?Key.Ptr.Addr.Tag {
switch (ip.items.items(.tag)[base]) {
.ptr_decl => return .decl,
.ptr_mut_decl => return .mut_decl,
.ptr_anon_decl => return .anon_decl,
.ptr_anon_decl, .ptr_anon_decl_aligned => return .anon_decl,
.ptr_comptime_field => return .comptime_field,
.ptr_int => return .int,
inline .ptr_eu_payload,
@ -8032,6 +8094,7 @@ pub fn zigTypeTagOrPoison(ip: *const InternPool, index: Index) error{GenericPois
.ptr_decl,
.ptr_mut_decl,
.ptr_anon_decl,
.ptr_anon_decl_aligned,
.ptr_comptime_field,
.ptr_int,
.ptr_eu_payload,
@ -8281,3 +8344,12 @@ pub fn addFieldName(
ip.extra.items[names_start + field_index] = @intFromEnum(name);
return null;
}
/// Used only by `get` for pointer values, and mainly intended to use `Tag.ptr_anon_decl`
/// encoding instead of `Tag.ptr_anon_decl_aligned` when possible.
fn ptrsHaveSameAlignment(ip: *InternPool, a_ty: Index, a_info: Key.PtrType, b_ty: Index) bool {
if (a_ty == b_ty) return true;
const b_info = ip.indexToKey(b_ty).ptr_type;
return a_info.flags.alignment == b_info.flags.alignment and
(a_info.child == b_info.child or a_info.flags.alignment != .none);
}

View File

@ -3657,9 +3657,13 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
const elem_ty = ptr_info.child.toType();
if (try sema.resolveComptimeKnownAllocValue(block, alloc, null)) |val| {
var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit();
const new_mut_ptr = try sema.analyzeDeclRef(try anon_decl.finish(elem_ty, val.toValue(), ptr_info.flags.alignment));
const new_mut_ptr = Air.internedToRef((try mod.intern(.{ .ptr = .{
.ty = alloc_ty.toIntern(),
.addr = .{ .anon_decl = .{
.val = val,
.orig_ty = alloc_ty.toIntern(),
} },
} })));
return sema.makePtrConst(block, new_mut_ptr);
}
@ -3668,10 +3672,18 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
implicit_ct: {
const ptr_val = try sema.resolveMaybeUndefVal(alloc) orelse break :implicit_ct;
if (!ptr_val.isComptimeMutablePtr(mod)) {
// It could still be a constant pointer to a decl
const decl_index = ptr_val.pointerDecl(mod) orelse break :implicit_ct;
const decl_val = mod.declPtr(decl_index).val.toIntern();
if (mod.intern_pool.isRuntimeValue(decl_val)) break :implicit_ct;
// It could still be a constant pointer to a decl.
switch (mod.intern_pool.indexToKey(ptr_val.toIntern()).ptr.addr) {
.anon_decl => |anon_decl| {
if (mod.intern_pool.isVariable(anon_decl.val))
break :implicit_ct;
},
else => {
const decl_index = ptr_val.pointerDecl(mod) orelse break :implicit_ct;
const decl_val = mod.declPtr(decl_index).val.toIntern();
if (mod.intern_pool.isRuntimeValue(decl_val)) break :implicit_ct;
},
}
}
return sema.makePtrConst(block, alloc);
}
@ -3911,17 +3923,19 @@ fn finishResolveComptimeKnownAllocValue(sema: *Sema, result_val: InternPool.Inde
return result_val;
}
fn makePtrConst(sema: *Sema, block: *Block, alloc: Air.Inst.Ref) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const alloc_ty = sema.typeOf(alloc);
var ptr_info = alloc_ty.ptrInfo(mod);
fn makePtrTyConst(sema: *Sema, ptr_ty: Type) CompileError!Type {
var ptr_info = ptr_ty.ptrInfo(sema.mod);
ptr_info.flags.is_const = true;
const const_ptr_ty = try sema.ptrType(ptr_info);
return sema.ptrType(ptr_info);
}
fn makePtrConst(sema: *Sema, block: *Block, alloc: Air.Inst.Ref) CompileError!Air.Inst.Ref {
const alloc_ty = sema.typeOf(alloc);
const const_ptr_ty = try sema.makePtrTyConst(alloc_ty);
// Detect if a comptime value simply needs to have its type changed.
if (try sema.resolveMaybeUndefVal(alloc)) |val| {
return Air.internedToRef((try mod.getCoerced(val, const_ptr_ty)).toIntern());
return Air.internedToRef((try sema.mod.getCoerced(val, const_ptr_ty)).toIntern());
}
return block.addBitCast(const_ptr_ty, alloc);
@ -4035,6 +4049,7 @@ fn zirResolveInferredAlloc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Com
defer tracy.end();
const mod = sema.mod;
const gpa = sema.gpa;
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();
const ty_src: LazySrcLoc = .{ .node_offset_var_decl_ty = inst_data.src_node };
@ -4100,11 +4115,14 @@ fn zirResolveInferredAlloc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Com
if (!ia1.is_const) {
try sema.validateVarType(block, ty_src, final_elem_ty, false);
} else if (try sema.resolveComptimeKnownAllocValue(block, ptr, final_ptr_ty)) |val| {
var anon_decl = try block.startAnonDecl();
defer anon_decl.deinit();
const new_decl_index = try anon_decl.finish(final_elem_ty, val.toValue(), ia1.alignment);
const new_mut_ptr = Air.refToInterned(try sema.analyzeDeclRef(new_decl_index)).?.toValue();
const new_const_ptr = (try mod.getCoerced(new_mut_ptr, final_ptr_ty)).toIntern();
const const_ptr_ty = (try sema.makePtrTyConst(final_ptr_ty)).toIntern();
const new_const_ptr = try mod.intern(.{ .ptr = .{
.ty = const_ptr_ty,
.addr = .{ .anon_decl = .{
.val = val,
.orig_ty = const_ptr_ty,
} },
} });
// Remap the ZIR operand to the resolved pointer value
sema.inst_map.putAssumeCapacity(Zir.refToIndex(inst_data.operand).?, Air.internedToRef(new_const_ptr));
@ -4127,7 +4145,6 @@ fn zirResolveInferredAlloc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Com
// Now we need to go back over all the store instructions, and do the logic as if
// the new result ptr type was available.
const gpa = sema.gpa;
for (ia2.prongs.items) |placeholder_inst| {
var replacement_block = block.makeSubBlock();
@ -5539,7 +5556,10 @@ fn addStrLitNoAlias(sema: *Sema, bytes: []const u8) CompileError!Air.Inst.Ref {
});
return Air.internedToRef((try mod.intern(.{ .ptr = .{
.ty = ptr_ty.toIntern(),
.addr = .{ .anon_decl = val },
.addr = .{ .anon_decl = .{
.val = val,
.orig_ty = ptr_ty.toIntern(),
} },
} })));
}
@ -30545,7 +30565,8 @@ fn beginComptimePtrLoad(
.ty_without_well_defined_layout = if (!layout_defined) decl.ty else null,
};
},
.anon_decl => |decl_val| blk: {
.anon_decl => |anon_decl| blk: {
const decl_val = anon_decl.val;
if (decl_val.toValue().getVariable(mod) != null) return error.RuntimeLoad;
const decl_ty = ip.typeOf(decl_val).toType();
const decl_tv: TypedValue = .{ .ty = decl_ty, .val = decl_val.toValue() };
@ -36649,6 +36670,7 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value {
.simple_value,
.ptr_decl,
.ptr_anon_decl,
.ptr_anon_decl_aligned,
.ptr_mut_decl,
.ptr_comptime_field,
.ptr_int,

View File

@ -321,7 +321,8 @@ pub fn print(
.val = decl.val,
}, writer, level - 1, mod);
},
.anon_decl => |decl_val| {
.anon_decl => |anon_decl| {
const decl_val = anon_decl.val;
if (level == 0) return writer.print("(anon decl '{d}')", .{
@intFromEnum(decl_val),
});

View File

@ -997,8 +997,9 @@ pub const Inst = struct {
/// is the allocation that needs to have its type inferred.
/// Uses the `un_node` field. The AST node is the var decl.
resolve_inferred_alloc,
/// Turns a pointer coming from an `alloc`, `alloc_inferred`, `alloc_inferred_comptime` or
/// `Extended.alloc` into a constant version of the same pointer.
/// Turns a pointer coming from an `alloc` or `Extended.alloc` into a constant
/// version of the same pointer. For inferred allocations this is instead implicitly
/// handled by the `resolve_inferred_alloc` instruction.
/// Uses the `un_node` union field.
make_ptr_const,

View File

@ -3139,16 +3139,22 @@ fn lowerParentPtrDecl(func: *CodeGen, ptr_val: Value, decl_index: Module.Decl.In
return func.lowerDeclRefValue(.{ .ty = ptr_ty, .val = ptr_val }, decl_index, offset);
}
fn lowerAnonDeclRef(func: *CodeGen, anon_decl: InternPool.Index, offset: u32) InnerError!WValue {
fn lowerAnonDeclRef(
func: *CodeGen,
anon_decl: InternPool.Key.Ptr.Addr.AnonDecl,
offset: u32,
) InnerError!WValue {
const mod = func.bin_file.base.options.module.?;
const ty = mod.intern_pool.typeOf(anon_decl).toType();
const decl_val = anon_decl.val;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const is_fn_body = ty.zigTypeTag(mod) == .Fn;
if (!is_fn_body and !ty.hasRuntimeBitsIgnoreComptime(mod)) {
return WValue{ .imm32 = 0xaaaaaaaa };
}
const res = try func.bin_file.lowerAnonDecl(anon_decl, func.decl.srcLoc(mod));
const decl_align = mod.intern_pool.indexToKey(anon_decl.orig_ty).ptr_type.flags.alignment;
const res = try func.bin_file.lowerAnonDecl(decl_val, decl_align, func.decl.srcLoc(mod));
switch (res) {
.ok => {},
.fail => |em| {
@ -3156,7 +3162,7 @@ fn lowerAnonDeclRef(func: *CodeGen, anon_decl: InternPool.Index, offset: u32) In
return error.CodegenFail;
},
}
const target_atom_index = func.bin_file.anon_decls.get(anon_decl).?;
const target_atom_index = func.bin_file.anon_decls.get(decl_val).?;
const target_sym_index = func.bin_file.getAtom(target_atom_index).getSymbolIndex().?;
if (is_fn_body) {
return WValue{ .function_index = target_sym_index };

View File

@ -713,7 +713,7 @@ const RelocInfo = struct {
fn lowerAnonDeclRef(
bin_file: *link.File,
src_loc: Module.SrcLoc,
decl_val: InternPool.Index,
anon_decl: InternPool.Key.Ptr.Addr.AnonDecl,
code: *std.ArrayList(u8),
debug_output: DebugInfoOutput,
reloc_info: RelocInfo,
@ -723,6 +723,7 @@ fn lowerAnonDeclRef(
const mod = bin_file.options.module.?;
const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
const decl_val = anon_decl.val;
const decl_ty = mod.intern_pool.typeOf(decl_val).toType();
const is_fn_body = decl_ty.zigTypeTag(mod) == .Fn;
if (!is_fn_body and !decl_ty.hasRuntimeBits(mod)) {
@ -730,7 +731,8 @@ fn lowerAnonDeclRef(
return Result.ok;
}
const res = try bin_file.lowerAnonDecl(decl_val, src_loc);
const decl_align = mod.intern_pool.indexToKey(anon_decl.orig_ty).ptr_type.flags.alignment;
const res = try bin_file.lowerAnonDecl(decl_val, decl_align, src_loc);
switch (res) {
.ok => {},
.fail => |em| return .{ .fail = em },

View File

@ -531,6 +531,7 @@ pub const DeclGen = struct {
/// Keeps track of anonymous decls that need to be rendered before this
/// (named) Decl in the output C code.
anon_decl_deps: std.AutoArrayHashMapUnmanaged(InternPool.Index, C.DeclBlock),
aligned_anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment),
fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
@setCold(true);
@ -548,11 +549,12 @@ pub const DeclGen = struct {
writer: anytype,
ty: Type,
ptr_val: Value,
decl_val: InternPool.Index,
anon_decl: InternPool.Key.Ptr.Addr.AnonDecl,
location: ValueRenderLocation,
) error{ OutOfMemory, AnalysisFail }!void {
const mod = dg.module;
const ip = &mod.intern_pool;
const decl_val = anon_decl.val;
const decl_ty = ip.typeOf(decl_val).toType();
// Render an undefined pointer if we have a pointer to a zero-bit or comptime type.
@ -592,8 +594,23 @@ pub const DeclGen = struct {
// Indicate that the anon decl should be rendered to the output so that
// our reference above is not undefined.
const ptr_type = ip.indexToKey(anon_decl.orig_ty).ptr_type;
const gop = try dg.anon_decl_deps.getOrPut(dg.gpa, decl_val);
if (!gop.found_existing) gop.value_ptr.* = .{};
// Only insert an alignment entry if the alignment is greater than ABI
// alignment. If there is already an entry, keep the greater alignment.
const explicit_alignment = ptr_type.flags.alignment;
if (explicit_alignment != .none) {
const abi_alignment = ptr_type.child.toType().abiAlignment(mod);
if (explicit_alignment.compareStrict(.gt, abi_alignment)) {
const aligned_gop = try dg.aligned_anon_decls.getOrPut(dg.gpa, decl_val);
aligned_gop.value_ptr.* = if (aligned_gop.found_existing)
aligned_gop.value_ptr.maxStrict(explicit_alignment)
else
explicit_alignment;
}
}
}
fn renderDeclValue(
@ -651,7 +668,7 @@ pub const DeclGen = struct {
switch (ptr.addr) {
.decl => |d| try dg.renderDeclValue(writer, ptr_ty, ptr_val.toValue(), d, location),
.mut_decl => |md| try dg.renderDeclValue(writer, ptr_ty, ptr_val.toValue(), md.decl, location),
.anon_decl => |decl_val| try dg.renderAnonDeclValue(writer, ptr_ty, ptr_val.toValue(), decl_val, location),
.anon_decl => |anon_decl| try dg.renderAnonDeclValue(writer, ptr_ty, ptr_val.toValue(), anon_decl, location),
.int => |int| {
try writer.writeByte('(');
try dg.renderCType(writer, ptr_cty);

View File

@ -3049,10 +3049,19 @@ pub const Object = struct {
o: *Object,
decl_val: InternPool.Index,
llvm_addr_space: Builder.AddrSpace,
alignment: InternPool.Alignment,
) Error!Builder.Variable.Index {
assert(alignment != .none);
// TODO: Add address space to the anon_decl_map
const gop = try o.anon_decl_map.getOrPut(o.gpa, decl_val);
if (gop.found_existing) return gop.value_ptr.ptr(&o.builder).kind.variable;
if (gop.found_existing) {
// Keep the greater of the two alignments.
const variable_index = gop.value_ptr.ptr(&o.builder).kind.variable;
const old_alignment = InternPool.Alignment.fromLlvm(variable_index.getAlignment(&o.builder));
const max_alignment = old_alignment.maxStrict(alignment);
variable_index.setAlignment(max_alignment.toLlvm(), &o.builder);
return variable_index;
}
errdefer assert(o.anon_decl_map.remove(decl_val));
const mod = o.module;
@ -3068,6 +3077,7 @@ pub const Object = struct {
try variable_index.setInitializer(try o.lowerValue(decl_val), &o.builder);
variable_index.setLinkage(.internal, &o.builder);
variable_index.setUnnamedAddr(.unnamed_addr, &o.builder);
variable_index.setAlignment(alignment.toLlvm(), &o.builder);
return variable_index;
}
@ -4250,13 +4260,6 @@ pub const Object = struct {
return o.builder.bigIntConst(try o.builder.intType(ty.intInfo(mod).bits), bigint);
}
fn lowerParentPtrAnonDecl(o: *Object, decl_val: InternPool.Index) Error!Builder.Constant {
const mod = o.module;
const decl_ty = mod.intern_pool.typeOf(decl_val).toType();
const ptr_ty = try mod.singleMutPtrType(decl_ty);
return o.lowerAnonDeclRef(ptr_ty, decl_val);
}
fn lowerParentPtrDecl(o: *Object, decl_index: Module.Decl.Index) Allocator.Error!Builder.Constant {
const mod = o.module;
const decl = mod.declPtr(decl_index);
@ -4272,7 +4275,7 @@ pub const Object = struct {
return switch (ptr.addr) {
.decl => |decl| try o.lowerParentPtrDecl(decl),
.mut_decl => |mut_decl| try o.lowerParentPtrDecl(mut_decl.decl),
.anon_decl => |anon_decl| try o.lowerParentPtrAnonDecl(anon_decl),
.anon_decl => |ad| try o.lowerAnonDeclRef(ad.orig_ty.toType(), ad),
.int => |int| try o.lowerIntAsPtr(int),
.eu_payload => |eu_ptr| {
const parent_ptr = try o.lowerParentPtr(eu_ptr.toValue());
@ -4391,10 +4394,11 @@ pub const Object = struct {
fn lowerAnonDeclRef(
o: *Object,
ptr_ty: Type,
decl_val: InternPool.Index,
anon_decl: InternPool.Key.Ptr.Addr.AnonDecl,
) Error!Builder.Constant {
const mod = o.module;
const ip = &mod.intern_pool;
const decl_val = anon_decl.val;
const decl_ty = ip.typeOf(decl_val).toType();
const target = mod.getTarget();
@ -4413,9 +4417,10 @@ pub const Object = struct {
if (is_fn_body)
@panic("TODO");
const addr_space = target_util.defaultAddressSpace(target, .global_constant);
const llvm_addr_space = toLlvmAddressSpace(addr_space, target);
const llvm_global = (try o.resolveGlobalAnonDecl(decl_val, llvm_addr_space)).ptrConst(&o.builder).global;
const orig_ty = anon_decl.orig_ty.toType();
const llvm_addr_space = toLlvmAddressSpace(orig_ty.ptrAddressSpace(mod), target);
const alignment = orig_ty.ptrAlignment(mod);
const llvm_global = (try o.resolveGlobalAnonDecl(decl_val, llvm_addr_space, alignment)).ptrConst(&o.builder).global;
const llvm_val = try o.builder.convConst(
.unneeded,

View File

@ -2477,6 +2477,12 @@ pub const Variable = struct {
self.ptr(builder).alignment = alignment;
}
pub fn getAlignment(self: Index, builder: *Builder) Alignment {
if (builder.useLibLlvm())
return Alignment.fromByteUnits(self.toLlvm(builder).getAlignment());
return self.ptr(builder).alignment;
}
pub fn toLlvm(self: Index, builder: *const Builder) *llvm.Value {
return self.ptrConst(builder).global.toLlvm(builder);
}

View File

@ -273,6 +273,9 @@ pub const Value = opaque {
pub const setAlignment = LLVMSetAlignment;
extern fn LLVMSetAlignment(V: *Value, Bytes: c_uint) void;
pub const getAlignment = LLVMGetAlignment;
extern fn LLVMGetAlignment(V: *Value) c_uint;
pub const setFunctionCallConv = LLVMSetFunctionCallConv;
extern fn LLVMSetFunctionCallConv(Fn: *Value, CC: CallConv) void;

View File

@ -959,12 +959,17 @@ const DeclGen = struct {
}
}
fn constantAnonDeclRef(self: *DeclGen, ty: Type, decl_val: InternPool.Index) !IdRef {
fn constantAnonDeclRef(
self: *DeclGen,
ty: Type,
anon_decl: InternPool.Key.Ptr.Addr.AnonDecl,
) !IdRef {
// TODO: Merge this function with constantDeclRef.
const mod = self.module;
const ip = &mod.intern_pool;
const ty_ref = try self.resolveType(ty, .direct);
const decl_val = anon_decl.val;
const decl_ty = ip.typeOf(decl_val).toType();
if (decl_val.toValue().getFunction(mod)) |func| {

View File

@ -940,15 +940,15 @@ pub const File = struct {
pub const LowerResult = @import("codegen.zig").Result;
pub fn lowerAnonDecl(base: *File, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !LowerResult {
pub fn lowerAnonDecl(base: *File, decl_val: InternPool.Index, decl_align: InternPool.Alignment, src_loc: Module.SrcLoc) !LowerResult {
if (build_options.only_c) unreachable;
switch (base.tag) {
.coff => return @fieldParentPtr(Coff, "base", base).lowerAnonDecl(decl_val, src_loc),
.elf => return @fieldParentPtr(Elf, "base", base).lowerAnonDecl(decl_val, src_loc),
.macho => return @fieldParentPtr(MachO, "base", base).lowerAnonDecl(decl_val, src_loc),
.coff => return @fieldParentPtr(Coff, "base", base).lowerAnonDecl(decl_val, decl_align, src_loc),
.elf => return @fieldParentPtr(Elf, "base", base).lowerAnonDecl(decl_val, decl_align, src_loc),
.macho => return @fieldParentPtr(MachO, "base", base).lowerAnonDecl(decl_val, decl_align, src_loc),
.plan9 => return @fieldParentPtr(Plan9, "base", base).lowerAnonDecl(decl_val, src_loc),
.c => unreachable,
.wasm => return @fieldParentPtr(Wasm, "base", base).lowerAnonDecl(decl_val, src_loc),
.wasm => return @fieldParentPtr(Wasm, "base", base).lowerAnonDecl(decl_val, decl_align, src_loc),
.spirv => unreachable,
.nvptx => unreachable,
}

View File

@ -7,6 +7,7 @@ const fs = std.fs;
const C = @This();
const Module = @import("../Module.zig");
const InternPool = @import("../InternPool.zig");
const Alignment = InternPool.Alignment;
const Compilation = @import("../Compilation.zig");
const codegen = @import("../codegen/c.zig");
const link = @import("../link.zig");
@ -30,6 +31,10 @@ string_bytes: std.ArrayListUnmanaged(u8) = .{},
/// Tracks all the anonymous decls that are used by all the decls so they can
/// be rendered during flush().
anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, DeclBlock) = .{},
/// Sparse set of anon decls that are overaligned. Underaligned anon decls are
/// lowered the same as ABI-aligned anon decls. The keys here are a subset of
/// the keys of `anon_decls`.
aligned_anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment) = .{},
/// Optimization, `updateDecl` reuses this buffer rather than creating a new
/// one with every call.
@ -125,6 +130,7 @@ pub fn deinit(self: *C) void {
db.deinit(gpa);
}
self.anon_decls.deinit(gpa);
self.aligned_anon_decls.deinit(gpa);
self.string_bytes.deinit(gpa);
self.fwd_decl_buf.deinit(gpa);
@ -179,6 +185,7 @@ pub fn updateFunc(
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = self.anon_decls,
.aligned_anon_decls = self.aligned_anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@ -189,6 +196,7 @@ pub fn updateFunc(
function.object.indent_writer = .{ .underlying_writer = function.object.code.writer() };
defer {
self.anon_decls = function.object.dg.anon_decl_deps;
self.aligned_anon_decls = function.object.dg.aligned_anon_decls;
fwd_decl.* = function.object.dg.fwd_decl.moveToUnmanaged();
code.* = function.object.code.moveToUnmanaged();
function.deinit();
@ -232,6 +240,7 @@ fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = .{},
.anon_decl_deps = self.anon_decls,
.aligned_anon_decls = self.aligned_anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@ -240,6 +249,7 @@ fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
defer {
self.anon_decls = object.dg.anon_decl_deps;
self.aligned_anon_decls = object.dg.aligned_anon_decls;
object.dg.ctypes.deinit(object.dg.gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();
@ -250,7 +260,8 @@ fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
.val = anon_decl.toValue(),
};
const c_value: codegen.CValue = .{ .constant = anon_decl };
codegen.genDeclValue(&object, tv, false, c_value, .none, .none) catch |err| switch (err) {
const alignment: Alignment = self.aligned_anon_decls.get(anon_decl) orelse .none;
codegen.genDeclValue(&object, tv, false, c_value, alignment, .none) catch |err| switch (err) {
error.AnalysisFail => {
@panic("TODO: C backend AnalysisFail on anonymous decl");
//try module.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
@ -296,6 +307,7 @@ pub fn updateDecl(self: *C, module: *Module, decl_index: Module.Decl.Index) !voi
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = self.anon_decls,
.aligned_anon_decls = self.aligned_anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@ -303,6 +315,7 @@ pub fn updateDecl(self: *C, module: *Module, decl_index: Module.Decl.Index) !voi
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
self.anon_decls = object.dg.anon_decl_deps;
self.aligned_anon_decls = object.dg.aligned_anon_decls;
object.dg.ctypes.deinit(object.dg.gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();
@ -602,6 +615,7 @@ fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = self.anon_decls,
.aligned_anon_decls = self.aligned_anon_decls,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@ -609,6 +623,7 @@ fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
self.anon_decls = object.dg.anon_decl_deps;
self.aligned_anon_decls = object.dg.aligned_anon_decls;
object.dg.ctypes.deinit(gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();
@ -642,6 +657,7 @@ fn flushLazyFn(
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
.anon_decl_deps = .{},
.aligned_anon_decls = .{},
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@ -651,6 +667,7 @@ fn flushLazyFn(
// If this assert trips just handle the anon_decl_deps the same as
// `updateFunc()` does.
assert(object.dg.anon_decl_deps.count() == 0);
assert(object.dg.aligned_anon_decls.count() == 0);
object.dg.ctypes.deinit(gpa);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
code.* = object.code.moveToUnmanaged();

View File

@ -1091,7 +1091,7 @@ pub fn lowerUnnamedConst(self: *Coff, tv: TypedValue, decl_index: Module.Decl.In
const index = unnamed_consts.items.len;
const sym_name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(sym_name);
const atom_index = switch (try self.lowerConst(sym_name, tv, self.rdata_section_index.?, decl.srcLoc(mod))) {
const atom_index = switch (try self.lowerConst(sym_name, tv, tv.ty.abiAlignment(mod), self.rdata_section_index.?, decl.srcLoc(mod))) {
.ok => |atom_index| atom_index,
.fail => |em| {
decl.analysis = .codegen_failure;
@ -1109,13 +1109,12 @@ const LowerConstResult = union(enum) {
fail: *Module.ErrorMsg,
};
fn lowerConst(self: *Coff, name: []const u8, tv: TypedValue, sect_id: u16, src_loc: Module.SrcLoc) !LowerConstResult {
fn lowerConst(self: *Coff, name: []const u8, tv: TypedValue, required_alignment: InternPool.Alignment, sect_id: u16, src_loc: Module.SrcLoc) !LowerConstResult {
const gpa = self.base.allocator;
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
const mod = self.base.options.module.?;
const atom_index = try self.createAtom();
const sym = self.getAtom(atom_index).getSymbolPtr(self);
try self.setSymbolName(sym, name);
@ -1129,10 +1128,13 @@ fn lowerConst(self: *Coff, name: []const u8, tv: TypedValue, sect_id: u16, src_l
.fail => |em| return .{ .fail = em },
};
const required_alignment: u32 = @intCast(tv.ty.abiAlignment(mod).toByteUnits(0));
const atom = self.getAtomPtr(atom_index);
atom.size = @as(u32, @intCast(code.len));
atom.getSymbolPtr(self).value = try self.allocateAtom(atom_index, atom.size, required_alignment);
atom.getSymbolPtr(self).value = try self.allocateAtom(
atom_index,
atom.size,
@intCast(required_alignment.toByteUnitsOptional().?),
);
errdefer self.freeAtom(atom_index);
log.debug("allocated atom for {s} at 0x{x}", .{ name, atom.getSymbol(self).value });
@ -1736,7 +1738,7 @@ pub fn getDeclVAddr(self: *Coff, decl_index: Module.Decl.Index, reloc_info: link
return 0;
}
pub fn lowerAnonDecl(self: *Coff, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
pub fn lowerAnonDecl(self: *Coff, decl_val: InternPool.Index, decl_align: InternPool.Alignment, src_loc: Module.SrcLoc) !codegen.Result {
// This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
@ -1747,15 +1749,21 @@ pub fn lowerAnonDecl(self: *Coff, decl_val: InternPool.Index, src_loc: Module.Sr
// to put it in some location.
// ...
const gpa = self.base.allocator;
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const gop = try self.anon_decls.getOrPut(gpa, decl_val);
if (!gop.found_existing) {
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const required_alignment = switch (decl_align) {
.none => ty.abiAlignment(mod),
else => decl_align,
};
if (!gop.found_existing or
!required_alignment.check(self.getAtom(gop.value_ptr.*).getSymbol(self).value))
{
const val = decl_val.toValue();
const tv = TypedValue{ .ty = ty, .val = val };
const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});
defer gpa.free(name);
const res = self.lowerConst(name, tv, self.rdata_section_index.?, src_loc) catch |err| switch (err) {
const res = self.lowerConst(name, tv, required_alignment, self.rdata_section_index.?, src_loc) catch |err| switch (err) {
else => {
// TODO improve error message
const em = try Module.ErrorMsg.create(gpa, src_loc, "lowerAnonDecl failed with error: {s}", .{

View File

@ -87,15 +87,6 @@ pub const DeclState = struct {
self.exprloc_relocs.deinit(self.gpa);
}
fn addExprlocReloc(self: *DeclState, target: u32, offset: u32, is_ptr: bool) !void {
log.debug("{x}: target sym %{d}, via GOT {}", .{ offset, target, is_ptr });
try self.exprloc_relocs.append(self.gpa, .{
.type = if (is_ptr) .got_load else .direct_load,
.target = target,
.offset = offset,
});
}
/// Adds local type relocation of the form: @offset => @this + addend
/// @this signifies the offset within the .debug_abbrev section of the containing atom.
fn addTypeRelocLocal(self: *DeclState, atom_index: Atom.Index, offset: u32, addend: u32) !void {
@ -807,11 +798,25 @@ pub const DeclState = struct {
try dbg_info.append(DW.OP.deref);
}
switch (loc) {
.linker_load => |load_struct| try self.addExprlocReloc(
load_struct.sym_index,
offset,
is_ptr,
),
.linker_load => |load_struct| switch (load_struct.type) {
.direct => {
log.debug("{x}: target sym %{d}", .{ offset, load_struct.sym_index });
try self.exprloc_relocs.append(self.gpa, .{
.type = .direct_load,
.target = load_struct.sym_index,
.offset = offset,
});
},
.got => {
log.debug("{x}: target sym %{d} via GOT", .{ offset, load_struct.sym_index });
try self.exprloc_relocs.append(self.gpa, .{
.type = .got_load,
.target = load_struct.sym_index,
.offset = offset,
});
},
else => {}, // TODO
},
else => {},
}
},

View File

@ -473,7 +473,7 @@ pub fn getDeclVAddr(self: *Elf, decl_index: Module.Decl.Index, reloc_info: link.
return vaddr;
}
pub fn lowerAnonDecl(self: *Elf, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
pub fn lowerAnonDecl(self: *Elf, decl_val: InternPool.Index, decl_align: InternPool.Alignment, src_loc: Module.SrcLoc) !codegen.Result {
// This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
@ -484,15 +484,21 @@ pub fn lowerAnonDecl(self: *Elf, decl_val: InternPool.Index, src_loc: Module.Src
// to put it in some location.
// ...
const gpa = self.base.allocator;
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const gop = try self.anon_decls.getOrPut(gpa, decl_val);
if (!gop.found_existing) {
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const required_alignment = switch (decl_align) {
.none => ty.abiAlignment(mod),
else => decl_align,
};
if (!gop.found_existing or
required_alignment.order(self.symbol(gop.value_ptr.*).atom(self).?.alignment).compare(.gt))
{
const val = decl_val.toValue();
const tv = TypedValue{ .ty = ty, .val = val };
const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});
defer gpa.free(name);
const res = self.lowerConst(name, tv, self.zig_rodata_section_index.?, src_loc) catch |err| switch (err) {
const res = self.lowerConst(name, tv, required_alignment, self.zig_rodata_section_index.?, src_loc) catch |err| switch (err) {
else => {
// TODO improve error message
const em = try Module.ErrorMsg.create(gpa, src_loc, "lowerAnonDecl failed with error: {s}", .{
@ -3479,7 +3485,7 @@ pub fn lowerUnnamedConst(self: *Elf, typed_value: TypedValue, decl_index: Module
const index = unnamed_consts.items.len;
const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(name);
const sym_index = switch (try self.lowerConst(name, typed_value, self.zig_rodata_section_index.?, decl.srcLoc(mod))) {
const sym_index = switch (try self.lowerConst(name, typed_value, typed_value.ty.abiAlignment(mod), self.zig_rodata_section_index.?, decl.srcLoc(mod))) {
.ok => |sym_index| sym_index,
.fail => |em| {
decl.analysis = .codegen_failure;
@ -3502,6 +3508,7 @@ fn lowerConst(
self: *Elf,
name: []const u8,
tv: TypedValue,
required_alignment: InternPool.Alignment,
output_section_index: u16,
src_loc: Module.SrcLoc,
) !LowerConstResult {
@ -3510,7 +3517,6 @@ fn lowerConst(
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
const mod = self.base.options.module.?;
const zig_module = self.file(self.zig_module_index.?).?.zig_module;
const sym_index = try zig_module.addAtom(self);
@ -3524,7 +3530,6 @@ fn lowerConst(
.fail => |em| return .{ .fail = em },
};
const required_alignment = tv.ty.abiAlignment(mod);
const phdr_index = self.phdr_to_shdr_table.get(output_section_index).?;
const local_sym = self.symbol(sym_index);
const name_str_index = try self.strtab.insert(gpa, name);

View File

@ -2196,7 +2196,7 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: Modu
const index = unnamed_consts.items.len;
const name = try std.fmt.allocPrint(gpa, "___unnamed_{s}_{d}", .{ decl_name, index });
defer gpa.free(name);
const atom_index = switch (try self.lowerConst(name, typed_value, self.data_const_section_index.?, decl.srcLoc(mod))) {
const atom_index = switch (try self.lowerConst(name, typed_value, typed_value.ty.abiAlignment(mod), self.data_const_section_index.?, decl.srcLoc(mod))) {
.ok => |atom_index| atom_index,
.fail => |em| {
decl.analysis = .codegen_failure;
@ -2219,6 +2219,7 @@ fn lowerConst(
self: *MachO,
name: []const u8,
tv: TypedValue,
required_alignment: InternPool.Alignment,
sect_id: u8,
src_loc: Module.SrcLoc,
) !LowerConstResult {
@ -2227,8 +2228,6 @@ fn lowerConst(
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
const mod = self.base.options.module.?;
log.debug("allocating symbol indexes for {s}", .{name});
const sym_index = try self.allocateSymbol();
@ -2243,7 +2242,6 @@ fn lowerConst(
.fail => |em| return .{ .fail = em },
};
const required_alignment = tv.ty.abiAlignment(mod);
const atom = self.getAtomPtr(atom_index);
atom.size = code.len;
// TODO: work out logic for disambiguating functions from function pointers
@ -2868,7 +2866,7 @@ pub fn getDeclVAddr(self: *MachO, decl_index: Module.Decl.Index, reloc_info: Fil
return 0;
}
pub fn lowerAnonDecl(self: *MachO, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
pub fn lowerAnonDecl(self: *MachO, decl_val: InternPool.Index, decl_align: InternPool.Alignment, src_loc: Module.SrcLoc) !codegen.Result {
// This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
@ -2879,15 +2877,21 @@ pub fn lowerAnonDecl(self: *MachO, decl_val: InternPool.Index, src_loc: Module.S
// to put it in some location.
// ...
const gpa = self.base.allocator;
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const gop = try self.anon_decls.getOrPut(gpa, decl_val);
if (!gop.found_existing) {
const mod = self.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const required_alignment = switch (decl_align) {
.none => ty.abiAlignment(mod),
else => decl_align,
};
if (!gop.found_existing or
!required_alignment.check(self.getAtom(gop.value_ptr.*).getSymbol(self).n_value))
{
const val = decl_val.toValue();
const tv = TypedValue{ .ty = ty, .val = val };
const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});
defer gpa.free(name);
const res = self.lowerConst(name, tv, self.data_const_section_index.?, src_loc) catch |err| switch (err) {
const res = self.lowerConst(name, tv, required_alignment, self.data_const_section_index.?, src_loc) catch |err| switch (err) {
else => {
// TODO improve error message
const em = try Module.ErrorMsg.create(gpa, src_loc, "lowerAnonDecl failed with error: {s}", .{

View File

@ -1702,25 +1702,30 @@ pub fn getDeclVAddr(
return target_symbol_index;
}
pub fn lowerAnonDecl(wasm: *Wasm, decl_val: InternPool.Index, src_loc: Module.SrcLoc) !codegen.Result {
pub fn lowerAnonDecl(wasm: *Wasm, decl_val: InternPool.Index, decl_align: Alignment, src_loc: Module.SrcLoc) !codegen.Result {
const gop = try wasm.anon_decls.getOrPut(wasm.base.allocator, decl_val);
if (gop.found_existing) {
return .ok;
if (!gop.found_existing) {
const mod = wasm.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const tv: TypedValue = .{ .ty = ty, .val = decl_val.toValue() };
const name = try std.fmt.allocPrintZ(wasm.base.allocator, "__anon_{d}", .{@intFromEnum(decl_val)});
defer wasm.base.allocator.free(name);
switch (try wasm.lowerConst(name, tv, src_loc)) {
.ok => |atom_index| gop.value_ptr.* = atom_index,
.fail => |em| return .{ .fail = em },
}
}
const mod = wasm.base.options.module.?;
const ty = mod.intern_pool.typeOf(decl_val).toType();
const tv: TypedValue = .{ .ty = ty, .val = decl_val.toValue() };
const name = try std.fmt.allocPrintZ(wasm.base.allocator, "__anon_{d}", .{@intFromEnum(decl_val)});
defer wasm.base.allocator.free(name);
switch (try wasm.lowerConst(name, tv, src_loc)) {
.ok => |atom_index| {
gop.value_ptr.* = atom_index;
return .ok;
const atom = wasm.getAtomPtr(gop.value_ptr.*);
atom.alignment = switch (atom.alignment) {
.none => decl_align,
else => switch (decl_align) {
.none => atom.alignment,
else => atom.alignment.maxStrict(decl_align),
},
.fail => |em| return .{ .fail = em },
}
};
return .ok;
}
pub fn getAnonDeclVAddr(wasm: *Wasm, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {

View File

@ -1571,7 +1571,7 @@ pub const Value = struct {
.none => switch (ip.indexToKey(switch (ptr.addr) {
.decl => |decl| mod.declPtr(decl).ty.toIntern(),
.mut_decl => |mut_decl| mod.declPtr(mut_decl.decl).ty.toIntern(),
.anon_decl => |anon_decl| ip.typeOf(anon_decl),
.anon_decl => |anon_decl| ip.typeOf(anon_decl.val),
.comptime_field => |comptime_field| ip.typeOf(comptime_field),
else => unreachable,
})) {
@ -1604,7 +1604,7 @@ pub const Value = struct {
})).toValue(),
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| mod.declPtr(decl).val.maybeElemValue(mod, index),
.anon_decl => |anon_decl| anon_decl.toValue().maybeElemValue(mod, index),
.anon_decl => |anon_decl| anon_decl.val.toValue().maybeElemValue(mod, index),
.mut_decl => |mut_decl| (try mod.declPtr(mut_decl.decl).internValue(mod))
.toValue().maybeElemValue(mod, index),
.int, .eu_payload => null,

View File

@ -37,7 +37,6 @@ test {
_ = @import("behavior/bugs/1500.zig");
_ = @import("behavior/bugs/1607.zig");
_ = @import("behavior/bugs/1735.zig");
_ = @import("behavior/bugs/1741.zig");
_ = @import("behavior/bugs/1851.zig");
_ = @import("behavior/bugs/1914.zig");
_ = @import("behavior/bugs/2006.zig");

View File

@ -15,6 +15,15 @@ test "global variable alignment" {
}
}
test "large alignment of local constant" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // flaky
const x: f32 align(128) = 12.34;
try std.testing.expect(@intFromPtr(&x) % 128 == 0);
}
test "slicing array of length 1 can not assume runtime index is always zero" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

View File

@ -1,11 +0,0 @@
const std = @import("std");
const builtin = @import("builtin");
test "fixed" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // flaky
const x: f32 align(128) = 12.34;
try std.testing.expect(@intFromPtr(&x) % 128 == 0);
}