x86_64: implement error name

Jacob Young 2023-04-01 03:08:55 -04:00
parent ccefa9dbf5
commit 677427bc3a
11 changed files with 529 additions and 42 deletions

View File

@@ -1923,7 +1923,7 @@ fn genSetStackTruncatedOverflowCompare(
     );
     try self.genSetStack(ty, stack_offset, .{ .register = scratch_reg }, .{});
-    try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{
+    try self.genSetStack(Type.u1, stack_offset - overflow_bit_offset, .{
         .register = overflow_reg.to8(),
     }, .{});
 }
@@ -5280,8 +5280,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
     if (self.bin_file.cast(link.File.Elf)) |elf_file| {
         const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl);
-        const atom = elf_file.getAtom(atom_index);
-        const got_addr = atom.getOffsetTableAddress(elf_file);
+        const got_addr = elf_file.getAtom(atom_index).getOffsetTableAddress(elf_file);
         try self.asmMemory(.call, Memory.sib(.qword, .{
             .base = .ds,
             .disp = @intCast(i32, got_addr),
@@ -5289,22 +5288,18 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
     } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
         const atom_index = try coff_file.getOrCreateAtomForDecl(func.owner_decl);
         const sym_index = coff_file.getAtom(atom_index).getSymbolIndex().?;
-        try self.genSetReg(Type.initTag(.usize), .rax, .{
-            .linker_load = .{
+        try self.genSetReg(Type.usize, .rax, .{ .linker_load = .{
             .type = .got,
             .sym_index = sym_index,
-            },
-        });
+        } });
         try self.asmRegister(.call, .rax);
     } else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
         const atom_index = try macho_file.getOrCreateAtomForDecl(func.owner_decl);
         const sym_index = macho_file.getAtom(atom_index).getSymbolIndex().?;
-        try self.genSetReg(Type.initTag(.usize), .rax, .{
-            .linker_load = .{
+        try self.genSetReg(Type.usize, .rax, .{ .linker_load = .{
            .type = .got,
            .sym_index = sym_index,
-            },
-        });
+        } });
         try self.asmRegister(.call, .rax);
     } else if (self.bin_file.cast(link.File.Plan9)) |p9| {
         const decl_block_index = try p9.seeDecl(func.owner_decl);
@@ -5325,7 +5320,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
     const lib_name = mem.sliceTo(extern_fn.lib_name, 0);
     if (self.bin_file.cast(link.File.Coff)) |coff_file| {
         const sym_index = try coff_file.getGlobalSymbol(decl_name, lib_name);
-        try self.genSetReg(Type.initTag(.usize), .rax, .{
+        try self.genSetReg(Type.usize, .rax, .{
             .linker_load = .{
                 .type = .import,
                 .sym_index = sym_index,
@@ -5353,7 +5348,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
     } else {
         assert(ty.zigTypeTag() == .Pointer);
         const mcv = try self.resolveInst(callee);
-        try self.genSetReg(Type.initTag(.usize), .rax, mcv);
+        try self.genSetReg(Type.usize, .rax, mcv);
         try self.asmRegister(.call, .rax);
     }
@@ -7299,7 +7294,7 @@ fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else blk: {
         const stack_offset = @intCast(i32, try self.allocMem(inst, 16, 16));
         try self.genSetStack(ptr_ty, stack_offset, ptr, .{});
-        try self.genSetStack(Type.initTag(.u64), stack_offset - 8, .{ .immediate = array_len }, .{});
+        try self.genSetStack(Type.u64, stack_offset - 8, .{ .immediate = array_len }, .{});
         break :blk .{ .stack_offset = stack_offset };
     };
     return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
@@ -7809,10 +7804,92 @@ fn airTagName(self: *Self, inst: Air.Inst.Index) !void {
 fn airErrorName(self: *Self, inst: Air.Inst.Index) !void {
     const un_op = self.air.instructions.items(.data)[inst].un_op;
-    const operand = try self.resolveInst(un_op);
-    const result: MCValue = if (self.liveness.isUnused(inst)) .dead else {
-        _ = operand;
-        return self.fail("TODO implement airErrorName for x86_64", .{});
+    const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
+        const err_ty = self.air.typeOf(un_op);
+        const err_mcv = try self.resolveInst(un_op);
+        const err_reg = try self.copyToTmpRegister(err_ty, err_mcv);
+        const err_lock = self.register_manager.lockRegAssumeUnused(err_reg);
+        defer self.register_manager.unlockReg(err_lock);
+        const addr_reg = try self.register_manager.allocReg(null, gp);
+        const addr_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
+        defer self.register_manager.unlockReg(addr_lock);
+        if (self.bin_file.cast(link.File.Elf)) |elf_file| {
+            const atom_index = try elf_file.getOrCreateAtomForLazySymbol(
+                .{ .kind = .const_data, .ty = Type.anyerror },
+                4, // dword alignment
+            );
+            const got_addr = elf_file.getAtom(atom_index).getOffsetTableAddress(elf_file);
+            try self.asmRegisterMemory(.mov, addr_reg.to64(), Memory.sib(.qword, .{
+                .base = .ds,
+                .disp = @intCast(i32, got_addr),
+            }));
+        } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
+            const atom_index = try coff_file.getOrCreateAtomForLazySymbol(
+                .{ .kind = .const_data, .ty = Type.anyerror },
+                4, // dword alignment
+            );
+            const sym_index = coff_file.getAtom(atom_index).getSymbolIndex().?;
+            try self.genSetReg(Type.usize, addr_reg, .{ .linker_load = .{
+                .type = .got,
+                .sym_index = sym_index,
+            } });
+        } else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
+            const atom_index = try macho_file.getOrCreateAtomForLazySymbol(
+                .{ .kind = .const_data, .ty = Type.anyerror },
+                4, // dword alignment
+            );
+            const sym_index = macho_file.getAtom(atom_index).getSymbolIndex().?;
+            try self.genSetReg(Type.usize, addr_reg, .{ .linker_load = .{
+                .type = .got,
+                .sym_index = sym_index,
+            } });
+        } else {
+            return self.fail("TODO implement airErrorName for x86_64 {s}", .{@tagName(self.bin_file.tag)});
+        }
+        const start_reg = try self.register_manager.allocReg(null, gp);
+        const start_lock = self.register_manager.lockRegAssumeUnused(start_reg);
+        defer self.register_manager.unlockReg(start_lock);
+        const end_reg = try self.register_manager.allocReg(null, gp);
+        const end_lock = self.register_manager.lockRegAssumeUnused(end_reg);
+        defer self.register_manager.unlockReg(end_lock);
+        try self.truncateRegister(err_ty, err_reg.to32());
+        try self.asmRegisterMemory(.mov, start_reg.to32(), Memory.sib(.dword, .{
+            .base = addr_reg.to64(),
+            .scale_index = .{ .scale = 4, .index = err_reg.to64() },
+            .disp = 0,
+        }));
+        try self.asmRegisterMemory(.mov, end_reg.to32(), Memory.sib(.dword, .{
+            .base = addr_reg.to64(),
+            .scale_index = .{ .scale = 4, .index = err_reg.to64() },
+            .disp = 4,
+        }));
+        try self.asmRegisterRegister(.sub, end_reg.to32(), start_reg.to32());
+        try self.asmRegisterMemory(.lea, start_reg.to64(), Memory.sib(.byte, .{
+            .base = addr_reg.to64(),
+            .scale_index = .{ .scale = 1, .index = start_reg.to64() },
+            .disp = 0,
+        }));
+        try self.asmRegisterMemory(.lea, end_reg.to32(), Memory.sib(.byte, .{
+            .base = end_reg.to64(),
+            .disp = -1,
+        }));
+        const dst_mcv = try self.allocRegOrMem(inst, false);
+        try self.asmMemoryRegister(.mov, Memory.sib(.qword, .{
+            .base = .rbp,
+            .disp = 0 - dst_mcv.stack_offset,
+        }), start_reg.to64());
+        try self.asmMemoryRegister(.mov, Memory.sib(.qword, .{
+            .base = .rbp,
+            .disp = 8 - dst_mcv.stack_offset,
+        }), end_reg.to64());
+        break :result dst_mcv;
     };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
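
The sequence added above indexes the lazy `anyerror` const_data blob: one `u32` start offset per error value followed by the NUL-terminated names, so the name of error `e` begins at `offsets[e]` and its length is `offsets[e + 1] - offsets[e] - 1` (the final `lea ..., -1` drops the 0 sentinel from the slice length). A rough, self-contained Zig model of that lookup, with a hypothetical helper name and little-endian byte order assumed for brevity:

const std = @import("std");

// Hypothetical helper mirroring the mov/sub/lea sequence emitted by airErrorName.
fn errorNameFromTable(table: []const u8, err: u32) []const u8 {
    const start = std.mem.readIntLittle(u32, table[err * 4 ..][0..4]);
    const end = std.mem.readIntLittle(u32, table[(err + 1) * 4 ..][0..4]);
    return table[start .. end - 1]; // exclude the trailing 0 sentinel
}

test "error name table lookup" {
    // Two entries: offsets 8 and 10, then "A\x00" and "BC\x00".
    const table = [_]u8{ 8, 0, 0, 0, 10, 0, 0, 0, 'A', 0, 'B', 'C', 0 };
    try std.testing.expectEqualStrings("A", errorNameFromTable(&table, 0));
}
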
@@ -8046,7 +8123,7 @@ fn limitImmediateType(self: *Self, operand: Air.Inst.Ref, comptime T: type) !MCV
         // This immediate is unsigned.
         const U = std.meta.Int(.unsigned, ti.bits - @boolToInt(ti.signedness == .signed));
         if (imm >= math.maxInt(U)) {
-            return MCValue{ .register = try self.copyToTmpRegister(Type.initTag(.usize), mcv) };
+            return MCValue{ .register = try self.copyToTmpRegister(Type.usize, mcv) };
         }
     },
     else => {},
@@ -8321,8 +8398,8 @@ fn truncateRegister(self: *Self, ty: Type, reg: Register) !void {
     .unsigned => {
         const shift = @intCast(u6, max_reg_bit_width - int_info.bits);
         const mask = (~@as(u64, 0)) >> shift;
-        if (int_info.bits < 32) {
-            try self.genBinOpMir(.@"and", Type.usize, .{ .register = reg }, .{ .immediate = mask });
+        if (int_info.bits <= 32) {
+            try self.genBinOpMir(.@"and", Type.u32, .{ .register = reg }, .{ .immediate = mask });
         } else {
             const tmp_reg = try self.copyToTmpRegister(Type.usize, .{ .immediate = mask });
             try self.genBinOpMir(.@"and", Type.usize, .{ .register = reg }, .{ .register = tmp_reg });

View File

@@ -99,6 +99,47 @@ fn writeFloat(comptime F: type, f: F, target: Target, endian: std.builtin.Endian
     mem.writeInt(Int, code[0..@sizeOf(Int)], int, endian);
 }
+pub fn generateLazySymbol(
+    bin_file: *link.File,
+    src_loc: Module.SrcLoc,
+    lazy_sym: link.File.LazySymbol,
+    code: *std.ArrayList(u8),
+    debug_output: DebugInfoOutput,
+    reloc_info: RelocInfo,
+) CodeGenError!Result {
+    _ = debug_output;
+    _ = reloc_info;
+    const tracy = trace(@src());
+    defer tracy.end();
+    const target = bin_file.options.target;
+    const endian = target.cpu.arch.endian();
+    const mod = bin_file.options.module.?;
+    log.debug("generateLazySymbol: kind = {s}, ty = {}", .{
+        @tagName(lazy_sym.kind),
+        lazy_sym.ty.fmt(mod),
+    });
+    if (lazy_sym.kind == .const_data and lazy_sym.ty.isAnyError()) {
+        const err_names = mod.error_name_list.items;
+        try code.resize(err_names.len * 4);
+        for (err_names, 0..) |err_name, index| {
+            mem.writeInt(u32, code.items[index * 4 ..][0..4], @intCast(u32, code.items.len), endian);
+            try code.ensureUnusedCapacity(err_name.len + 1);
+            code.appendSliceAssumeCapacity(err_name);
+            code.appendAssumeCapacity(0);
+        }
+        return Result.ok;
+    } else return .{ .fail = try ErrorMsg.create(
+        bin_file.allocator,
+        src_loc,
+        "TODO implement generateLazySymbol for {s} {}",
+        .{ @tagName(lazy_sym.kind), lazy_sym.ty.fmt(mod) },
+    ) };
+}
 pub fn generateSymbol(
     bin_file: *link.File,
     src_loc: Module.SrcLoc,
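
generateLazySymbol lays out the `anyerror` name data as a flat blob: first `err_names.len` target-endian `u32` start offsets, then the names back to back, each followed by a 0 byte. A standalone sketch of that layout follows; the function name is illustrative only, and little-endian byte order is fixed for brevity instead of being taken from the target:

const std = @import("std");

// Sketch only: builds the same blob shape as generateLazySymbol's anyerror branch.
fn buildErrorNameTable(gpa: std.mem.Allocator, names: []const []const u8) ![]u8 {
    var code = std.ArrayList(u8).init(gpa);
    errdefer code.deinit();
    try code.resize(names.len * 4); // one u32 start offset per error value
    for (names, 0..) |name, index| {
        // Offset of this name relative to the start of the blob.
        std.mem.writeIntLittle(u32, code.items[index * 4 ..][0..4], @intCast(u32, code.items.len));
        try code.ensureUnusedCapacity(name.len + 1);
        code.appendSliceAssumeCapacity(name);
        code.appendAssumeCapacity(0); // NUL so the name is also usable as a C string
    }
    return code.toOwnedSlice();
}

test "error name blob layout" {
    const blob = try buildErrorNameTable(std.testing.allocator, &.{ "A", "BC" });
    defer std.testing.allocator.free(blob);
    try std.testing.expectEqualSlices(u8, &.{ 8, 0, 0, 0, 10, 0, 0, 0, 'A', 0, 'B', 'C', 0 }, blob);
}
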
@@ -118,9 +159,10 @@ pub fn generateSymbol(
     const target = bin_file.options.target;
     const endian = target.cpu.arch.endian();
+    const mod = bin_file.options.module.?;
     log.debug("generateSymbol: ty = {}, val = {}", .{
-        typed_value.ty.fmtDebug(),
-        typed_value.val.fmtDebug(),
+        typed_value.ty.fmt(mod),
+        typed_value.val.fmtValue(typed_value.ty, mod),
     });
     if (typed_value.val.isUndefDeep()) {
@@ -170,7 +212,6 @@ pub fn generateSymbol(
     },
     .str_lit => {
         const str_lit = typed_value.val.castTag(.str_lit).?.data;
-        const mod = bin_file.options.module.?;
         const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
         try code.ensureUnusedCapacity(bytes.len + 1);
         code.appendSliceAssumeCapacity(bytes);
@@ -300,7 +341,6 @@ pub fn generateSymbol(
     switch (container_ptr.tag()) {
         .decl_ref => {
             const decl_index = container_ptr.castTag(.decl_ref).?.data;
-            const mod = bin_file.options.module.?;
             const decl = mod.declPtr(decl_index);
             const addend = blk: {
                 switch (decl.ty.zigTypeTag()) {
@@ -493,7 +533,6 @@ pub fn generateSymbol(
     const field_vals = typed_value.val.castTag(.aggregate).?.data;
     const abi_size = math.cast(usize, typed_value.ty.abiSize(target)) orelse return error.Overflow;
     const current_pos = code.items.len;
-    const mod = bin_file.options.module.?;
     try code.resize(current_pos + abi_size);
     var bits: u16 = 0;
@@ -570,7 +609,6 @@ pub fn generateSymbol(
     }
     const union_ty = typed_value.ty.cast(Type.Payload.Union).?.data;
-    const mod = bin_file.options.module.?;
     const field_index = typed_value.ty.unionTagFieldIndex(union_obj.tag, mod).?;
     assert(union_ty.haveFieldTypes());
     const field_ty = union_ty.fields.values()[field_index].ty;
@@ -776,7 +814,6 @@ pub fn generateSymbol(
     },
     .str_lit => {
         const str_lit = typed_value.val.castTag(.str_lit).?.data;
-        const mod = bin_file.options.module.?;
         const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
         try code.ensureUnusedCapacity(str_lit.len);
         code.appendSliceAssumeCapacity(bytes);

View File

@@ -687,6 +687,7 @@ pub const File = struct {
         FrameworkNotFound,
         FunctionSignatureMismatch,
         GlobalTypeMismatch,
+        HotSwapUnavailableOnHostOperatingSystem,
         InvalidCharacter,
         InvalidEntryKind,
         InvalidFeatureSet,
@@ -1104,6 +1105,26 @@ pub const File = struct {
         missing_libc: bool = false,
     };
+    pub const LazySymbol = struct {
+        kind: enum { code, const_data },
+        ty: Type,
+        pub const Context = struct {
+            mod: *Module,
+            pub fn hash(ctx: @This(), sym: LazySymbol) u32 {
+                var hasher = std.hash.Wyhash.init(0);
+                std.hash.autoHash(&hasher, sym.kind);
+                sym.ty.hashWithHasher(&hasher, ctx.mod);
+                return @truncate(u32, hasher.final());
+            }
+            pub fn eql(ctx: @This(), lhs: LazySymbol, rhs: LazySymbol, _: usize) bool {
+                return lhs.kind == rhs.kind and lhs.ty.eql(rhs.ty, ctx.mod);
+            }
+        };
+    };
     pub const C = @import("link/C.zig");
     pub const Coff = @import("link/Coff.zig");
     pub const Plan9 = @import("link/Plan9.zig");
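
LazySymbol needs a custom hash context because hashing and comparing a `Type` requires the owning `*Module`; the linker tables below therefore call `getOrPutContext` and friends instead of the context-free map API. The same pattern in a self-contained form, with a stateful context over plain strings (names here are illustrative, not compiler APIs):

const std = @import("std");

// Analogue of LazySymbol.Context: an ArrayHashMap context that carries extra
// state (a case-insensitivity flag instead of a *Module). ArrayHashMap contexts
// return a u32 hash and take the stored element index as eql's fourth parameter.
const NameContext = struct {
    ignore_case: bool,

    pub fn hash(ctx: @This(), name: []const u8) u32 {
        var hasher = std.hash.Wyhash.init(0);
        for (name) |c| std.hash.autoHash(&hasher, if (ctx.ignore_case) std.ascii.toLower(c) else c);
        return @truncate(u32, hasher.final());
    }

    pub fn eql(ctx: @This(), lhs: []const u8, rhs: []const u8, _: usize) bool {
        return if (ctx.ignore_case) std.ascii.eqlIgnoreCase(lhs, rhs) else std.mem.eql(u8, lhs, rhs);
    }
};

test "array hash map with a stateful context" {
    const gpa = std.testing.allocator;
    var map: std.ArrayHashMapUnmanaged([]const u8, u32, NameContext, true) = .{};
    defer map.deinit(gpa);
    try map.putContext(gpa, "Atom", 1, .{ .ignore_case = true });
    try std.testing.expect(map.getContext("ATOM", .{ .ignore_case = true }).? == 1);
}
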

View File

@@ -49,6 +49,9 @@ imports_count_dirty: bool = true,
 /// Virtual address of the entry point procedure relative to image base.
 entry_addr: ?u32 = null,
+/// Table of tracked LazySymbols.
+lazy_syms: LazySymbolTable = .{},
 /// Table of tracked Decls.
 decls: std.AutoArrayHashMapUnmanaged(Module.Decl.Index, DeclMetadata) = .{},
@@ -142,6 +145,18 @@ const Section = struct {
     free_list: std.ArrayListUnmanaged(Atom.Index) = .{},
 };
+const LazySymbolTable = std.ArrayHashMapUnmanaged(
+    link.File.LazySymbol,
+    LazySymbolMetadata,
+    link.File.LazySymbol.Context,
+    true,
+);
+const LazySymbolMetadata = struct {
+    atom: Atom.Index,
+    section: u16,
+};
 const DeclMetadata = struct {
     atom: Atom.Index,
     section: u16,
@@ -1168,6 +1183,100 @@ pub fn updateDecl(self: *Coff, module: *Module, decl_index: Module.Decl.Index) !
     return self.updateDeclExports(module, decl_index, module.getDeclExports(decl_index));
 }
+fn updateLazySymbol(
+    self: *Coff,
+    lazy_sym: link.File.LazySymbol,
+    lazy_metadata: LazySymbolMetadata,
+) !void {
+    const gpa = self.base.allocator;
+    const mod = self.base.options.module.?;
+    var code_buffer = std.ArrayList(u8).init(gpa);
+    defer code_buffer.deinit();
+    const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{
+        @tagName(lazy_sym.kind),
+        lazy_sym.ty.fmt(mod),
+    });
+    defer gpa.free(name);
+    const atom_index = lazy_metadata.atom;
+    const atom = self.getAtomPtr(atom_index);
+    const local_sym_index = atom.getSymbolIndex().?;
+    const src = if (lazy_sym.ty.getOwnerDeclOrNull()) |owner_decl|
+        mod.declPtr(owner_decl).srcLoc()
+    else
+        Module.SrcLoc{
+            .file_scope = undefined,
+            .parent_decl_node = undefined,
+            .lazy = .unneeded,
+        };
+    const res = try codegen.generateLazySymbol(
+        &self.base,
+        src,
+        lazy_sym,
+        &code_buffer,
+        .none,
+        .{ .parent_atom_index = local_sym_index },
+    );
+    const code = switch (res) {
+        .ok => code_buffer.items,
+        .fail => |em| {
+            log.err("{s}", .{em.msg});
+            return error.CodegenFail;
+        },
+    };
+    const required_alignment = atom.alignment;
+    const code_len = @intCast(u32, code.len);
+    const symbol = atom.getSymbolPtr(self);
+    try self.setSymbolName(symbol, name);
+    symbol.section_number = @intToEnum(coff.SectionNumber, lazy_metadata.section + 1);
+    symbol.type = .{ .complex_type = .NULL, .base_type = .NULL };
+    const vaddr = try self.allocateAtom(atom_index, code_len, required_alignment);
+    errdefer self.freeAtom(atom_index);
+    log.debug("allocated atom for {s} at 0x{x}", .{ name, vaddr });
+    log.debug(" (required alignment 0x{x})", .{required_alignment});
+    atom.size = code_len;
+    symbol.value = vaddr;
+    const got_target = SymbolWithLoc{ .sym_index = local_sym_index, .file = null };
+    const got_index = try self.allocateGotEntry(got_target);
+    const got_atom_index = try self.createGotAtom(got_target);
+    const got_atom = self.getAtom(got_atom_index);
+    self.got_entries.items[got_index].sym_index = got_atom.getSymbolIndex().?;
+    try self.writePtrWidthAtom(got_atom_index);
+    self.markRelocsDirtyByTarget(atom.getSymbolWithLoc());
+    try self.writeAtom(atom_index, code);
+}
+pub fn getOrCreateAtomForLazySymbol(
+    self: *Coff,
+    lazy_sym: link.File.LazySymbol,
+    alignment: u32,
+) !Atom.Index {
+    const gop = try self.lazy_syms.getOrPutContext(self.base.allocator, lazy_sym, .{
+        .mod = self.base.options.module.?,
+    });
+    errdefer _ = self.lazy_syms.pop();
+    if (!gop.found_existing) {
+        gop.value_ptr.* = .{
+            .atom = try self.createAtom(),
+            .section = switch (lazy_sym.kind) {
+                .code => self.text_section_index.?,
+                .const_data => self.rdata_section_index.?,
+            },
+        };
+        self.getAtomPtr(gop.value_ptr.atom).alignment = alignment;
+    }
+    return gop.value_ptr.atom;
+}
 pub fn getOrCreateAtomForDecl(self: *Coff, decl_index: Module.Decl.Index) !Atom.Index {
     const gop = try self.decls.getOrPut(self.base.allocator, decl_index);
     if (!gop.found_existing) {
@@ -1498,6 +1607,19 @@ pub fn flushModule(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
     sub_prog_node.activate();
     defer sub_prog_node.end();
+    {
+        var lazy_it = self.lazy_syms.iterator();
+        while (lazy_it.next()) |lazy_entry| {
+            self.updateLazySymbol(
+                lazy_entry.key_ptr.*,
+                lazy_entry.value_ptr.*,
+            ) catch |err| switch (err) {
+                error.CodegenFail => return error.FlushFailure,
+                else => |e| return e,
+            };
+        }
+    }
     const gpa = self.base.allocator;
     while (self.unresolved.popOrNull()) |entry| {

View File

@@ -63,6 +63,12 @@ const Section = struct {
     free_list: std.ArrayListUnmanaged(Atom.Index) = .{},
 };
+const LazySymbolMetadata = struct {
+    atom: Atom.Index,
+    shdr: u16,
+    alignment: u32,
+};
 const DeclMetadata = struct {
     atom: Atom.Index,
     shdr: u16,
@@ -157,6 +163,9 @@ debug_line_header_dirty: bool = false,
 error_flags: File.ErrorFlags = File.ErrorFlags{},
+/// Table of tracked LazySymbols.
+lazy_syms: LazySymbolTable = .{},
 /// Table of tracked Decls.
 decls: std.AutoHashMapUnmanaged(Module.Decl.Index, DeclMetadata) = .{},
@@ -194,6 +203,7 @@ relocs: RelocTable = .{},
 const RelocTable = std.AutoHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(Atom.Reloc));
 const UnnamedConstTable = std.AutoHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(Atom.Index));
+const LazySymbolTable = std.ArrayHashMapUnmanaged(File.LazySymbol, LazySymbolMetadata, File.LazySymbol.Context, true);
 /// When allocating, the ideal_capacity is calculated by
 /// actual_capacity + (actual_capacity / ideal_factor)
@@ -1011,6 +1021,19 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
     sub_prog_node.activate();
     defer sub_prog_node.end();
+    {
+        var lazy_it = self.lazy_syms.iterator();
+        while (lazy_it.next()) |lazy_entry| {
+            self.updateLazySymbol(
+                lazy_entry.key_ptr.*,
+                lazy_entry.value_ptr.*,
+            ) catch |err| switch (err) {
+                error.CodegenFail => return error.FlushFailure,
+                else => |e| return e,
+            };
+        }
+    }
     // TODO This linker code currently assumes there is only 1 compilation unit and it
     // corresponds to the Zig source code.
     const module = self.base.options.module orelse return error.LinkingWithoutZigSourceUnimplemented;
@@ -2344,6 +2367,24 @@ pub fn freeDecl(self: *Elf, decl_index: Module.Decl.Index) void {
     }
 }
+pub fn getOrCreateAtomForLazySymbol(self: *Elf, lazy_sym: File.LazySymbol, alignment: u32) !Atom.Index {
+    const gop = try self.lazy_syms.getOrPutContext(self.base.allocator, lazy_sym, .{
+        .mod = self.base.options.module.?,
+    });
+    errdefer _ = self.lazy_syms.pop();
+    if (!gop.found_existing) {
+        gop.value_ptr.* = .{
+            .atom = try self.createAtom(),
+            .shdr = switch (lazy_sym.kind) {
+                .code => self.text_section_index.?,
+                .const_data => self.rodata_section_index.?,
+            },
+            .alignment = alignment,
+        };
+    }
+    return gop.value_ptr.atom;
+}
 pub fn getOrCreateAtomForDecl(self: *Elf, decl_index: Module.Decl.Index) !Atom.Index {
     const gop = try self.decls.getOrPut(self.base.allocator, decl_index);
     if (!gop.found_existing) {
@@ -2610,6 +2651,79 @@ pub fn updateDecl(self: *Elf, module: *Module, decl_index: Module.Decl.Index) !v
     return self.updateDeclExports(module, decl_index, module.getDeclExports(decl_index));
 }
+fn updateLazySymbol(self: *Elf, lazy_sym: File.LazySymbol, lazy_metadata: LazySymbolMetadata) !void {
+    const gpa = self.base.allocator;
+    const mod = self.base.options.module.?;
+    var code_buffer = std.ArrayList(u8).init(gpa);
+    defer code_buffer.deinit();
+    const name_str_index = blk: {
+        const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{
+            @tagName(lazy_sym.kind),
+            lazy_sym.ty.fmt(mod),
+        });
+        defer gpa.free(name);
+        break :blk try self.shstrtab.insert(gpa, name);
+    };
+    const name = self.shstrtab.get(name_str_index).?;
+    const atom_index = lazy_metadata.atom;
+    const atom = self.getAtom(atom_index);
+    const local_sym_index = atom.getSymbolIndex().?;
+    const src = if (lazy_sym.ty.getOwnerDeclOrNull()) |owner_decl|
+        mod.declPtr(owner_decl).srcLoc()
+    else
+        Module.SrcLoc{
+            .file_scope = undefined,
+            .parent_decl_node = undefined,
+            .lazy = .unneeded,
+        };
+    const res = try codegen.generateLazySymbol(
+        &self.base,
+        src,
+        lazy_sym,
+        &code_buffer,
+        .none,
+        .{ .parent_atom_index = local_sym_index },
+    );
+    const code = switch (res) {
+        .ok => code_buffer.items,
+        .fail => |em| {
+            log.err("{s}", .{em.msg});
+            return error.CodegenFail;
+        },
+    };
+    const shdr_index = lazy_metadata.shdr;
+    const phdr_index = self.sections.items(.phdr_index)[shdr_index];
+    const local_sym = atom.getSymbolPtr(self);
+    local_sym.* = .{
+        .st_name = name_str_index,
+        .st_info = (elf.STB_LOCAL << 4) | elf.STT_OBJECT,
+        .st_other = 0,
+        .st_shndx = shdr_index,
+        .st_value = 0,
+        .st_size = 0,
+    };
+    const required_alignment = lazy_metadata.alignment;
+    const vaddr = try self.allocateAtom(atom_index, code.len, required_alignment);
+    errdefer self.freeAtom(atom_index);
+    log.debug("allocated text block for {s} at 0x{x}", .{ name, vaddr });
+    self.offset_table.items[atom.offset_table_index] = vaddr;
+    local_sym.st_value = vaddr;
+    local_sym.st_size = code.len;
+    try self.writeSymbol(local_sym_index);
+    try self.writeOffsetTableEntry(atom.offset_table_index);
+    const section_offset = vaddr - self.program_headers.items[phdr_index].p_vaddr;
+    const file_offset = self.sections.items(.shdr)[shdr_index].sh_offset + section_offset;
+    try self.base.file.?.pwriteAll(code, file_offset);
+}
 pub fn lowerUnnamedConst(self: *Elf, typed_value: TypedValue, decl_index: Module.Decl.Index) !u32 {
     const gpa = self.base.allocator;

View File

@@ -218,6 +218,9 @@ bindings: BindingTable = .{},
 /// this will be a table indexed by index into the list of Atoms.
 lazy_bindings: BindingTable = .{},
+/// Table of tracked LazySymbols.
+lazy_syms: LazySymbolTable = .{},
 /// Table of tracked Decls.
 decls: std.AutoArrayHashMapUnmanaged(Module.Decl.Index, DeclMetadata) = .{},
@@ -229,6 +232,18 @@ const is_hot_update_compatible = switch (builtin.target.os.tag) {
     else => false,
 };
+const LazySymbolTable = std.ArrayHashMapUnmanaged(
+    link.File.LazySymbol,
+    LazySymbolMetadata,
+    link.File.LazySymbol.Context,
+    true,
+);
+const LazySymbolMetadata = struct {
+    atom: Atom.Index,
+    section: u8,
+};
 const DeclMetadata = struct {
     atom: Atom.Index,
     section: u8,
@@ -497,6 +512,19 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
     sub_prog_node.activate();
     defer sub_prog_node.end();
+    {
+        var lazy_it = self.lazy_syms.iterator();
+        while (lazy_it.next()) |lazy_entry| {
+            self.updateLazySymbol(
+                lazy_entry.key_ptr.*,
+                lazy_entry.value_ptr.*,
+            ) catch |err| switch (err) {
+                error.CodegenFail => return error.FlushFailure,
+                else => |e| return e,
+            };
+        }
+    }
     const module = self.base.options.module orelse return error.LinkingWithoutZigSourceUnimplemented;
     if (self.d_sym) |*d_sym| {
@@ -2163,13 +2191,13 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: Modu
     const name_str_index = blk: {
         const index = unnamed_consts.items.len;
-        const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
+        const name = try std.fmt.allocPrint(gpa, "___unnamed_{s}_{d}", .{ decl_name, index });
         defer gpa.free(name);
         break :blk try self.strtab.insert(gpa, name);
     };
-    const name = self.strtab.get(name_str_index);
-    log.debug("allocating symbol indexes for {?s}", .{name});
+    const name = self.strtab.get(name_str_index).?;
+    log.debug("allocating symbol indexes for {s}", .{name});
     const atom_index = try self.createAtom();
@@ -2202,7 +2230,7 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: Modu
     try unnamed_consts.append(gpa, atom_index);
-    log.debug("allocated atom for {?s} at 0x{x}", .{ name, symbol.n_value });
+    log.debug("allocated atom for {s} at 0x{x}", .{ name, symbol.n_value });
     log.debug(" (required alignment 0x{x})", .{required_alignment});
     try self.writeAtom(atom_index, code);
@@ -2282,6 +2310,100 @@ pub fn updateDecl(self: *MachO, module: *Module, decl_index: Module.Decl.Index)
     try self.updateDeclExports(module, decl_index, module.getDeclExports(decl_index));
 }
+fn updateLazySymbol(self: *MachO, lazy_sym: File.LazySymbol, lazy_metadata: LazySymbolMetadata) !void {
+    const gpa = self.base.allocator;
+    const mod = self.base.options.module.?;
+    var code_buffer = std.ArrayList(u8).init(gpa);
+    defer code_buffer.deinit();
+    const name_str_index = blk: {
+        const name = try std.fmt.allocPrint(gpa, "___lazy_{s}_{}", .{
+            @tagName(lazy_sym.kind),
+            lazy_sym.ty.fmt(mod),
+        });
+        defer gpa.free(name);
+        break :blk try self.strtab.insert(gpa, name);
+    };
+    const name = self.strtab.get(name_str_index).?;
+    const atom_index = lazy_metadata.atom;
+    const atom = self.getAtomPtr(atom_index);
+    const local_sym_index = atom.getSymbolIndex().?;
+    const src = if (lazy_sym.ty.getOwnerDeclOrNull()) |owner_decl|
+        mod.declPtr(owner_decl).srcLoc()
+    else
+        Module.SrcLoc{
+            .file_scope = undefined,
+            .parent_decl_node = undefined,
+            .lazy = .unneeded,
+        };
+    const res = try codegen.generateLazySymbol(
+        &self.base,
+        src,
+        lazy_sym,
+        &code_buffer,
+        .none,
+        .{ .parent_atom_index = local_sym_index },
+    );
+    const code = switch (res) {
+        .ok => code_buffer.items,
+        .fail => |em| {
+            log.err("{s}", .{em.msg});
+            return error.CodegenFail;
+        },
+    };
+    const required_alignment = atom.alignment;
+    const symbol = atom.getSymbolPtr(self);
+    symbol.n_strx = name_str_index;
+    symbol.n_type = macho.N_SECT;
+    symbol.n_sect = lazy_metadata.section + 1;
+    symbol.n_desc = 0;
+    const vaddr = try self.allocateAtom(atom_index, code.len, required_alignment);
+    errdefer self.freeAtom(atom_index);
+    log.debug("allocated atom for {s} at 0x{x}", .{ name, vaddr });
+    log.debug(" (required alignment 0x{x}", .{required_alignment});
+    atom.size = code.len;
+    symbol.n_value = vaddr;
+    const got_target = SymbolWithLoc{ .sym_index = local_sym_index, .file = null };
+    const got_index = try self.allocateGotEntry(got_target);
+    const got_atom_index = try self.createGotAtom(got_target);
+    const got_atom = self.getAtom(got_atom_index);
+    self.got_entries.items[got_index].sym_index = got_atom.getSymbolIndex().?;
+    try self.writePtrWidthAtom(got_atom_index);
+    self.markRelocsDirtyByTarget(atom.getSymbolWithLoc());
+    try self.writeAtom(atom_index, code);
+}
+pub fn getOrCreateAtomForLazySymbol(
+    self: *MachO,
+    lazy_sym: File.LazySymbol,
+    alignment: u32,
+) !Atom.Index {
+    const gop = try self.lazy_syms.getOrPutContext(self.base.allocator, lazy_sym, .{
+        .mod = self.base.options.module.?,
+    });
+    errdefer _ = self.lazy_syms.pop();
+    if (!gop.found_existing) {
+        gop.value_ptr.* = .{
+            .atom = try self.createAtom(),
+            .section = switch (lazy_sym.kind) {
+                .code => self.text_section_index.?,
+                .const_data => self.data_const_section_index.?,
+            },
+        };
+        self.getAtomPtr(gop.value_ptr.atom).alignment = alignment;
+    }
+    return gop.value_ptr.atom;
+}
 pub fn getOrCreateAtomForDecl(self: *MachO, decl_index: Module.Decl.Index) !Atom.Index {
     const gop = try self.decls.getOrPut(self.base.allocator, decl_index);
     if (!gop.found_existing) {

View File

@@ -11,7 +11,6 @@ fn getError2() !void {
 test "`try`ing an if/else expression" {
     if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO

View File

@@ -110,7 +110,6 @@ test "mixing normal and error defers" {
 }
 test "errdefer with payload" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

View File

@@ -575,7 +575,6 @@ fn gimmeItBroke() anyerror {
 }
 test "@errorName sentinel length matches slice length" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -851,7 +850,6 @@ test "catch within a function that calls no errorable functions" {
 test "error from comptime string" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     const name = "Weird error name!";

View File

@@ -468,7 +468,6 @@ test "peer type resolution in nested if expressions" {
 test "cast slice to const slice nested in error union and optional" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     const S = struct {
         fn inner() !?[]u8 {

View File

@@ -419,7 +419,6 @@ test "switch on integer with else capturing expr" {
 }
 test "else prong of switch on error set excludes other cases" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO