diff --git a/src/arch/riscv64/Emit.zig b/src/arch/riscv64/Emit.zig
index a4fadad526..258941f19d 100644
--- a/src/arch/riscv64/Emit.zig
+++ b/src/arch/riscv64/Emit.zig
@@ -56,17 +56,17 @@ pub fn emitMir(emit: *Emit) Error!void {
             const hi_r_type: u32 = @intFromEnum(std.elf.R_RISCV.HI20);
             const lo_r_type: u32 = @intFromEnum(std.elf.R_RISCV.LO12_I);
-            try atom_ptr.addReloc(elf_file, .{
+            try atom_ptr.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = start_offset,
                 .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | hi_r_type,
                 .r_addend = 0,
-            });
+            }, zo);
-            try atom_ptr.addReloc(elf_file, .{
+            try atom_ptr.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = start_offset + 4,
                 .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | lo_r_type,
                 .r_addend = 0,
-            });
+            }, zo);
         },
         .load_tlv_reloc => |symbol| {
             const elf_file = emit.bin_file.cast(.elf).?;
@@ -76,23 +76,23 @@ pub fn emitMir(emit: *Emit) Error!void {
             const R_RISCV = std.elf.R_RISCV;
-            try atom_ptr.addReloc(elf_file, .{
+            try atom_ptr.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = start_offset,
                 .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | @intFromEnum(R_RISCV.TPREL_HI20),
                 .r_addend = 0,
-            });
+            }, zo);
-            try atom_ptr.addReloc(elf_file, .{
+            try atom_ptr.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = start_offset + 4,
                 .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | @intFromEnum(R_RISCV.TPREL_ADD),
                 .r_addend = 0,
-            });
+            }, zo);
-            try atom_ptr.addReloc(elf_file, .{
+            try atom_ptr.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = start_offset + 8,
                 .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | @intFromEnum(R_RISCV.TPREL_LO12_I),
                 .r_addend = 0,
-            });
+            }, zo);
         },
         .call_extern_fn_reloc => |symbol| {
             const elf_file = emit.bin_file.cast(.elf).?;
@@ -101,11 +101,11 @@ pub fn emitMir(emit: *Emit) Error!void {
             const r_type: u32 = @intFromEnum(std.elf.R_RISCV.CALL_PLT);
-            try atom_ptr.addReloc(elf_file, .{
+            try atom_ptr.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = start_offset,
                 .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | r_type,
                 .r_addend = 0,
-            });
+            }, zo);
         },
     };
 }
diff --git a/src/arch/x86_64/Emit.zig b/src/arch/x86_64/Emit.zig
index 579fd00d9d..9fce779862 100644
--- a/src/arch/x86_64/Emit.zig
+++ b/src/arch/x86_64/Emit.zig
@@ -48,11 +48,11 @@ pub fn emitMir(emit: *Emit) Error!void {
             const zo = elf_file.zigObjectPtr().?;
             const atom_ptr = zo.symbol(emit.atom_index).atom(elf_file).?;
             const r_type = @intFromEnum(std.elf.R_X86_64.PLT32);
-            try atom_ptr.addReloc(elf_file, .{
+            try atom_ptr.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = end_offset - 4,
                 .r_info = (@as(u64, @intCast(sym_index)) << 32) | r_type,
                 .r_addend = lowered_relocs[0].off - 4,
-            });
+            }, zo);
         } else if (emit.lower.bin_file.cast(.macho)) |macho_file| {
             // Add relocation to the decl.
             const zo = macho_file.getZigObject().?;
@@ -95,22 +95,22 @@ pub fn emitMir(emit: *Emit) Error!void {
             const zo = elf_file.zigObjectPtr().?;
             const atom = zo.symbol(emit.atom_index).atom(elf_file).?;
             const r_type = @intFromEnum(std.elf.R_X86_64.TLSLD);
-            try atom.addReloc(elf_file, .{
+            try atom.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = end_offset - 4,
                 .r_info = (@as(u64, @intCast(sym_index)) << 32) | r_type,
                 .r_addend = lowered_relocs[0].off - 4,
-            });
+            }, zo);
         },
         .linker_dtpoff => |sym_index| {
             const elf_file = emit.lower.bin_file.cast(.elf).?;
             const zo = elf_file.zigObjectPtr().?;
             const atom = zo.symbol(emit.atom_index).atom(elf_file).?;
             const r_type = @intFromEnum(std.elf.R_X86_64.DTPOFF32);
-            try atom.addReloc(elf_file, .{
+            try atom.addReloc(elf_file.base.comp.gpa, .{
                 .r_offset = end_offset - 4,
                 .r_info = (@as(u64, @intCast(sym_index)) << 32) | r_type,
                 .r_addend = lowered_relocs[0].off,
-            });
+            }, zo);
         },
         .linker_reloc => |sym_index| if (emit.lower.bin_file.cast(.elf)) |elf_file| {
             const zo = elf_file.zigObjectPtr().?;
@@ -121,21 +121,21 @@ pub fn emitMir(emit: *Emit) Error!void {
                 const r_type: u32 = if (sym.flags.is_tls)
                     @intFromEnum(std.elf.R_X86_64.GOTPCREL)
                 else
                     @intFromEnum(std.elf.R_X86_64.PC32);
-                try atom.addReloc(elf_file, .{
+                try atom.addReloc(elf_file.base.comp.gpa, .{
                     .r_offset = end_offset - 4,
                     .r_info = (@as(u64, @intCast(sym_index)) << 32) | r_type,
                     .r_addend = lowered_relocs[0].off - 4,
-                });
+                }, zo);
             } else {
                 const r_type: u32 = if (sym.flags.is_tls)
                     @intFromEnum(std.elf.R_X86_64.TPOFF32)
                 else
                     @intFromEnum(std.elf.R_X86_64.@"32");
-                try atom.addReloc(elf_file, .{
+                try atom.addReloc(elf_file.base.comp.gpa, .{
                     .r_offset = end_offset - 4,
                     .r_info = (@as(u64, @intCast(sym_index)) << 32) | r_type,
                     .r_addend = lowered_relocs[0].off,
-                });
+                }, zo);
             }
         } else if (emit.lower.bin_file.cast(.macho)) |macho_file| {
             const zo = macho_file.getZigObject().?;
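Both emitter files above make the same mechanical change: Atom.addReloc no longer receives the *Elf file, so each backend now passes the general-purpose allocator and the owning ZigObject explicitly. A minimal sketch of the new call shape, reusing names that appear in the hunks above (zo, atom_ptr, start_offset, symbol.sym_index); everything around it is assumed:

// Sketch only (not part of the diff): recording a CALL_PLT relocation with the
// reworked Atom.addReloc signature -- allocator first, owning ZigObject last.
const gpa = elf_file.base.comp.gpa;
const zo = elf_file.zigObjectPtr().?;
const atom_ptr = zo.symbol(emit.atom_index).atom(elf_file).?;
const r_type: u32 = @intFromEnum(std.elf.R_RISCV.CALL_PLT);
try atom_ptr.addReloc(gpa, .{
    .r_offset = start_offset,
    .r_info = (@as(u64, @intCast(symbol.sym_index)) << 32) | r_type,
    .r_addend = 0,
}, zo);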
diff --git a/src/link/Dwarf.zig b/src/link/Dwarf.zig
index 60c0bac4a8..593aed3f68 100644
--- a/src/link/Dwarf.zig
+++ b/src/link/Dwarf.zig
@@ -201,7 +201,7 @@ const StringSection = struct {
 };
 /// A linker section containing a sequence of `Unit`s.
-const Section = struct {
+pub const Section = struct {
     dirty: bool,
     pad_to_ideal: bool,
     alignment: InternPool.Alignment,
@@ -287,7 +287,7 @@ const Section = struct {
         return sec.getUnit(unit).addEntry(sec, dwarf);
     }
-    fn getUnit(sec: *Section, unit: Unit.Index) *Unit {
+    pub fn getUnit(sec: *Section, unit: Unit.Index) *Unit {
         return &sec.units.items[@intFromEnum(unit)];
     }
@@ -368,7 +368,7 @@ const Unit = struct {
         none = std.math.maxInt(u32),
         _,
-        fn unwrap(uio: Optional) ?Index {
+        pub fn unwrap(uio: Optional) ?Index {
             return if (uio != .none) @enumFromInt(@intFromEnum(uio)) else null;
         }
     };
@@ -415,7 +415,7 @@ const Unit = struct {
         return entry;
     }
-    fn getEntry(unit: *Unit, entry: Entry.Index) *Entry {
+    pub fn getEntry(unit: *Unit, entry: Entry.Index) *Entry {
         return &unit.entries.items[@intFromEnum(entry)];
     }
@@ -614,7 +614,7 @@ const Entry = struct {
         none = std.math.maxInt(u32),
         _,
-        fn unwrap(eio: Optional) ?Index {
+        pub fn unwrap(eio: Optional) ?Index {
             return if (eio != .none) @enumFromInt(@intFromEnum(eio)) else null;
         }
     };
@@ -736,7 +736,7 @@ const Entry = struct {
         }
     }
-    fn assertNonEmpty(entry: *Entry, unit: *Unit, sec: *Section, dwarf: *Dwarf) *Entry {
+    pub fn assertNonEmpty(entry: *Entry, unit: *Unit, sec: *Section, dwarf: *Dwarf) *Entry {
         if (entry.len > 0) return entry;
         if (std.debug.runtime_safety) {
             log.err("missing {} from {s}", .{
@@ -1958,11 +1958,10 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
     const loc = tree.tokenLocation(0, tree.nodes.items(.main_token)[decl_inst.data.declaration.src_node]);
     assert(loc.line == zcu.navSrcLine(nav_index));
-    const unit = try dwarf.getUnit(file.mod);
     var wip_nav: WipNav = .{
         .dwarf = dwarf,
         .pt = pt,
-        .unit = unit,
+        .unit = try dwarf.getUnit(file.mod),
         .entry = undefined,
         .any_children = false,
         .func = .none,
@@ -1981,7 +1980,7 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
     switch (ip.indexToKey(nav_val.toIntern())) {
         .func => |func| {
             if (nav_gop.found_existing) {
-                const unit_ptr = dwarf.debug_info.section.getUnit(unit);
+                const unit_ptr = dwarf.debug_info.section.getUnit(wip_nav.unit);
                 const entry_ptr = unit_ptr.getEntry(nav_gop.value_ptr.*);
                 if (entry_ptr.len >= AbbrevCode.decl_bytes) {
                     var abbrev_code_buf: [AbbrevCode.decl_bytes]u8 = undefined;
@@ -2000,7 +1999,7 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
                     }
                 }
                 entry_ptr.clear();
-            } else nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            } else nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
             wip_nav.entry = nav_gop.value_ptr.*;
             const parent_type, const accessibility: u8 = if (nav.analysis_owner.unwrap()) |cau| parent: {
@@ -2074,8 +2073,14 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
             if (type_inst_info.inst != value_inst) break :decl_struct;
             const type_gop = try dwarf.types.getOrPut(dwarf.gpa, nav_val.toIntern());
-            if (type_gop.found_existing) nav_gop.value_ptr.* = type_gop.value_ptr.* else {
-                if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (type_gop.found_existing) {
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(type_gop.value_ptr.*).clear();
+                nav_gop.value_ptr.* = type_gop.value_ptr.*;
+            } else {
+                if (nav_gop.found_existing)
+                    dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+                else
+                    nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
                 type_gop.value_ptr.* = nav_gop.value_ptr.*;
             }
             wip_nav.entry = nav_gop.value_ptr.*;
@@ -2139,7 +2144,10 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
                 break :done;
             }
-            if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (nav_gop.found_existing)
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+            else
+                nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
             wip_nav.entry = nav_gop.value_ptr.*;
             const diw = wip_nav.debug_info.writer(dwarf.gpa);
             try uleb128(diw, @intFromEnum(AbbrevCode.decl_alias));
@@ -2190,8 +2198,14 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
             if (type_inst_info.inst != value_inst) break :decl_enum;
             const type_gop = try dwarf.types.getOrPut(dwarf.gpa, nav_val.toIntern());
-            if (type_gop.found_existing) nav_gop.value_ptr.* = type_gop.value_ptr.* else {
-                if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (type_gop.found_existing) {
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(type_gop.value_ptr.*).clear();
+                nav_gop.value_ptr.* = type_gop.value_ptr.*;
+            } else {
+                if (nav_gop.found_existing)
+                    dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+                else
+                    nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
                 type_gop.value_ptr.* = nav_gop.value_ptr.*;
             }
             wip_nav.entry = nav_gop.value_ptr.*;
@@ -2215,7 +2229,10 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
                 break :done;
             }
-            if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (nav_gop.found_existing)
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+            else
+                nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
             wip_nav.entry = nav_gop.value_ptr.*;
             const diw = wip_nav.debug_info.writer(dwarf.gpa);
             try uleb128(diw, @intFromEnum(AbbrevCode.decl_alias));
@@ -2264,8 +2281,14 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
             if (type_inst_info.inst != value_inst) break :decl_union;
             const type_gop = try dwarf.types.getOrPut(dwarf.gpa, nav_val.toIntern());
-            if (type_gop.found_existing) nav_gop.value_ptr.* = type_gop.value_ptr.* else {
-                if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (type_gop.found_existing) {
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(type_gop.value_ptr.*).clear();
+                nav_gop.value_ptr.* = type_gop.value_ptr.*;
+            } else {
+                if (nav_gop.found_existing)
+                    dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+                else
+                    nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
                 type_gop.value_ptr.* = nav_gop.value_ptr.*;
             }
             wip_nav.entry = nav_gop.value_ptr.*;
@@ -2328,7 +2351,10 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
                 break :done;
             }
-            if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (nav_gop.found_existing)
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+            else
+                nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
             wip_nav.entry = nav_gop.value_ptr.*;
             const diw = wip_nav.debug_info.writer(dwarf.gpa);
             try uleb128(diw, @intFromEnum(AbbrevCode.decl_alias));
@@ -2377,8 +2403,14 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
             if (type_inst_info.inst != value_inst) break :decl_opaque;
             const type_gop = try dwarf.types.getOrPut(dwarf.gpa, nav_val.toIntern());
-            if (type_gop.found_existing) nav_gop.value_ptr.* = type_gop.value_ptr.* else {
-                if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (type_gop.found_existing) {
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(type_gop.value_ptr.*).clear();
+                nav_gop.value_ptr.* = type_gop.value_ptr.*;
+            } else {
+                if (nav_gop.found_existing)
+                    dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+                else
+                    nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
                 type_gop.value_ptr.* = nav_gop.value_ptr.*;
             }
             wip_nav.entry = nav_gop.value_ptr.*;
@@ -2394,7 +2426,10 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
                 break :done;
             }
-            if (!nav_gop.found_existing) nav_gop.value_ptr.* = try dwarf.addCommonEntry(unit);
+            if (nav_gop.found_existing)
+                dwarf.debug_info.section.getUnit(wip_nav.unit).getEntry(nav_gop.value_ptr.*).clear()
+            else
+                nav_gop.value_ptr.* = try dwarf.addCommonEntry(wip_nav.unit);
             wip_nav.entry = nav_gop.value_ptr.*;
             const diw = wip_nav.debug_info.writer(dwarf.gpa);
             try uleb128(diw, @intFromEnum(AbbrevCode.decl_alias));
@@ -2412,7 +2447,6 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
         },
     }
     try dwarf.debug_info.section.replaceEntry(wip_nav.unit, wip_nav.entry, dwarf, wip_nav.debug_info.items);
-    try dwarf.debug_loclists.section.replaceEntry(wip_nav.unit, wip_nav.entry, dwarf, wip_nav.debug_loclists.items);
     try wip_nav.flush();
 }
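The Dwarf.zig hunks above do two things: updateComptimeNav now clears a reused entry before writing into it instead of reusing stale bytes, and Section, Section.getUnit, Unit.getEntry, Entry.assertNonEmpty and the Optional.unwrap helpers become pub so a linker backend can walk the DWARF data after flushModule. A rough sketch of that usage, assuming a flushed dwarf: *Dwarf and a cross-section reloc value shaped like the ones used further down; everything not shown in this diff is a placeholder:

// Sketch only (not part of the diff): resolve where a cross-section
// relocation points, using the accessors that this change makes public.
const target_sec = &dwarf.debug_str.section;
const target_unit = target_sec.getUnit(reloc.target_unit);
const target_addend = target_unit.off + reloc.target_off + (if (reloc.target_entry.unwrap()) |target_entry|
    target_unit.header_len + target_unit.getEntry(target_entry).off
else
    0);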
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index 4afc592002..333e490d17 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -585,7 +585,7 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
     const ptr_size = self.ptrWidthBytes();
     const target = self.base.comp.root_mod.resolved_target.result;
     const ptr_bit_width = target.ptrBitWidth();
-    const zig_object = self.zigObjectPtr().?;
+    const zo = self.zigObjectPtr().?;
     const fillSection = struct {
         fn fillSection(elf_file: *Elf, shdr: *elf.Elf64_Shdr, size: u64, phndx: ?u16) !void {
@@ -766,7 +766,27 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
         try self.last_atom_and_free_list_table.putNoClobber(gpa, self.zig_bss_section_index.?, .{});
     }
-    if (zig_object.dwarf) |*dwarf| {
+    if (zo.dwarf) |*dwarf| {
+        const addSectionSymbol = struct {
+            fn addSectionSymbol(
+                zig_object: *ZigObject,
+                alloc: Allocator,
+                name: [:0]const u8,
+                alignment: Atom.Alignment,
+                shndx: u32,
+            ) !Symbol.Index {
+                const name_off = try zig_object.addString(alloc, name);
+                const index = try zig_object.newSymbolWithAtom(alloc, name_off);
+                const sym = zig_object.symbol(index);
+                const esym = &zig_object.symtab.items(.elf_sym)[sym.esym_index];
+                esym.st_info |= elf.STT_SECTION;
+                const atom_ptr = zig_object.atom(sym.ref.index).?;
+                atom_ptr.alignment = alignment;
+                atom_ptr.output_section_index = shndx;
+                return index;
+            }
+        }.addSectionSymbol;
+
         if (self.debug_str_section_index == null) {
             self.debug_str_section_index = try self.addSection(.{
                 .name = try self.insertShString(".debug_str"),
@@ -775,7 +795,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 1,
             });
-            zig_object.debug_str_section_dirty = true;
+            zo.debug_str_section_dirty = true;
+            zo.debug_str_index = try addSectionSymbol(zo, gpa, ".debug_str", .@"1", self.debug_str_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_str_section_index.?, .{});
         }
@@ -785,7 +806,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 1,
             });
-            zig_object.debug_info_section_dirty = true;
+            zo.debug_info_section_dirty = true;
+            zo.debug_info_index = try addSectionSymbol(zo, gpa, ".debug_info", .@"1", self.debug_info_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_info_section_index.?, .{});
         }
@@ -795,7 +817,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 1,
             });
-            zig_object.debug_abbrev_section_dirty = true;
+            zo.debug_abbrev_section_dirty = true;
+            zo.debug_abbrev_index = try addSectionSymbol(zo, gpa, ".debug_abbrev", .@"1", self.debug_abbrev_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_abbrev_section_index.?, .{});
         }
@@ -805,7 +828,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 16,
             });
-            zig_object.debug_aranges_section_dirty = true;
+            zo.debug_aranges_section_dirty = true;
+            zo.debug_aranges_index = try addSectionSymbol(zo, gpa, ".debug_aranges", .@"16", self.debug_aranges_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_aranges_section_index.?, .{});
         }
@@ -815,7 +839,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 1,
             });
-            zig_object.debug_line_section_dirty = true;
+            zo.debug_line_section_dirty = true;
+            zo.debug_line_index = try addSectionSymbol(zo, gpa, ".debug_line", .@"1", self.debug_line_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_line_section_index.?, .{});
         }
@@ -827,7 +852,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 1,
             });
-            zig_object.debug_line_str_section_dirty = true;
+            zo.debug_line_str_section_dirty = true;
+            zo.debug_line_str_index = try addSectionSymbol(zo, gpa, ".debug_line_str", .@"1", self.debug_line_str_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_line_str_section_index.?, .{});
         }
@@ -837,7 +863,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 1,
             });
-            zig_object.debug_loclists_section_dirty = true;
+            zo.debug_loclists_section_dirty = true;
+            zo.debug_loclists_index = try addSectionSymbol(zo, gpa, ".debug_loclists", .@"1", self.debug_loclists_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_loclists_section_index.?, .{});
         }
@@ -847,7 +874,8 @@ pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
                 .type = elf.SHT_PROGBITS,
                 .addralign = 1,
             });
-            zig_object.debug_rnglists_section_dirty = true;
+            zo.debug_rnglists_section_dirty = true;
+            zo.debug_rnglists_index = try addSectionSymbol(zo, gpa, ".debug_rnglists", .@"1", self.debug_rnglists_section_index.?);
             try self.output_sections.putNoClobber(gpa, self.debug_rnglists_section_index.?, .{});
         }
@@ -1254,7 +1282,6 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
     try self.addCommentString();
     try self.finalizeMergeSections();
     try self.initOutputSections();
-    try self.initMergeSections();
     if (self.linkerDefinedPtr()) |obj| {
         try obj.initStartStopSymbols(self);
     }
@@ -1317,8 +1344,6 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
         try self.base.file.?.pwriteAll(code, file_offset);
     }
-    if (zo.dwarf) |*dwarf| try dwarf.resolveRelocs();
-
     if (has_reloc_errors) return error.FlushFailure;
 }
@@ -2652,15 +2677,6 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
     }
 }
-fn writeDwarfAddrAssumeCapacity(self: *Elf, buf: *std.ArrayList(u8), addr: u64) void {
-    const target = self.base.comp.root_mod.resolved_target.result;
-    const target_endian = target.cpu.arch.endian();
-    switch (self.ptr_width) {
-        .p32 => mem.writeInt(u32, buf.addManyAsArrayAssumeCapacity(4), @as(u32, @intCast(addr)), target_endian),
-        .p64 => mem.writeInt(u64, buf.addManyAsArrayAssumeCapacity(8), addr, target_endian),
-    }
-}
-
 pub fn writeShdrTable(self: *Elf) !void {
     const gpa = self.base.comp.gpa;
     const target = self.base.comp.root_mod.resolved_target.result;
@@ -3031,17 +3047,17 @@ pub fn finalizeMergeSections(self: *Elf) !void {
 }
 pub fn updateMergeSectionSizes(self: *Elf) !void {
+    for (self.merge_sections.items) |*msec| {
+        msec.updateSize();
+    }
     for (self.merge_sections.items) |*msec| {
         const shdr = &self.shdrs.items[msec.output_section_index];
-        for (msec.finalized_subsections.items) |msub_index| {
-            const msub = msec.mergeSubsection(msub_index);
-            assert(msub.alive);
-            const offset = msub.alignment.forward(shdr.sh_size);
-            const padding = offset - shdr.sh_size;
-            msub.value = @intCast(offset);
-            shdr.sh_size += padding + msub.size;
-            shdr.sh_addralign = @max(shdr.sh_addralign, msub.alignment.toByteUnits() orelse 1);
-        }
+        const offset = msec.alignment.forward(shdr.sh_size);
+        const padding = offset - shdr.sh_size;
+        msec.value = @intCast(offset);
+        shdr.sh_size += padding + msec.size;
+        shdr.sh_addralign = @max(shdr.sh_addralign, msec.alignment.toByteUnits() orelse 1);
+        shdr.sh_entsize = if (shdr.sh_entsize == 0) msec.entsize else @min(shdr.sh_entsize, msec.entsize);
     }
 }
@@ -3052,7 +3068,8 @@ pub fn writeMergeSections(self: *Elf) !void {
     for (self.merge_sections.items) |*msec| {
         const shdr = self.shdrs.items[msec.output_section_index];
-        const size = math.cast(usize, shdr.sh_size) orelse return error.Overflow;
+        const fileoff = math.cast(usize, msec.value + shdr.sh_offset) orelse return error.Overflow;
+        const size = math.cast(usize, msec.size) orelse return error.Overflow;
         try buffer.ensureTotalCapacity(size);
         buffer.appendNTimesAssumeCapacity(0, size);
@@ -3064,7 +3081,7 @@ pub fn writeMergeSections(self: *Elf) !void {
             @memcpy(buffer.items[off..][0..string.len], string);
         }
-        try self.base.file.?.pwriteAll(buffer.items, shdr.sh_offset);
+        try self.base.file.?.pwriteAll(buffer.items, fileoff);
         buffer.clearRetainingCapacity();
     }
 }
@@ -3073,26 +3090,9 @@ fn initOutputSections(self: *Elf) !void {
     for (self.objects.items) |index| {
         try self.file(index).?.object.initOutputSections(self);
     }
-}
-
-pub fn initMergeSections(self: *Elf) !void {
     for (self.merge_sections.items) |*msec| {
         if (msec.finalized_subsections.items.len == 0) continue;
-        const name = msec.name(self);
-        const shndx = self.sectionByName(name) orelse try self.addSection(.{
-            .name = msec.name_offset,
-            .type = msec.type,
-            .flags = msec.flags,
-        });
-        msec.output_section_index = shndx;
-
-        var entsize = msec.mergeSubsection(msec.finalized_subsections.items[0]).entsize;
-        for (msec.finalized_subsections.items) |msub_index| {
-            const msub = msec.mergeSubsection(msub_index);
-            entsize = @min(entsize, msub.entsize);
-        }
-        const shdr = &self.shdrs.items[shndx];
-        shdr.sh_entsize = entsize;
+        try msec.initOutputSection(self);
     }
 }
@@ -4184,26 +4184,21 @@ pub fn allocateNonAllocSections(self: *Elf) !void {
                 shdr.sh_offset,
                 new_offset,
             });
-            const zig_object = self.zigObjectPtr().?;
-            const existing_size = blk: {
-                if (shndx == self.debug_info_section_index.?)
-                    break :blk zig_object.debug_info_section_zig_size;
-                if (shndx == self.debug_abbrev_section_index.?)
-                    break :blk zig_object.debug_abbrev_section_zig_size;
-                if (shndx == self.debug_str_section_index.?)
-                    break :blk zig_object.debug_str_section_zig_size;
-                if (shndx == self.debug_aranges_section_index.?)
-                    break :blk zig_object.debug_aranges_section_zig_size;
-                if (shndx == self.debug_line_section_index.?)
-                    break :blk zig_object.debug_line_section_zig_size;
-                if (shndx == self.debug_line_str_section_index.?)
-                    break :blk zig_object.debug_line_str_section_zig_size;
-                if (shndx == self.debug_loclists_section_index.?)
-                    break :blk zig_object.debug_loclists_section_zig_size;
-                if (shndx == self.debug_rnglists_section_index.?)
-                    break :blk zig_object.debug_rnglists_section_zig_size;
-                unreachable;
-            };
+            const zo = self.zigObjectPtr().?;
+            const existing_size = for ([_]Symbol.Index{
+                zo.debug_info_index.?,
+                zo.debug_abbrev_index.?,
+                zo.debug_aranges_index.?,
+                zo.debug_str_index.?,
+                zo.debug_line_index.?,
+                zo.debug_line_str_index.?,
+                zo.debug_loclists_index.?,
+                zo.debug_rnglists_index.?,
+            }) |sym_index| {
+                const sym = zo.symbol(sym_index);
+                const atom_ptr = sym.atom(self).?;
+                if (atom_ptr.output_section_index == shndx) break atom_ptr.size;
+            } else 0;
             const amt = try self.base.file.?.copyRangeAll(
                 shdr.sh_offset,
                 self.base.file.?,
@@ -4299,24 +4294,21 @@ fn writeAtoms(self: *Elf) !void {
         // TODO really, really handle debug section separately
         const base_offset = if (self.isDebugSection(@intCast(shndx))) blk: {
-            const zig_object = self.zigObjectPtr().?;
-            if (shndx == self.debug_info_section_index.?)
-                break :blk zig_object.debug_info_section_zig_size;
-            if (shndx == self.debug_abbrev_section_index.?)
-                break :blk zig_object.debug_abbrev_section_zig_size;
-            if (shndx == self.debug_str_section_index.?)
-                break :blk zig_object.debug_str_section_zig_size;
-            if (shndx == self.debug_aranges_section_index.?)
-                break :blk zig_object.debug_aranges_section_zig_size;
-            if (shndx == self.debug_line_section_index.?)
-                break :blk zig_object.debug_line_section_zig_size;
-            if (shndx == self.debug_line_str_section_index.?)
-                break :blk zig_object.debug_line_str_section_zig_size;
-            if (shndx == self.debug_loclists_section_index.?)
-                break :blk zig_object.debug_loclists_section_zig_size;
-            if (shndx == self.debug_rnglists_section_index.?)
-                break :blk zig_object.debug_rnglists_section_zig_size;
-            unreachable;
+            const zo = self.zigObjectPtr().?;
+            break :blk for ([_]Symbol.Index{
+                zo.debug_info_index.?,
+                zo.debug_abbrev_index.?,
+                zo.debug_aranges_index.?,
+                zo.debug_str_index.?,
+                zo.debug_line_index.?,
+                zo.debug_line_str_index.?,
+                zo.debug_loclists_index.?,
+                zo.debug_rnglists_index.?,
+            }) |sym_index| {
+                const sym = zo.symbol(sym_index);
+                const atom_ptr = sym.atom(self).?;
+                if (atom_ptr.output_section_index == shndx) break atom_ptr.size;
+            } else 0;
         } else 0;
         const sh_offset = shdr.sh_offset + base_offset;
         const sh_size = math.cast(usize, shdr.sh_size - base_offset) orelse return error.Overflow;
@@ -4410,7 +4402,6 @@ pub fn updateSymtabSize(self: *Elf) !void {
     if (self.eh_frame_section_index) |_| {
         nlocals += 1;
     }
-    nlocals += @intCast(self.merge_sections.items.len);
     if (self.requiresThunks()) for (self.thunks.items) |*th| {
         th.output_symtab_ctx.ilocal = nlocals + 1;
@@ -4734,30 +4725,12 @@ fn writeSectionSymbols(self: *Elf) void {
         };
         ilocal += 1;
     }
-
-    for (self.merge_sections.items) |msec| {
-        const shdr = self.shdrs.items[msec.output_section_index];
-        const out_sym = &self.symtab.items[ilocal];
-        out_sym.* = .{
-            .st_name = 0,
-            .st_value = shdr.sh_addr,
-            .st_info = elf.STT_SECTION,
-            .st_shndx = @intCast(msec.output_section_index),
-            .st_size = 0,
-            .st_other = 0,
-        };
-        ilocal += 1;
-    }
 }
 pub fn sectionSymbolOutputSymtabIndex(self: Elf, shndx: u32) u32 {
     if (self.eh_frame_section_index) |index| {
         if (index == shndx) return @intCast(self.output_sections.keys().len + 1);
     }
-    const base: usize = if (self.eh_frame_section_index == null) 0 else 1;
-    for (self.merge_sections.items, 0..) |msec, index| {
-        if (msec.output_section_index == shndx) return @intCast(self.output_sections.keys().len + 1 + index + base);
-    }
     return @intCast(self.output_sections.getIndex(shndx).? + 1);
 }
@@ -5520,10 +5493,11 @@ fn formatShdr(
     _ = options;
     _ = unused_fmt_string;
     const shdr = ctx.shdr;
-    try writer.print("{s} : @{x} ({x}) : align({x}) : size({x}) : flags({})", .{
+    try writer.print("{s} : @{x} ({x}) : align({x}) : size({x}) : entsize({x}) : flags({})", .{
        ctx.elf_file.getShString(shdr.sh_name), shdr.sh_offset, shdr.sh_addr, shdr.sh_addralign,
-        shdr.sh_size, fmtShdrFlags(shdr.sh_flags),
+        shdr.sh_size, shdr.sh_entsize,
+        fmtShdrFlags(shdr.sh_flags),
     });
 }
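With the Elf.zig hunks above, the boundary between Zig-generated debug data and debug data pulled in from input object files is no longer kept in per-section *_zig_size counters on the ZigObject; it is read from the STT_SECTION symbols/atoms that initMetadata now creates through addSectionSymbol. A condensed sketch of the lookup that allocateNonAllocSections and writeAtoms both perform, assuming self: *Elf and a debug output section index shndx:

// Sketch only (not part of the diff): the Zig-owned atom that backs this
// debug output section carries the size of Zig's contribution.
const zo = self.zigObjectPtr().?;
const zig_size: u64 = for ([_]Symbol.Index{
    zo.debug_info_index.?,     zo.debug_abbrev_index.?,
    zo.debug_aranges_index.?,  zo.debug_str_index.?,
    zo.debug_line_index.?,     zo.debug_line_str_index.?,
    zo.debug_loclists_index.?, zo.debug_rnglists_index.?,
}) |sym_index| {
    const atom_ptr = zo.symbol(sym_index).atom(self).?;
    if (atom_ptr.output_section_index == shndx) break atom_ptr.size;
} else 0;
_ = zig_size; // used as the copy/write base offset in the hunks above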
diff --git a/src/link/Elf/Atom.zig b/src/link/Elf/Atom.zig
index a9050dcd00..3eb447ab75 100644
--- a/src/link/Elf/Atom.zig
+++ b/src/link/Elf/Atom.zig
@@ -302,7 +302,7 @@ pub fn free(self: *Atom, elf_file: *Elf) void {
     }
     // TODO create relocs free list
-    self.freeRelocs(elf_file);
+    self.freeRelocs(zo);
     // TODO figure out how to free input section mappind in ZigModule
     // const zig_object = elf_file.zigObjectPtr().?
     // assert(zig_object.atoms.swapRemove(self.atom_index));
@@ -377,21 +377,19 @@ pub fn markFdesDead(self: Atom, elf_file: *Elf) void {
     }
 }
-pub fn addReloc(self: Atom, elf_file: *Elf, reloc: elf.Elf64_Rela) !void {
-    const comp = elf_file.base.comp;
-    const gpa = comp.gpa;
-    const file_ptr = self.file(elf_file).?;
-    assert(file_ptr == .zig_object);
-    const zig_object = file_ptr.zig_object;
-    const rels = &zig_object.relocs.items[self.relocs_section_index];
-    try rels.append(gpa, reloc);
+pub fn addReloc(self: Atom, alloc: Allocator, reloc: elf.Elf64_Rela, zo: *ZigObject) !void {
+    const rels = &zo.relocs.items[self.relocs_section_index];
+    try rels.ensureUnusedCapacity(alloc, 1);
+    self.addRelocAssumeCapacity(reloc, zo);
 }
-pub fn freeRelocs(self: Atom, elf_file: *Elf) void {
-    const file_ptr = self.file(elf_file).?;
-    assert(file_ptr == .zig_object);
-    const zig_object = file_ptr.zig_object;
-    zig_object.relocs.items[self.relocs_section_index].clearRetainingCapacity();
+pub fn addRelocAssumeCapacity(self: Atom, reloc: elf.Elf64_Rela, zo: *ZigObject) void {
+    const rels = &zo.relocs.items[self.relocs_section_index];
+    rels.appendAssumeCapacity(reloc);
+}
+
+pub fn freeRelocs(self: Atom, zo: *ZigObject) void {
+    zo.relocs.items[self.relocs_section_index].clearRetainingCapacity();
 }
 pub fn scanRelocsRequiresCode(self: Atom, elf_file: *Elf) bool {
diff --git a/src/link/Elf/ZigObject.zig b/src/link/Elf/ZigObject.zig
index a6a6f76d04..3c51d5d11b 100644
--- a/src/link/Elf/ZigObject.zig
+++ b/src/link/Elf/ZigObject.zig
@@ -50,16 +50,14 @@ debug_line_str_section_dirty: bool = false,
 debug_loclists_section_dirty: bool = false,
 debug_rnglists_section_dirty: bool = false,
-/// Size contribution of Zig's metadata to each debug section.
-/// Used to track start of metadata from input object files.
-debug_info_section_zig_size: u64 = 0,
-debug_abbrev_section_zig_size: u64 = 0,
-debug_str_section_zig_size: u64 = 0,
-debug_aranges_section_zig_size: u64 = 0,
-debug_line_section_zig_size: u64 = 0,
-debug_line_str_section_zig_size: u64 = 0,
-debug_loclists_section_zig_size: u64 = 0,
-debug_rnglists_section_zig_size: u64 = 0,
+debug_info_index: ?Symbol.Index = null,
+debug_abbrev_index: ?Symbol.Index = null,
+debug_aranges_index: ?Symbol.Index = null,
+debug_str_index: ?Symbol.Index = null,
+debug_line_index: ?Symbol.Index = null,
+debug_line_str_index: ?Symbol.Index = null,
+debug_loclists_index: ?Symbol.Index = null,
+debug_rnglists_index: ?Symbol.Index = null,
 pub const global_symbol_bit: u32 = 0x80000000;
 pub const symbol_mask: u32 = 0x7fffffff;
@@ -171,13 +169,154 @@ pub fn flushModule(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !voi
     if (self.dwarf) |*dwarf| {
         const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.module.?, .tid = tid };
         try dwarf.flushModule(pt);
+        try dwarf.resolveRelocs();
+
+        const gpa = elf_file.base.comp.gpa;
+        const cpu_arch = elf_file.getTarget().cpu.arch;
+
+        // TODO invert this logic so that we manage the output section with the atom, not the
+        // other way around
+        for ([_]u32{
+            self.debug_info_index.?,
+            self.debug_abbrev_index.?,
+            self.debug_str_index.?,
+            self.debug_aranges_index.?,
+            self.debug_line_index.?,
+            self.debug_line_str_index.?,
+            self.debug_loclists_index.?,
+            self.debug_rnglists_index.?,
+        }, [_]*Dwarf.Section{
+            &dwarf.debug_info.section,
+            &dwarf.debug_abbrev.section,
+            &dwarf.debug_str.section,
+            &dwarf.debug_aranges.section,
+            &dwarf.debug_line.section,
+            &dwarf.debug_line_str.section,
+            &dwarf.debug_loclists.section,
+            &dwarf.debug_rnglists.section,
+        }) |sym_index, sect| {
+            const sym = self.symbol(sym_index);
+            const atom_ptr = self.atom(sym.ref.index).?;
+            if (!atom_ptr.alive) continue;
+            const shndx = sym.outputShndx(elf_file).?;
+            const shdr = elf_file.shdrs.items[shndx];
+            const esym = &self.symtab.items(.elf_sym)[sym.esym_index];
+            esym.st_size = shdr.sh_size;
+            atom_ptr.size = shdr.sh_size;
+            atom_ptr.alignment = Atom.Alignment.fromNonzeroByteUnits(shdr.sh_addralign);
+
+            log.debug("parsing relocs in {s}", .{sym.name(elf_file)});
+
+            const relocs = &self.relocs.items[atom_ptr.relocsShndx().?];
+            for (sect.units.items) |*unit| {
+                try relocs.ensureUnusedCapacity(gpa, unit.cross_section_relocs.items.len);
+                for (unit.cross_section_relocs.items) |reloc| {
+                    const target_sym_index = switch (reloc.target_sec) {
+                        .debug_abbrev => self.debug_abbrev_index.?,
+                        .debug_info => self.debug_info_index.?,
+                        .debug_line => self.debug_line_index.?,
+                        .debug_line_str => self.debug_line_str_index.?,
+                        .debug_loclists => self.debug_loclists_index.?,
+                        .debug_rnglists => self.debug_rnglists_index.?,
+                        .debug_str => self.debug_str_index.?,
+                    };
+                    const target_sec = switch (reloc.target_sec) {
+                        inline else => |target_sec| &@field(dwarf, @tagName(target_sec)).section,
+                    };
+                    const target_unit = target_sec.getUnit(reloc.target_unit);
+                    const r_offset = unit.off + reloc.source_off;
+                    const r_addend: i64 = @intCast(target_unit.off + reloc.target_off + (if (reloc.target_entry.unwrap()) |target_entry|
+                        target_unit.header_len + target_unit.getEntry(target_entry).assertNonEmpty(unit, sect, dwarf).off
+                    else
+                        0));
+                    const r_type = relocation.dwarf.crossSectionRelocType(dwarf.format, cpu_arch);
+                    log.debug(" {s} <- r_off={x}, r_add={x}, r_type={}", .{
+                        self.symbol(target_sym_index).name(elf_file),
+                        r_offset,
+                        r_addend,
+                        relocation.fmtRelocType(r_type, cpu_arch),
+                    });
+                    atom_ptr.addRelocAssumeCapacity(.{
+                        .r_offset = r_offset,
+                        .r_addend = r_addend,
+                        .r_info = (@as(u64, @intCast(target_sym_index)) << 32) | r_type,
+                    }, self);
+                }
+
+                for (unit.entries.items) |*entry| {
+                    const entry_off = unit.off + unit.header_len + entry.off;
+
+                    try relocs.ensureUnusedCapacity(gpa, entry.cross_section_relocs.items.len);
+                    for (entry.cross_section_relocs.items) |reloc| {
+                        const target_sym_index = switch (reloc.target_sec) {
+                            .debug_abbrev => self.debug_abbrev_index.?,
+                            .debug_info => self.debug_info_index.?,
+                            .debug_line => self.debug_line_index.?,
+                            .debug_line_str => self.debug_line_str_index.?,
+                            .debug_loclists => self.debug_loclists_index.?,
+                            .debug_rnglists => self.debug_rnglists_index.?,
+                            .debug_str => self.debug_str_index.?,
+                        };
+                        const target_sec = switch (reloc.target_sec) {
+                            inline else => |target_sec| &@field(dwarf, @tagName(target_sec)).section,
+                        };
+                        const target_unit = target_sec.getUnit(reloc.target_unit);
+                        const r_offset = entry_off + reloc.source_off;
+                        const r_addend: i64 = @intCast(target_unit.off + reloc.target_off + (if (reloc.target_entry.unwrap()) |target_entry|
+                            target_unit.header_len + target_unit.getEntry(target_entry).assertNonEmpty(unit, sect, dwarf).off
+                        else
+                            0));
+                        const r_type = relocation.dwarf.crossSectionRelocType(dwarf.format, cpu_arch);
+                        log.debug(" {s} <- r_off={x}, r_add={x}, r_type={}", .{
+                            self.symbol(target_sym_index).name(elf_file),
+                            r_offset,
+                            r_addend,
+                            relocation.fmtRelocType(r_type, cpu_arch),
+                        });
+                        atom_ptr.addRelocAssumeCapacity(.{
+                            .r_offset = r_offset,
+                            .r_addend = r_addend,
+                            .r_info = (@as(u64, @intCast(target_sym_index)) << 32) | r_type,
+                        }, self);
+                    }
+
+                    try relocs.ensureUnusedCapacity(gpa, entry.external_relocs.items.len);
+                    for (entry.external_relocs.items) |reloc| {
+                        const target_sym = self.symbol(reloc.target_sym);
+                        const r_offset = entry_off + reloc.source_off;
+                        const r_addend: i64 = @intCast(reloc.target_off);
+                        const r_type = relocation.dwarf.externalRelocType(target_sym.*, dwarf.address_size, cpu_arch);
+                        log.debug(" {s} <- r_off={x}, r_add={x}, r_type={}", .{
+                            target_sym.name(elf_file),
+                            r_offset,
+                            r_addend,
+                            relocation.fmtRelocType(r_type, cpu_arch),
+                        });
+                        atom_ptr.addRelocAssumeCapacity(.{
+                            .r_offset = r_offset,
+                            .r_addend = r_addend,
+                            .r_info = (@as(u64, @intCast(reloc.target_sym)) << 32) | r_type,
+                        }, self);
+                    }
+                }
+            }
+
+            if (elf_file.base.isRelocatable() and relocs.items.len > 0) {
+                const gop = try elf_file.output_rela_sections.getOrPut(gpa, shndx);
+                if (!gop.found_existing) {
+                    const rela_sect_name = try std.fmt.allocPrintZ(gpa, ".rela{s}", .{elf_file.getShString(shdr.sh_name)});
+                    defer gpa.free(rela_sect_name);
+                    const rela_sh_name = try elf_file.insertShString(rela_sect_name);
+                    const rela_shndx = try elf_file.addRelaShdr(rela_sh_name, shndx);
+                    gop.value_ptr.* = .{ .shndx = rela_shndx };
+                }
+            }
+        }
         self.debug_abbrev_section_dirty = false;
         self.debug_aranges_section_dirty = false;
         self.debug_rnglists_section_dirty = false;
         self.debug_str_section_dirty = false;
-
-        self.saveDebugSectionsSizes(elf_file);
     }
     // The point of flushModule() is to commit changes, so in theory, nothing should
@@ -190,33 +329,6 @@ pub fn flushModule(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !voi
     assert(!self.debug_str_section_dirty);
 }
-fn saveDebugSectionsSizes(self: *ZigObject, elf_file: *Elf) void {
-    if (elf_file.debug_info_section_index) |shndx| {
-        self.debug_info_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-    if (elf_file.debug_abbrev_section_index) |shndx| {
-        self.debug_abbrev_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-    if (elf_file.debug_str_section_index) |shndx| {
-        self.debug_str_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-    if (elf_file.debug_aranges_section_index) |shndx| {
-        self.debug_aranges_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-    if (elf_file.debug_line_section_index) |shndx| {
-        self.debug_line_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-    if (elf_file.debug_line_str_section_index) |shndx| {
-        self.debug_line_str_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-    if (elf_file.debug_loclists_section_index) |shndx| {
-        self.debug_loclists_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-    if (elf_file.debug_rnglists_section_index) |shndx| {
-        self.debug_rnglists_section_zig_size = elf_file.shdrs.items[shndx].sh_size;
-    }
-}
-
 fn newSymbol(self: *ZigObject, allocator: Allocator, name_off: u32, st_bind: u4) !Symbol.Index {
     try self.symtab.ensureUnusedCapacity(allocator, 1);
     try self.symbols.ensureUnusedCapacity(allocator, 1);
@@ -278,7 +390,7 @@ fn newAtom(self: *ZigObject, allocator: Allocator, name_off: u32) !Atom.Index {
     return index;
 }
-fn newSymbolWithAtom(self: *ZigObject, allocator: Allocator, name_off: u32) !Symbol.Index {
+pub fn newSymbolWithAtom(self: *ZigObject, allocator: Allocator, name_off: u32) !Symbol.Index {
     const atom_index = try self.newAtom(allocator, name_off);
     const sym_index = try self.newLocalSymbol(allocator, name_off);
     const sym = self.symbol(sym_index);
@@ -642,11 +754,11 @@ pub fn getNavVAddr(
     const vaddr = this_sym.address(.{}, elf_file);
     const parent_atom = self.symbol(reloc_info.parent_atom_index).atom(elf_file).?;
     const r_type = relocation.encode(.abs, elf_file.getTarget().cpu.arch);
-    try parent_atom.addReloc(elf_file, .{
+    try parent_atom.addReloc(elf_file.base.comp.gpa, .{
         .r_offset = reloc_info.offset,
         .r_info = (@as(u64, @intCast(this_sym_index)) << 32) | r_type,
         .r_addend = reloc_info.addend,
-    });
+    }, self);
     return @intCast(vaddr);
 }
@@ -661,11 +773,11 @@ pub fn getUavVAddr(
     const vaddr = sym.address(.{}, elf_file);
     const parent_atom = self.symbol(reloc_info.parent_atom_index).atom(elf_file).?;
     const r_type = relocation.encode(.abs, elf_file.getTarget().cpu.arch);
-    try parent_atom.addReloc(elf_file, .{
+    try parent_atom.addReloc(elf_file.base.comp.gpa, .{
         .r_offset = reloc_info.offset,
         .r_info = (@as(u64, @intCast(sym_index)) << 32) | r_type,
         .r_addend = reloc_info.addend,
-    });
+    }, self);
     return @intCast(vaddr);
 }
@@ -1012,7 +1124,7 @@ pub fn updateFunc(
     log.debug("updateFunc {}({d})", .{ ip.getNav(func.owner_nav).fqn.fmt(ip), func.owner_nav });
     const sym_index = try self.getOrCreateMetadataForNav(elf_file, func.owner_nav);
-    self.symbol(sym_index).atom(elf_file).?.freeRelocs(elf_file);
+    self.symbol(sym_index).atom(elf_file).?.freeRelocs(self);
     var code_buffer = std.ArrayList(u8).init(gpa);
     defer code_buffer.deinit();
@@ -1140,7 +1252,7 @@ pub fn updateNav(
     if (nav_init != .none and Value.fromInterned(nav_init).typeOf(zcu).hasRuntimeBits(pt)) {
         const sym_index = try self.getOrCreateMetadataForNav(elf_file, nav_index);
-        self.symbol(sym_index).atom(elf_file).?.freeRelocs(elf_file);
+        self.symbol(sym_index).atom(elf_file).?.freeRelocs(self);
         var code_buffer = std.ArrayList(u8).init(zcu.gpa);
         defer code_buffer.deinit();
@@ -1529,7 +1641,7 @@ pub fn asFile(self: *ZigObject) File {
     return .{ .zig_object = self };
 }
-fn addString(self: *ZigObject, allocator: Allocator, string: []const u8) !u32 {
+pub fn addString(self: *ZigObject, allocator: Allocator, string: []const u8) !u32 {
     return self.strtab.insert(allocator, string);
 }
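The ZigObject.flushModule addition above is the core of this change: after dwarf.resolveRelocs(), every cross-section and external relocation recorded by Dwarf is replayed as an ordinary elf.Elf64_Rela on the owning debug-section atom, so downstream code treats .debug_* like any other Zig-owned section. A trimmed sketch of the conversion for one external relocation, with logging and capacity bookkeeping reduced to the essentials (all names come from the hunk above):

// Sketch only (not part of the diff): turn one Dwarf external relocation into
// an ELF relocation record attached to the debug section's atom.
const entry_off = unit.off + unit.header_len + entry.off;
const target_sym = self.symbol(reloc.target_sym);
const r_type = relocation.dwarf.externalRelocType(target_sym.*, dwarf.address_size, cpu_arch);
try relocs.ensureUnusedCapacity(gpa, 1);
atom_ptr.addRelocAssumeCapacity(.{
    .r_offset = entry_off + reloc.source_off,
    .r_addend = @intCast(reloc.target_off),
    .r_info = (@as(u64, @intCast(reloc.target_sym)) << 32) | r_type,
}, self);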
diff --git a/src/link/Elf/merge_section.zig b/src/link/Elf/merge_section.zig
index baec316c3d..7ffb17e963 100644
--- a/src/link/Elf/merge_section.zig
+++ b/src/link/Elf/merge_section.zig
@@ -1,4 +1,8 @@
 pub const MergeSection = struct {
+    value: u64 = 0,
+    size: u64 = 0,
+    alignment: Atom.Alignment = .@"1",
+    entsize: u32 = 0,
     name_offset: u32 = 0,
     type: u32 = 0,
     flags: u64 = 0,
@@ -26,7 +30,7 @@ pub const MergeSection = struct {
     pub fn address(msec: MergeSection, elf_file: *Elf) i64 {
         const shdr = elf_file.shdrs.items[msec.output_section_index];
-        return @intCast(shdr.sh_addr);
+        return @intCast(shdr.sh_addr + msec.value);
     }
     const InsertResult = struct {
@@ -90,6 +94,29 @@ pub const MergeSection = struct {
         std.mem.sort(MergeSubsection.Index, msec.finalized_subsections.items, msec, sortFn);
     }
+    pub fn updateSize(msec: *MergeSection) void {
+        for (msec.finalized_subsections.items) |msub_index| {
+            const msub = msec.mergeSubsection(msub_index);
+            assert(msub.alive);
+            const offset = msub.alignment.forward(msec.size);
+            const padding = offset - msec.size;
+            msub.value = @intCast(offset);
+            msec.size += padding + msub.size;
+            msec.alignment = msec.alignment.max(msub.alignment);
+            msec.entsize = if (msec.entsize == 0) msub.entsize else @min(msec.entsize, msub.entsize);
+        }
+    }
+
+    pub fn initOutputSection(msec: *MergeSection, elf_file: *Elf) !void {
+        const shndx = elf_file.sectionByName(msec.name(elf_file)) orelse try elf_file.addSection(.{
+            .name = msec.name_offset,
+            .type = msec.type,
+            .flags = msec.flags,
+        });
+        try elf_file.output_sections.put(elf_file.base.comp.gpa, shndx, .{});
+        msec.output_section_index = shndx;
+    }
+
     pub fn addMergeSubsection(msec: *MergeSection, allocator: Allocator) !MergeSubsection.Index {
         const index: MergeSubsection.Index = @intCast(msec.subsections.items.len);
         const msub = try msec.subsections.addOne(allocator);
@@ -163,9 +190,12 @@ pub const MergeSection = struct {
         _ = unused_fmt_string;
         const msec = ctx.msec;
         const elf_file = ctx.elf_file;
-        try writer.print("{s} : @{x} : type({x}) : flags({x})\n", .{
+        try writer.print("{s} : @{x} : size({x}) : align({x}) : entsize({x}) : type({x}) : flags({x})\n", .{
             msec.name(elf_file),
             msec.address(elf_file),
+            msec.size,
+            msec.alignment.toByteUnits() orelse 0,
+            msec.entsize,
             msec.type,
             msec.flags,
         });
     }
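MergeSection now owns its value/size/alignment/entsize, computed once by updateSize, so the Elf-level pass only has to fold one record per merge section into the output header. A short sketch of how Elf.updateMergeSectionSizes (in the Elf.zig hunk earlier) consumes the two new helpers; this condenses the diff rather than adding behavior:

// Sketch only (not part of the diff): two-phase merge-section layout.
for (self.merge_sections.items) |*msec| msec.updateSize();
for (self.merge_sections.items) |*msec| {
    const shdr = &self.shdrs.items[msec.output_section_index];
    const offset = msec.alignment.forward(shdr.sh_size);
    msec.value = @intCast(offset);
    shdr.sh_size = offset + msec.size;
    shdr.sh_addralign = @max(shdr.sh_addralign, msec.alignment.toByteUnits() orelse 1);
    shdr.sh_entsize = if (shdr.sh_entsize == 0) msec.entsize else @min(shdr.sh_entsize, msec.entsize);
}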
diff --git a/src/link/Elf/relocatable.zig b/src/link/Elf/relocatable.zig
index 213b43a95d..50b9b562d1 100644
--- a/src/link/Elf/relocatable.zig
+++ b/src/link/Elf/relocatable.zig
@@ -42,7 +42,11 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]co
     try elf_file.finalizeMergeSections();
     zig_object.claimUnresolvedObject(elf_file);
-    try elf_file.initMergeSections();
+    for (elf_file.merge_sections.items) |*msec| {
+        if (msec.finalized_subsections.items.len == 0) continue;
+        try msec.initOutputSection(elf_file);
+    }
+
     try elf_file.initSymtab();
     try elf_file.initShStrtab();
     try elf_file.sortShdrs();
@@ -198,7 +202,6 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]const
     claimUnresolved(elf_file);
     try initSections(elf_file);
-    try elf_file.initMergeSections();
     try elf_file.sortShdrs();
     if (elf_file.zigObjectPtr()) |zig_object| {
         try zig_object.addAtomsToRelaSections(elf_file);
@@ -294,6 +297,11 @@ fn initSections(elf_file: *Elf) !void {
         try object.initRelaSections(elf_file);
     }
+    for (elf_file.merge_sections.items) |*msec| {
+        if (msec.finalized_subsections.items.len == 0) continue;
+        try msec.initOutputSection(elf_file);
+    }
+
     const needs_eh_frame = for (elf_file.objects.items) |index| {
         if (elf_file.file(index).?.object.cies.items.len > 0) break true;
     } else false;
@@ -423,24 +431,21 @@ fn writeAtoms(elf_file: *Elf) !void {
         // TODO really, really handle debug section separately
         const base_offset = if (elf_file.isDebugSection(@intCast(shndx))) blk: {
-            const zig_object = elf_file.zigObjectPtr().?;
-            if (shndx == elf_file.debug_info_section_index.?)
-                break :blk zig_object.debug_info_section_zig_size;
-            if (shndx == elf_file.debug_abbrev_section_index.?)
-                break :blk zig_object.debug_abbrev_section_zig_size;
-            if (shndx == elf_file.debug_str_section_index.?)
-                break :blk zig_object.debug_str_section_zig_size;
-            if (shndx == elf_file.debug_aranges_section_index.?)
-                break :blk zig_object.debug_aranges_section_zig_size;
-            if (shndx == elf_file.debug_line_section_index.?)
-                break :blk zig_object.debug_line_section_zig_size;
-            if (shndx == elf_file.debug_line_str_section_index.?)
-                break :blk zig_object.debug_line_str_section_zig_size;
-            if (shndx == elf_file.debug_loclists_section_index.?)
-                break :blk zig_object.debug_loclists_section_zig_size;
-            if (shndx == elf_file.debug_rnglists_section_index.?)
-                break :blk zig_object.debug_rnglists_section_zig_size;
-            unreachable;
+            const zo = elf_file.zigObjectPtr().?;
+            break :blk for ([_]Symbol.Index{
+                zo.debug_info_index.?,
+                zo.debug_abbrev_index.?,
+                zo.debug_aranges_index.?,
+                zo.debug_str_index.?,
+                zo.debug_line_index.?,
+                zo.debug_line_str_index.?,
+                zo.debug_loclists_index.?,
+                zo.debug_rnglists_index.?,
+            }) |sym_index| {
+                const sym = zo.symbol(sym_index);
+                const atom_ptr = sym.atom(elf_file).?;
+                if (atom_ptr.output_section_index == shndx) break atom_ptr.size;
+            } else 0;
         } else 0;
         const sh_offset = shdr.sh_offset + base_offset;
         const sh_size = math.cast(usize, shdr.sh_size - base_offset) orelse return error.Overflow;
@@ -586,3 +591,4 @@ const Compilation = @import("../../Compilation.zig");
 const Elf = @import("../Elf.zig");
 const File = @import("file.zig").File;
 const Object = @import("Object.zig");
+const Symbol = @import("Symbol.zig");
diff --git a/src/link/Elf/relocation.zig b/src/link/Elf/relocation.zig
index e1a317f67d..d6f8dc5d10 100644
--- a/src/link/Elf/relocation.zig
+++ b/src/link/Elf/relocation.zig
@@ -91,6 +91,44 @@ pub fn encode(comptime kind: Kind, cpu_arch: std.Target.Cpu.Arch) u32 {
     };
 }
+pub const dwarf = struct {
+    pub fn crossSectionRelocType(format: DW.Format, cpu_arch: std.Target.Cpu.Arch) u32 {
+        return switch (cpu_arch) {
+            .x86_64 => @intFromEnum(switch (format) {
+                .@"32" => elf.R_X86_64.@"32",
+                .@"64" => .@"64",
+            }),
+            .riscv64 => @intFromEnum(switch (format) {
+                .@"32" => elf.R_RISCV.@"32",
+                .@"64" => .@"64",
+            }),
+            else => @panic("TODO unhandled cpu arch"),
+        };
+    }
+
+    pub fn externalRelocType(
+        target: Symbol,
+        address_size: Dwarf.AddressSize,
+        cpu_arch: std.Target.Cpu.Arch,
+    ) u32 {
+        return switch (cpu_arch) {
+            .x86_64 => @intFromEnum(switch (address_size) {
+                .@"32" => if (target.flags.is_tls) elf.R_X86_64.DTPOFF32 else .@"32",
+                .@"64" => if (target.flags.is_tls) elf.R_X86_64.DTPOFF64 else .@"64",
+                else => unreachable,
+            }),
+            .riscv64 => @intFromEnum(switch (address_size) {
+                .@"32" => elf.R_RISCV.@"32",
+                .@"64" => elf.R_RISCV.@"64",
+                else => unreachable,
+            }),
+            else => @panic("TODO unhandled cpu arch"),
+        };
+    }
+
+    const DW = std.dwarf;
+};
+
 const FormatRelocTypeCtx = struct {
     r_type: u32,
     cpu_arch: std.Target.Cpu.Arch,
@@ -124,4 +162,6 @@ const assert = std.debug.assert;
 const elf = std.elf;
 const std = @import("std");
+const Dwarf = @import("../Dwarf.zig");
 const Elf = @import("../Elf.zig");
+const Symbol = @import("Symbol.zig");
diff --git a/test/src/Debugger.zig b/test/src/Debugger.zig
index f92f7f8a95..4f2c82a56b 100644
--- a/test/src/Debugger.zig
+++ b/test/src/Debugger.zig
@@ -695,6 +695,44 @@ pub fn addTestsForTarget(db: *Debugger, target: Target) void {
         \\1 breakpoints deleted; 0 breakpoint locations disabled.
         },
     );
+    db.addLldbTest(
+        "link_object",
+        target,
+        &.{
+            .{
+                .path = "main.zig",
+                .source =
+                \\extern fn fabsf(f32) f32;
+                \\pub fn main() void {
+                \\    var x: f32 = -1234.5;
+                \\    x = fabsf(x);
+                \\    _ = &x;
+                \\}
+                ,
+            },
+        },
+        \\breakpoint set --file main.zig --source-pattern-regexp 'x = fabsf\(x\);'
+        \\process launch
+        \\frame variable x
+        \\breakpoint delete --force 1
+        \\
+        \\breakpoint set --file main.zig --source-pattern-regexp '_ = &x;'
+        \\process continue
+        \\frame variable x
+        \\breakpoint delete --force 2
+        ,
+        &.{
+            \\(lldb) frame variable x
+            \\(f32) x = -1234.5
+            \\(lldb) breakpoint delete --force 1
+            \\1 breakpoints deleted; 0 breakpoint locations disabled.
+            ,
+            \\(lldb) frame variable x
+            \\(f32) x = 1234.5
+            \\(lldb) breakpoint delete --force 2
+            \\1 breakpoints deleted; 0 breakpoint locations disabled.
+        },
+    );
 }

 const File = struct { import: ?[]const u8 = null, path: []const u8, source: []const u8 };
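A closing note on the relocation.zig hunk above: the new relocation.dwarf helpers are what ZigObject.flushModule calls to choose ELF relocation types for DWARF data. A hedged example of the expected mapping on x86_64, written as a throwaway test; the import path is only meaningful from inside this source tree and the assertion reflects the switch in the hunk, nothing more:

// Sketch only (not part of the diff); assumes it lives next to relocation.zig.
const std = @import("std");
const relocation = @import("relocation.zig");

test "dwarf cross-section reloc type on x86_64 (sketch)" {
    // A 32-bit DWARF offset should map to R_X86_64_32, a 64-bit one to R_X86_64_64.
    const r32 = relocation.dwarf.crossSectionRelocType(.@"32", .x86_64);
    const r64 = relocation.dwarf.crossSectionRelocType(.@"64", .x86_64);
    try std.testing.expectEqual(@as(u32, @intFromEnum(std.elf.R_X86_64.@"32")), r32);
    try std.testing.expectEqual(@as(u32, @intFromEnum(std.elf.R_X86_64.@"64")), r64);
}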