Merge pull request #15102 from ziglang/fix-15075

coff: handle multiple DLLs
Jakub Konka 2023-03-28 18:28:48 +02:00 committed by GitHub
commit 43487eb3ef
11 changed files with 392 additions and 214 deletions


@@ -583,6 +583,7 @@ set(ZIG_STAGE2_SOURCES
"${CMAKE_SOURCE_DIR}/src/link/C.zig"
"${CMAKE_SOURCE_DIR}/src/link/Coff.zig"
"${CMAKE_SOURCE_DIR}/src/link/Coff/Atom.zig"
"${CMAKE_SOURCE_DIR}/src/link/Coff/ImportTable.zig"
"${CMAKE_SOURCE_DIR}/src/link/Coff/Object.zig"
"${CMAKE_SOURCE_DIR}/src/link/Coff/lld.zig"
"${CMAKE_SOURCE_DIR}/src/link/Elf.zig"


@@ -4318,16 +4318,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
});
} else if (func_value.castTag(.extern_fn)) |func_payload| {
const extern_fn = func_payload.data;
const decl_name = mod.declPtr(extern_fn.owner_decl).name;
if (extern_fn.lib_name) |lib_name| {
log.debug("TODO enforce that '{s}' is expected in '{s}' library", .{
decl_name,
lib_name,
});
}
const decl_name = mem.sliceTo(mod.declPtr(extern_fn.owner_decl).name, 0);
const lib_name = mem.sliceTo(extern_fn.lib_name, 0);
if (self.bin_file.cast(link.File.MachO)) |macho_file| {
const sym_index = try macho_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
const sym_index = try macho_file.getGlobalSymbol(decl_name, lib_name);
const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
const atom_index = macho_file.getAtom(atom).getSymbolIndex().?;
_ = try self.addInst(.{
@@ -4340,7 +4334,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
},
});
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
const sym_index = try coff_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
const sym_index = try coff_file.getGlobalSymbol(decl_name, lib_name);
try self.genSetReg(Type.initTag(.u64), .x30, .{
.linker_load = .{
.type = .import,


@@ -6121,7 +6121,7 @@ fn callIntrinsic(
args: []const WValue,
) InnerError!WValue {
assert(param_types.len == args.len);
const symbol_index = func.bin_file.base.getGlobalSymbol(name) catch |err| {
const symbol_index = func.bin_file.base.getGlobalSymbol(name, null) catch |err| {
return func.fail("Could not find or create global symbol '{s}'", .{@errorName(err)});
};


@@ -5317,16 +5317,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
} else unreachable;
} else if (func_value.castTag(.extern_fn)) |func_payload| {
const extern_fn = func_payload.data;
const decl_name = mod.declPtr(extern_fn.owner_decl).name;
if (extern_fn.lib_name) |lib_name| {
log.debug("TODO enforce that '{s}' is expected in '{s}' library", .{
decl_name,
lib_name,
});
}
const decl_name = mem.sliceTo(mod.declPtr(extern_fn.owner_decl).name, 0);
const lib_name = mem.sliceTo(extern_fn.lib_name, 0);
if (self.bin_file.cast(link.File.Coff)) |coff_file| {
const sym_index = try coff_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
const sym_index = try coff_file.getGlobalSymbol(decl_name, lib_name);
try self.genSetReg(Type.initTag(.usize), .rax, .{
.linker_load = .{
.type = .import,
@@ -5335,7 +5329,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
});
try self.asmRegister(.call, .rax);
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
const sym_index = try macho_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
const sym_index = try macho_file.getGlobalSymbol(decl_name, lib_name);
const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
const atom_index = macho_file.getAtom(atom).getSymbolIndex().?;
_ = try self.addInst(.{


@@ -504,18 +504,20 @@ pub const File = struct {
/// Called from within CodeGen to retrieve the symbol index of a global symbol.
/// If no symbol exists yet with this name, a new undefined global symbol will
/// be created. This symbol may get resolved once all relocatables are (re-)linked.
pub fn getGlobalSymbol(base: *File, name: []const u8) UpdateDeclError!u32 {
/// Optionally, it is possible to specify the library in which the symbol is
/// expected to be defined if it is an import.
pub fn getGlobalSymbol(base: *File, name: []const u8, lib_name: ?[]const u8) UpdateDeclError!u32 {
if (build_options.only_c) @compileError("unreachable");
log.debug("getGlobalSymbol '{s}'", .{name});
log.debug("getGlobalSymbol '{s}' (expected in '{?s}')", .{ name, lib_name });
switch (base.tag) {
// zig fmt: off
.coff => return @fieldParentPtr(Coff, "base", base).getGlobalSymbol(name),
.coff => return @fieldParentPtr(Coff, "base", base).getGlobalSymbol(name, lib_name),
.elf => unreachable,
.macho => return @fieldParentPtr(MachO, "base", base).getGlobalSymbol(name),
.macho => return @fieldParentPtr(MachO, "base", base).getGlobalSymbol(name, lib_name),
.plan9 => unreachable,
.spirv => unreachable,
.c => unreachable,
.wasm => return @fieldParentPtr(Wasm, "base", base).getGlobalSymbol(name),
.wasm => return @fieldParentPtr(Wasm, "base", base).getGlobalSymbol(name, lib_name),
.nvptx => unreachable,
// zig fmt: on
}
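To illustrate the widened interface, here is a minimal sketch of a backend-side call; the function and library names below are hypothetical placeholders, while the real call sites appear in the CodeGen hunks above.
// Hypothetical call site in a self-hosted backend: `self.bin_file` is a *link.File.
// Passing a library name records a hint for which DLL the import should come from;
// passing null still creates the undefined global symbol, just without a hint.
const hinted = try self.bin_file.getGlobalSymbol("ExitProcess", "kernel32");
const unhinted = try self.bin_file.getGlobalSymbol("memcpy", null);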


@@ -1,36 +1,7 @@
const Coff = @This();
const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
const coff = std.coff;
const fmt = std.fmt;
const log = std.log.scoped(.link);
const math = std.math;
const mem = std.mem;
const Allocator = std.mem.Allocator;
const codegen = @import("../codegen.zig");
const link = @import("../link.zig");
const lld = @import("Coff/lld.zig");
const trace = @import("../tracy.zig").trace;
const Air = @import("../Air.zig");
pub const Atom = @import("Coff/Atom.zig");
const Compilation = @import("../Compilation.zig");
const Liveness = @import("../Liveness.zig");
const LlvmObject = @import("../codegen/llvm.zig").Object;
const Module = @import("../Module.zig");
const Object = @import("Coff/Object.zig");
const Relocation = @import("Coff/Relocation.zig");
const StringTable = @import("strtab.zig").StringTable;
const TypedValue = @import("../TypedValue.zig");
pub const base_tag: link.File.Tag = .coff;
const msdos_stub = @embedFile("msdos-stub.bin");
//! The main driver of the COFF linker.
//! Currently uses our own implementation for the incremental linker, and falls back to
//! LLD for traditional linking (linking relocatable object files).
//! LLD is also the default linker for LLVM.
/// If this is not null, an object file is created by LLVM and linked with LLD afterwards.
llvm_object: ?*LlvmObject = null,
@@ -64,13 +35,16 @@ globals_free_list: std.ArrayListUnmanaged(u32) = .{},
strtab: StringTable(.strtab) = .{},
strtab_offset: ?u32 = null,
temp_strtab: StringTable(.temp_strtab) = .{},
got_entries: std.ArrayListUnmanaged(Entry) = .{},
got_entries_free_list: std.ArrayListUnmanaged(u32) = .{},
got_entries_table: std.AutoHashMapUnmanaged(SymbolWithLoc, u32) = .{},
imports: std.ArrayListUnmanaged(Entry) = .{},
imports_free_list: std.ArrayListUnmanaged(u32) = .{},
imports_table: std.AutoHashMapUnmanaged(SymbolWithLoc, u32) = .{},
/// A table of ImportTables partitioned by the library name.
/// Key is an offset into the interning string table `temp_strtab`.
import_tables: std.AutoArrayHashMapUnmanaged(u32, ImportTable) = .{},
imports_count_dirty: bool = true,
/// Virtual address of the entry point procedure relative to image base.
entry_addr: ?u32 = null,
@@ -309,12 +283,15 @@ pub fn deinit(self: *Coff) void {
self.locals_free_list.deinit(gpa);
self.globals_free_list.deinit(gpa);
self.strtab.deinit(gpa);
self.temp_strtab.deinit(gpa);
self.got_entries.deinit(gpa);
self.got_entries_free_list.deinit(gpa);
self.got_entries_table.deinit(gpa);
self.imports.deinit(gpa);
self.imports_free_list.deinit(gpa);
self.imports_table.deinit(gpa);
for (self.import_tables.values()) |*itab| {
itab.deinit(gpa);
}
self.import_tables.deinit(gpa);
{
var it = self.decls.iterator();
@@ -358,6 +335,8 @@ fn populateMissingMetadata(self: *Coff) !void {
try self.strtab.buffer.ensureUnusedCapacity(gpa, @sizeOf(u32));
self.strtab.buffer.appendNTimesAssumeCapacity(0, @sizeOf(u32));
try self.temp_strtab.buffer.append(gpa, 0);
// Index 0 is always a null symbol.
try self.locals.append(gpa, .{
.name = [_]u8{0} ** 8,
@@ -725,28 +704,6 @@ pub fn allocateGotEntry(self: *Coff, target: SymbolWithLoc) !u32 {
return index;
}
pub fn allocateImportEntry(self: *Coff, target: SymbolWithLoc) !u32 {
const gpa = self.base.allocator;
try self.imports.ensureUnusedCapacity(gpa, 1);
const index: u32 = blk: {
if (self.imports_free_list.popOrNull()) |index| {
log.debug(" (reusing import entry index {d})", .{index});
break :blk index;
} else {
log.debug(" (allocating import entry at index {d})", .{self.imports.items.len});
const index = @intCast(u32, self.imports.items.len);
_ = self.imports.addOneAssumeCapacity();
break :blk index;
}
};
self.imports.items[index] = .{ .target = target, .sym_index = 0 };
try self.imports_table.putNoClobber(gpa, target, index);
return index;
}
pub fn createAtom(self: *Coff) !Atom.Index {
const gpa = self.base.allocator;
const atom_index = @intCast(Atom.Index, self.atoms.items.len);
@@ -797,21 +754,6 @@ fn createGotAtom(self: *Coff, target: SymbolWithLoc) !Atom.Index {
return atom_index;
}
fn createImportAtom(self: *Coff) !Atom.Index {
const atom_index = try self.createAtom();
const atom = self.getAtomPtr(atom_index);
atom.size = @sizeOf(u64);
atom.alignment = @alignOf(u64);
const sym = atom.getSymbolPtr(self);
sym.section_number = @intToEnum(coff.SectionNumber, self.idata_section_index.? + 1);
sym.value = try self.allocateAtom(atom_index, atom.size, atom.alignment);
log.debug("allocated import atom at 0x{x}", .{sym.value});
return atom_index;
}
fn growAtom(self: *Coff, atom_index: Atom.Index, new_atom_size: u32, alignment: u32) !u32 {
const atom = self.getAtom(atom_index);
const sym = atom.getSymbol(self);
@@ -871,10 +813,8 @@ fn markRelocsDirtyByAddress(self: *Coff, addr: u32) void {
var it = self.relocs.valueIterator();
while (it.next()) |relocs| {
for (relocs.items) |*reloc| {
const target_atom_index = reloc.getTargetAtomIndex(self) orelse continue;
const target_atom = self.getAtom(target_atom_index);
const target_sym = target_atom.getSymbol(self);
if (target_sym.value < addr) continue;
const target_vaddr = reloc.getTargetAddress(self) orelse continue;
if (target_vaddr < addr) continue;
reloc.dirty = true;
}
}
@@ -1463,35 +1403,42 @@ pub fn flushModule(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
sub_prog_node.activate();
defer sub_prog_node.end();
const gpa = self.base.allocator;
while (self.unresolved.popOrNull()) |entry| {
assert(entry.value); // We only expect imports generated by the incremental linker for now.
const global = self.globals.items[entry.key];
if (self.imports_table.contains(global)) continue;
const import_index = try self.allocateImportEntry(global);
const import_atom_index = try self.createImportAtom();
const import_atom = self.getAtom(import_atom_index);
self.imports.items[import_index].sym_index = import_atom.getSymbolIndex().?;
try self.writePtrWidthAtom(import_atom_index);
}
if (build_options.enable_logging) {
self.logSymtab();
const sym = self.getSymbol(global);
const res = try self.import_tables.getOrPut(gpa, sym.value);
const itable = res.value_ptr;
if (!res.found_existing) {
itable.* = .{};
}
if (itable.lookup.contains(global)) continue;
// TODO: we could technically write the pointer placeholder for the to-be-bound import here,
// but since this happens in flush, there is currently no point.
_ = try itable.addImport(gpa, global);
self.imports_count_dirty = true;
}
try self.writeImportTables();
{
var it = self.relocs.keyIterator();
while (it.next()) |atom| {
try self.resolveRelocs(atom.*);
}
}
try self.writeImportTable();
try self.writeBaseRelocations();
if (self.getEntryPoint()) |entry_sym_loc| {
self.entry_addr = self.getSymbol(entry_sym_loc).value;
}
if (build_options.enable_logging) {
self.logSymtab();
self.logImportTables();
}
try self.writeStrtab();
try self.writeDataDirectoriesHeaders();
try self.writeSectionHeaders();
@@ -1504,6 +1451,8 @@ pub fn flushModule(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
self.error_flags.no_entry_point_found = false;
try self.writeHeader();
}
assert(!self.imports_count_dirty);
}
pub fn getDeclVAddr(self: *Coff, decl_index: Module.Decl.Index, reloc_info: link.File.RelocInfo) !u64 {
@@ -1526,7 +1475,7 @@ pub fn getDeclVAddr(self: *Coff, decl_index: Module.Decl.Index, reloc_info: link
return 0;
}
pub fn getGlobalSymbol(self: *Coff, name: []const u8) !u32 {
pub fn getGlobalSymbol(self: *Coff, name: []const u8, lib_name_name: ?[]const u8) !u32 {
const gop = try self.getOrPutGlobalPtr(name);
const global_index = self.getGlobalIndex(name).?;
@@ -1543,6 +1492,12 @@ pub fn getGlobalSymbol(self: *Coff, name: []const u8) !u32 {
try self.setSymbolName(sym, name);
sym.storage_class = .EXTERNAL;
if (lib_name_name) |lib_name| {
// We repurpose the 'value' field of the Symbol struct to store an offset into the
// temporary string table where we will store the library name hint.
sym.value = try self.temp_strtab.insert(gpa, lib_name);
}
try self.unresolved.putNoClobber(gpa, global_index, true);
return global_index;
@@ -1625,12 +1580,10 @@ fn writeBaseRelocations(self: *Coff) !void {
const needed_size = @intCast(u32, buffer.items.len);
if (needed_size > sect_capacity) {
const new_offset = self.findFreeSpace(needed_size, default_file_alignment);
log.debug("writing {s} at 0x{x} to 0x{x} (0x{x} - 0x{x})", .{
log.debug("moving {s} from 0x{x} to 0x{x}", .{
self.getSectionName(header),
header.pointer_to_raw_data,
header.pointer_to_raw_data + needed_size,
new_offset,
new_offset + needed_size,
});
header.pointer_to_raw_data = new_offset;
@@ -1651,88 +1604,144 @@ fn writeBaseRelocations(self: *Coff) !void {
};
}
fn writeImportTable(self: *Coff) !void {
fn writeImportTables(self: *Coff) !void {
if (self.idata_section_index == null) return;
if (!self.imports_count_dirty) return;
const gpa = self.base.allocator;
const section = self.sections.get(self.idata_section_index.?);
const last_atom_index = section.last_atom_index orelse return;
const last_atom = self.getAtom(last_atom_index);
const ext = ".dll";
const header = &self.sections.items(.header)[self.idata_section_index.?];
const iat_rva = section.header.virtual_address;
const iat_size = last_atom.getSymbol(self).value + last_atom.size * 2 - iat_rva; // account for sentinel zero pointer
// Calculate needed size
var iat_size: u32 = 0;
var dir_table_size: u32 = @sizeOf(coff.ImportDirectoryEntry); // sentinel
var lookup_table_size: u32 = 0;
var names_table_size: u32 = 0;
var dll_names_size: u32 = 0;
for (self.import_tables.keys(), 0..) |off, i| {
const lib_name = self.temp_strtab.getAssumeExists(off);
const itable = self.import_tables.values()[i];
iat_size += itable.size() + 8;
dir_table_size += @sizeOf(coff.ImportDirectoryEntry);
lookup_table_size += @intCast(u32, itable.entries.items.len + 1) * @sizeOf(coff.ImportLookupEntry64.ByName);
for (itable.entries.items) |entry| {
const sym_name = self.getSymbolName(entry);
names_table_size += 2 + mem.alignForwardGeneric(u32, @intCast(u32, sym_name.len + 1), 2);
}
dll_names_size += @intCast(u32, lib_name.len + ext.len + 1);
}
const dll_name = "KERNEL32.dll";
const needed_size = iat_size + dir_table_size + lookup_table_size + names_table_size + dll_names_size;
const sect_capacity = self.allocatedSize(header.pointer_to_raw_data);
if (needed_size > sect_capacity) {
const new_offset = self.findFreeSpace(needed_size, default_file_alignment);
log.debug("moving .idata from 0x{x} to 0x{x}", .{ header.pointer_to_raw_data, new_offset });
header.pointer_to_raw_data = new_offset;
var import_dir_entry = coff.ImportDirectoryEntry{
.import_lookup_table_rva = @sizeOf(coff.ImportDirectoryEntry) * 2,
const sect_vm_capacity = self.allocatedVirtualSize(header.virtual_address);
if (needed_size > sect_vm_capacity) {
try self.growSectionVM(self.idata_section_index.?, needed_size);
}
header.virtual_size = @max(header.virtual_size, needed_size);
header.size_of_raw_data = needed_size;
}
// Do the actual writes
var buffer = std.ArrayList(u8).init(gpa);
defer buffer.deinit();
try buffer.ensureTotalCapacityPrecise(needed_size);
buffer.resize(needed_size) catch unreachable;
const dir_header_size = @sizeOf(coff.ImportDirectoryEntry);
const lookup_entry_size = @sizeOf(coff.ImportLookupEntry64.ByName);
var iat_offset: u32 = 0;
var dir_table_offset = iat_size;
var lookup_table_offset = dir_table_offset + dir_table_size;
var names_table_offset = lookup_table_offset + lookup_table_size;
var dll_names_offset = names_table_offset + names_table_size;
for (self.import_tables.keys(), 0..) |off, i| {
const lib_name = self.temp_strtab.getAssumeExists(off);
const itable = self.import_tables.values()[i];
// Lookup table header
const lookup_header = coff.ImportDirectoryEntry{
.import_lookup_table_rva = header.virtual_address + lookup_table_offset,
.time_date_stamp = 0,
.forwarder_chain = 0,
.name_rva = header.virtual_address + dll_names_offset,
.import_address_table_rva = header.virtual_address + iat_offset,
};
mem.copy(u8, buffer.items[dir_table_offset..], mem.asBytes(&lookup_header));
dir_table_offset += dir_header_size;
for (itable.entries.items) |entry| {
const import_name = self.getSymbolName(entry);
// IAT and lookup table entry
const lookup = coff.ImportLookupEntry64.ByName{ .name_table_rva = @intCast(u31, header.virtual_address + names_table_offset) };
mem.copy(u8, buffer.items[iat_offset..], mem.asBytes(&lookup));
iat_offset += lookup_entry_size;
mem.copy(u8, buffer.items[lookup_table_offset..], mem.asBytes(&lookup));
lookup_table_offset += lookup_entry_size;
// Names table entry
mem.writeIntLittle(u16, buffer.items[names_table_offset..][0..2], 0); // Hint set to 0 until we learn how to parse DLLs
names_table_offset += 2;
mem.copy(u8, buffer.items[names_table_offset..], import_name);
names_table_offset += @intCast(u32, import_name.len);
buffer.items[names_table_offset] = 0;
names_table_offset += 1;
if (!mem.isAlignedGeneric(usize, names_table_offset, @sizeOf(u16))) {
buffer.items[names_table_offset] = 0;
names_table_offset += 1;
}
}
// IAT sentinel
mem.writeIntLittle(u64, buffer.items[iat_offset..][0..lookup_entry_size], 0);
iat_offset += 8;
// Lookup table sentinel
mem.copy(u8, buffer.items[lookup_table_offset..], mem.asBytes(&coff.ImportLookupEntry64.ByName{ .name_table_rva = 0 }));
lookup_table_offset += lookup_entry_size;
// DLL name
mem.copy(u8, buffer.items[dll_names_offset..], lib_name);
dll_names_offset += @intCast(u32, lib_name.len);
mem.copy(u8, buffer.items[dll_names_offset..], ext);
dll_names_offset += @intCast(u32, ext.len);
buffer.items[dll_names_offset] = 0;
dll_names_offset += 1;
}
// Sentinel
const lookup_header = coff.ImportDirectoryEntry{
.import_lookup_table_rva = 0,
.time_date_stamp = 0,
.forwarder_chain = 0,
.name_rva = 0,
.import_address_table_rva = iat_rva,
.import_address_table_rva = 0,
};
mem.copy(u8, buffer.items[dir_table_offset..], mem.asBytes(&lookup_header));
dir_table_offset += dir_header_size;
// TODO: we currently assume there's only one (implicit) DLL - ntdll
var lookup_table = std.ArrayList(coff.ImportLookupEntry64.ByName).init(gpa);
defer lookup_table.deinit();
assert(dll_names_offset == needed_size);
var names_table = std.ArrayList(u8).init(gpa);
defer names_table.deinit();
// TODO: check if import is still valid
for (self.imports.items) |entry| {
const target_name = self.getSymbolName(entry.target);
const start = names_table.items.len;
mem.writeIntLittle(u16, try names_table.addManyAsArray(2), 0); // TODO: currently, hint is set to 0 as we haven't yet parsed any DLL
try names_table.appendSlice(target_name);
try names_table.append(0);
const end = names_table.items.len;
if (!mem.isAlignedGeneric(usize, end - start, @sizeOf(u16))) {
try names_table.append(0);
}
try lookup_table.append(.{ .name_table_rva = @intCast(u31, start) });
}
try lookup_table.append(.{ .name_table_rva = 0 }); // the sentinel
const dir_entry_size = @sizeOf(coff.ImportDirectoryEntry) + lookup_table.items.len * @sizeOf(coff.ImportLookupEntry64.ByName) + names_table.items.len + dll_name.len + 1;
const needed_size = iat_size + dir_entry_size + @sizeOf(coff.ImportDirectoryEntry);
const sect_capacity = self.allocatedSize(section.header.pointer_to_raw_data);
assert(needed_size < sect_capacity); // TODO: implement expanding .idata section
// Fixup offsets
const base_rva = iat_rva + iat_size;
import_dir_entry.import_lookup_table_rva += base_rva;
import_dir_entry.name_rva = @intCast(u32, base_rva + dir_entry_size + @sizeOf(coff.ImportDirectoryEntry) - dll_name.len - 1);
for (lookup_table.items[0 .. lookup_table.items.len - 1]) |*lk| {
lk.name_table_rva += @intCast(u31, base_rva + @sizeOf(coff.ImportDirectoryEntry) * 2 + lookup_table.items.len * @sizeOf(coff.ImportLookupEntry64.ByName));
}
var buffer = std.ArrayList(u8).init(gpa);
defer buffer.deinit();
try buffer.ensureTotalCapacity(dir_entry_size + @sizeOf(coff.ImportDirectoryEntry));
buffer.appendSliceAssumeCapacity(mem.asBytes(&import_dir_entry));
buffer.appendNTimesAssumeCapacity(0, @sizeOf(coff.ImportDirectoryEntry)); // the sentinel; TODO: I think doing all of the above on bytes directly might be cleaner
buffer.appendSliceAssumeCapacity(mem.sliceAsBytes(lookup_table.items));
buffer.appendSliceAssumeCapacity(names_table.items);
buffer.appendSliceAssumeCapacity(dll_name);
buffer.appendAssumeCapacity(0);
try self.base.file.?.pwriteAll(buffer.items, section.header.pointer_to_raw_data + iat_size);
// Override the IAT atoms
// TODO: we should rewrite only dirtied atoms, but that's for way later
try self.base.file.?.pwriteAll(mem.sliceAsBytes(lookup_table.items), section.header.pointer_to_raw_data);
try self.base.file.?.pwriteAll(buffer.items, header.pointer_to_raw_data);
self.data_directories[@enumToInt(coff.DirectoryEntry.IMPORT)] = .{
.virtual_address = iat_rva + iat_size,
.size = @intCast(u32, @sizeOf(coff.ImportDirectoryEntry) * 2),
.virtual_address = header.virtual_address + iat_size,
.size = dir_table_size,
};
self.data_directories[@enumToInt(coff.DirectoryEntry.IAT)] = .{
.virtual_address = iat_rva,
.virtual_address = header.virtual_address,
.size = iat_size,
};
self.imports_count_dirty = false;
}
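As a worked example of the size bookkeeping above (assuming a hypothetical library "foo" providing the imports "bar" and "baz"): iat_size = 2*8 + 8 = 24 bytes (two pointers plus the sentinel), dir_table_size = 2 * @sizeOf(coff.ImportDirectoryEntry) = 40 bytes (one directory entry plus the sentinel), lookup_table_size = (2 + 1) * 8 = 24 bytes, names_table_size = 2 * (2 + 4) = 12 bytes (a 2-byte hint plus the NUL-terminated name padded to 2 bytes each), and dll_names_size = 3 + 4 + 1 = 8 bytes ("foo" + ".dll" + NUL), giving needed_size = 108 bytes.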
fn writeStrtab(self: *Coff) !void {
@@ -2121,14 +2130,6 @@ pub fn getGotAtomIndexForSymbol(self: *const Coff, sym_loc: SymbolWithLoc) ?Atom
return self.getAtomIndexForSymbol(.{ .sym_index = got_entry.sym_index, .file = null });
}
/// Returns import atom that references `sym_loc` if one exists.
/// Returns null otherwise.
pub fn getImportAtomIndexForSymbol(self: *const Coff, sym_loc: SymbolWithLoc) ?Atom.Index {
const imports_index = self.imports_table.get(sym_loc) orelse return null;
const imports_entry = self.imports.items[imports_index];
return self.getAtomIndexForSymbol(.{ .sym_index = imports_entry.sym_index, .file = null });
}
fn setSectionName(self: *Coff, header: *coff.SectionHeader, name: []const u8) !void {
if (name.len <= 8) {
mem.copy(u8, &header.name, name);
@@ -2249,3 +2250,50 @@ fn logSections(self: *Coff) void {
});
}
}
fn logImportTables(self: *const Coff) void {
log.debug("import tables:", .{});
for (self.import_tables.keys(), 0..) |off, i| {
const itable = self.import_tables.values()[i];
log.debug("{}", .{itable.fmtDebug(.{
.coff_file = self,
.index = i,
.name_off = off,
})});
}
}
const Coff = @This();
const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
const coff = std.coff;
const fmt = std.fmt;
const log = std.log.scoped(.link);
const math = std.math;
const mem = std.mem;
const Allocator = std.mem.Allocator;
const codegen = @import("../codegen.zig");
const link = @import("../link.zig");
const lld = @import("Coff/lld.zig");
const trace = @import("../tracy.zig").trace;
const Air = @import("../Air.zig");
pub const Atom = @import("Coff/Atom.zig");
const Compilation = @import("../Compilation.zig");
const ImportTable = @import("Coff/ImportTable.zig");
const Liveness = @import("../Liveness.zig");
const LlvmObject = @import("../codegen/llvm.zig").Object;
const Module = @import("../Module.zig");
const Object = @import("Coff/Object.zig");
const Relocation = @import("Coff/Relocation.zig");
const StringTable = @import("strtab.zig").StringTable;
const TypedValue = @import("../TypedValue.zig");
pub const base_tag: link.File.Tag = .coff;
const msdos_stub = @embedFile("msdos-stub.bin");


@@ -0,0 +1,133 @@
//! Represents an import table in the .idata section where each contained pointer
//! is to a symbol from the same DLL.
//!
//! The layout of .idata section is as follows:
//!
//! --- ADDR1 : IAT (all import tables concatenated together)
//! ptr
//! ptr
//! 0 sentinel
//! ptr
//! 0 sentinel
//! --- ADDR2: headers
//! ImportDirectoryEntry header
//! ImportDirectoryEntry header
//! sentinel
//! --- ADDR3: lookup tables
//! Lookup table
//! 0 sentinel
//! Lookup table
//! 0 sentinel
//! --- ADDR4: name hint tables
//! hint-symname
//! hint-symname
//! --- ADDR5: DLL names
//! DLL#1 name
//! DLL#2 name
//! --- END
entries: std.ArrayListUnmanaged(SymbolWithLoc) = .{},
free_list: std.ArrayListUnmanaged(u32) = .{},
lookup: std.AutoHashMapUnmanaged(SymbolWithLoc, u32) = .{},
pub fn deinit(itab: *ImportTable, allocator: Allocator) void {
itab.entries.deinit(allocator);
itab.free_list.deinit(allocator);
itab.lookup.deinit(allocator);
}
/// Size of the import table, not including the sentinel.
pub fn size(itab: ImportTable) u32 {
return @intCast(u32, itab.entries.items.len) * @sizeOf(u64);
}
pub fn addImport(itab: *ImportTable, allocator: Allocator, target: SymbolWithLoc) !ImportIndex {
try itab.entries.ensureUnusedCapacity(allocator, 1);
const index: u32 = blk: {
if (itab.free_list.popOrNull()) |index| {
log.debug(" (reusing import entry index {d})", .{index});
break :blk index;
} else {
log.debug(" (allocating import entry at index {d})", .{itab.entries.items.len});
const index = @intCast(u32, itab.entries.items.len);
_ = itab.entries.addOneAssumeCapacity();
break :blk index;
}
};
itab.entries.items[index] = target;
try itab.lookup.putNoClobber(allocator, target, index);
return index;
}
const Context = struct {
coff_file: *const Coff,
/// Index of this ImportTable in a global list of all tables.
/// This is required in order to calculate the base vaddr of this ImportTable.
index: usize,
/// Offset into the string interning table of the DLL this ImportTable corresponds to.
name_off: u32,
};
fn getBaseAddress(ctx: Context) u32 {
const header = ctx.coff_file.sections.items(.header)[ctx.coff_file.idata_section_index.?];
var addr = header.virtual_address;
for (ctx.coff_file.import_tables.values(), 0..) |other_itab, i| {
if (ctx.index == i) break;
addr += @intCast(u32, other_itab.entries.items.len * @sizeOf(u64)) + 8;
}
return addr;
}
pub fn getImportAddress(itab: *const ImportTable, target: SymbolWithLoc, ctx: Context) ?u32 {
const index = itab.lookup.get(target) orelse return null;
const base_vaddr = getBaseAddress(ctx);
return base_vaddr + index * @sizeOf(u64);
}
const FormatContext = struct {
itab: ImportTable,
ctx: Context,
};
fn fmt(
fmt_ctx: FormatContext,
comptime unused_format_string: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
_ = options;
comptime assert(unused_format_string.len == 0);
const lib_name = fmt_ctx.ctx.coff_file.temp_strtab.getAssumeExists(fmt_ctx.ctx.name_off);
const base_vaddr = getBaseAddress(fmt_ctx.ctx);
try writer.print("IAT({s}.dll) @{x}:", .{ lib_name, base_vaddr });
for (fmt_ctx.itab.entries.items, 0..) |entry, i| {
try writer.print("\n {d}@{?x} => {s}", .{
i,
fmt_ctx.itab.getImportAddress(entry, fmt_ctx.ctx),
fmt_ctx.ctx.coff_file.getSymbolName(entry),
});
}
}
fn format(itab: ImportTable, comptime unused_format_string: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
_ = itab;
_ = unused_format_string;
_ = options;
_ = writer;
@compileError("do not format ImportTable directly; use itab.fmtDebug()");
}
pub fn fmtDebug(itab: ImportTable, ctx: Context) std.fmt.Formatter(fmt) {
return .{ .data = .{ .itab = itab, .ctx = ctx } };
}
const ImportIndex = u32;
const ImportTable = @This();
const std = @import("std");
const assert = std.debug.assert;
const log = std.log.scoped(.link);
const Allocator = std.mem.Allocator;
const Coff = @import("../Coff.zig");
const SymbolWithLoc = Coff.SymbolWithLoc;
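To see how the pieces fit together, here is a minimal sketch (variable names such as `coff_file`, `gpa`, and `global` are placeholders) of the flow the linker follows in flushModule and during relocation resolution:
// Intern the DLL name hint; the returned offset keys `import_tables`.
const name_off = try coff_file.temp_strtab.insert(gpa, "kernel32");
const gop = try coff_file.import_tables.getOrPut(gpa, name_off);
if (!gop.found_existing) gop.value_ptr.* = .{};
// Track the unresolved global in that DLL's import table...
_ = try gop.value_ptr.addImport(gpa, global);
// ...and later ask for the virtual address of its IAT slot when resolving
// an .import relocation.
const slot_vaddr = gop.value_ptr.getImportAddress(global, .{
    .coff_file = coff_file,
    .index = coff_file.import_tables.getIndex(name_off).?,
    .name_off = name_off,
});
_ = slot_vaddr;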


@@ -45,23 +45,30 @@ pcrel: bool,
length: u2,
dirty: bool = true,
/// Returns an Atom which is the target node of this relocation edge (if any).
pub fn getTargetAtomIndex(self: Relocation, coff_file: *const Coff) ?Atom.Index {
/// Returns the address of the target, if any.
pub fn getTargetAddress(self: Relocation, coff_file: *const Coff) ?u32 {
switch (self.type) {
.got,
.got_page,
.got_pageoff,
=> return coff_file.getGotAtomIndexForSymbol(self.target),
.got, .got_page, .got_pageoff, .direct, .page, .pageoff => {
const maybe_target_atom_index = switch (self.type) {
.got, .got_page, .got_pageoff => coff_file.getGotAtomIndexForSymbol(self.target),
.direct, .page, .pageoff => coff_file.getAtomIndexForSymbol(self.target),
else => unreachable,
};
const target_atom_index = maybe_target_atom_index orelse return null;
const target_atom = coff_file.getAtom(target_atom_index);
return target_atom.getSymbol(coff_file).value;
},
.direct,
.page,
.pageoff,
=> return coff_file.getAtomIndexForSymbol(self.target),
.import,
.import_page,
.import_pageoff,
=> return coff_file.getImportAtomIndexForSymbol(self.target),
.import, .import_page, .import_pageoff => {
const sym = coff_file.getSymbol(self.target);
const index = coff_file.import_tables.getIndex(sym.value) orelse return null;
const itab = coff_file.import_tables.values()[index];
return itab.getImportAddress(self.target, .{
.coff_file = coff_file,
.index = index,
.name_off = sym.value,
});
},
}
}
@@ -73,9 +80,7 @@ pub fn resolve(self: *Relocation, atom_index: Atom.Index, coff_file: *Coff) !voi
const file_offset = source_section.pointer_to_raw_data + source_sym.value - source_section.virtual_address;
const target_atom_index = self.getTargetAtomIndex(coff_file) orelse return;
const target_atom = coff_file.getAtom(target_atom_index);
const target_vaddr = target_atom.getSymbol(coff_file).value;
const target_vaddr = self.getTargetAddress(coff_file) orelse return;
const target_vaddr_with_addend = target_vaddr + self.addend;
log.debug(" ({x}: [() => 0x{x} ({s})) ({s}) (in file at 0x{x})", .{


@@ -3202,7 +3202,8 @@ fn insertSection(self: *MachO, segment_index: u8, header: macho.section_64) !u8
return insertion_index;
}
pub fn getGlobalSymbol(self: *MachO, name: []const u8) !u32 {
pub fn getGlobalSymbol(self: *MachO, name: []const u8, lib_name: ?[]const u8) !u32 {
_ = lib_name;
const gpa = self.base.allocator;
const sym_name = try std.fmt.allocPrint(gpa, "_{s}", .{name});


@@ -1573,7 +1573,8 @@ pub fn lowerUnnamedConst(wasm: *Wasm, tv: TypedValue, decl_index: Module.Decl.In
/// such as an exported or imported symbol.
/// If the symbol does not yet exist, creates a new symbol instead
/// and then returns the index to it.
pub fn getGlobalSymbol(wasm: *Wasm, name: []const u8) !u32 {
pub fn getGlobalSymbol(wasm: *Wasm, name: []const u8, lib_name: ?[]const u8) !u32 {
_ = lib_name;
const name_index = try wasm.string_table.put(wasm.base.allocator, name);
const gop = try wasm.globals.getOrPut(wasm.base.allocator, name_index);
if (gop.found_existing) {


@@ -112,7 +112,6 @@ const test_targets = blk: {
.os_tag = .windows,
.abi = .gnu,
},
.single_threaded = true, // https://github.com/ziglang/zig/issues/15075
.backend = .stage2_x86_64,
},