diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index 98f778ef35..884fd68d55 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -3466,20 +3466,16 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
     // on linking.
     const mod = self.bin_file.options.module.?;
     if (self.air.value(callee)) |func_value| {
-        if (self.bin_file.tag == link.File.Elf.base_tag or self.bin_file.tag == link.File.Coff.base_tag) {
+        if (self.bin_file.cast(link.File.Elf)) |elf_file| {
             if (func_value.castTag(.function)) |func_payload| {
                 const func = func_payload.data;
                 const ptr_bits = self.target.cpu.arch.ptrBitWidth();
                 const ptr_bytes: u64 = @divExact(ptr_bits, 8);
                 const fn_owner_decl = mod.declPtr(func.owner_decl);
-                const got_addr = if (self.bin_file.cast(link.File.Elf)) |elf_file| blk: {
+                const got_addr = blk: {
                     const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?];
                     break :blk @intCast(u32, got.p_vaddr + fn_owner_decl.link.elf.offset_table_index * ptr_bytes);
-                } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
-                    const got_atom = coff_file.getGotAtomForSymbol(.{ .sym_index = fn_owner_decl.link.coff.sym_index, .file = null }).?;
-                    const got_sym = coff_file.getSymbol(got_atom.getSymbolWithLoc());
-                    break :blk got_sym.value;
-                } else unreachable;
+                };

                 try self.genSetReg(Type.initTag(.usize), .x30, .{ .memory = got_addr });

@@ -3547,6 +3543,8 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
             } else {
                 return self.fail("TODO implement calling bitcasted functions", .{});
             }
+        } else if (self.bin_file.cast(link.File.Coff)) |_| {
+            return self.fail("TODO implement calling in COFF for {}", .{self.target.cpu.arch});
         } else unreachable;
     } else {
         assert(ty.zigTypeTag() == .Pointer);
@@ -5110,10 +5108,8 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
         // the linker has enough info to perform relocations.
         assert(decl.link.macho.sym_index != 0);
         return MCValue{ .got_load = decl.link.macho.sym_index };
-    } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
-        const got_atom = coff_file.getGotAtomForSymbol(.{ .sym_index = decl.link.coff.sym_index, .file = null }).?;
-        const got_sym = coff_file.getSymbol(got_atom.getSymbolWithLoc());
-        return MCValue{ .memory = got_sym.value };
+    } else if (self.bin_file.cast(link.File.Coff)) |_| {
+        return self.fail("TODO codegen COFF const Decl pointer", .{});
     } else if (self.bin_file.cast(link.File.Plan9)) |p9| {
         try p9.seeDecl(decl_index);
         const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
diff --git a/src/arch/arm/CodeGen.zig b/src/arch/arm/CodeGen.zig
index 0f796c530d..cefcf3b114 100644
--- a/src/arch/arm/CodeGen.zig
+++ b/src/arch/arm/CodeGen.zig
@@ -3698,7 +3698,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
     // Due to incremental compilation, how function calls are generated depends
     // on linking.
     switch (self.bin_file.tag) {
-        .elf, .coff => {
+        .elf => {
             if (self.air.value(callee)) |func_value| {
                 if (func_value.castTag(.function)) |func_payload| {
                     const func = func_payload.data;
@@ -3709,12 +3709,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
                     const got_addr = if (self.bin_file.cast(link.File.Elf)) |elf_file| blk: {
                         const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?];
                         break :blk @intCast(u32, got.p_vaddr + fn_owner_decl.link.elf.offset_table_index * ptr_bytes);
-                    } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
-                        const got_atom = coff_file.getGotAtomForSymbol(.{ .sym_index = fn_owner_decl.link.coff.sym_index, .file = null }).?;
-                        const got_sym = coff_file.getSymbol(got_atom.getSymbolWithLoc());
-                        break :blk @intCast(u32, got_sym.value);
                     } else unreachable;
-
                     try self.genSetReg(Type.initTag(.usize), .lr, .{ .memory = got_addr });
                 } else if (func_value.castTag(.extern_fn)) |_| {
                     return self.fail("TODO implement calling extern functions", .{});
@@ -3752,6 +3747,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
             }
         },
         .macho => unreachable, // unsupported architecture for MachO
+        .coff => return self.fail("TODO implement call in COFF for {}", .{self.target.cpu.arch}),
         .plan9 => return self.fail("TODO implement call on plan9 for {}", .{self.target.cpu.arch}),
         else => unreachable,
     }
@@ -5549,10 +5545,8 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
         return MCValue{ .memory = got_addr };
     } else if (self.bin_file.cast(link.File.MachO)) |_| {
         unreachable; // unsupported architecture for MachO
-    } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
-        const got_atom = coff_file.getGotAtomForSymbol(.{ .sym_index = decl.link.coff.sym_index, .file = null }).?;
-        const got_sym = coff_file.getSymbol(got_atom.getSymbolWithLoc());
-        return MCValue{ .memory = got_sym.value };
+    } else if (self.bin_file.cast(link.File.Coff)) |_| {
+        return self.fail("TODO codegen COFF const Decl pointer", .{});
     } else if (self.bin_file.cast(link.File.Plan9)) |p9| {
         try p9.seeDecl(decl_index);
         const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
diff --git a/src/arch/riscv64/CodeGen.zig b/src/arch/riscv64/CodeGen.zig
index 2bb68086d8..cd1d0e4050 100644
--- a/src/arch/riscv64/CodeGen.zig
+++ b/src/arch/riscv64/CodeGen.zig
@@ -1718,7 +1718,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.

     // Due to incremental compilation, how function calls are generated depends
     // on linking.
-    if (self.bin_file.tag == link.File.Elf.base_tag or self.bin_file.tag == link.File.Coff.base_tag) {
+    if (self.bin_file.cast(link.File.Elf)) |elf_file| {
        for (info.args) |mc_arg, arg_i| {
            const arg = args[arg_i];
            const arg_ty = self.air.typeOf(arg);
@@ -1752,14 +1752,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
                 const ptr_bytes: u64 = @divExact(ptr_bits, 8);
                 const mod = self.bin_file.options.module.?;
                 const fn_owner_decl = mod.declPtr(func.owner_decl);
-                const got_addr = if (self.bin_file.cast(link.File.Elf)) |elf_file| blk: {
+                const got_addr = blk: {
                     const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?];
                     break :blk @intCast(u32, got.p_vaddr + fn_owner_decl.link.elf.offset_table_index * ptr_bytes);
-                } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
-                    const got_atom = coff_file.getGotAtomForSymbol(.{ .sym_index = fn_owner_decl.link.coff.sym_index, .file = null }).?;
-                    const got_sym = coff_file.getSymbol(got_atom.getSymbolWithLoc());
-                    break :blk got_sym.value;
-                } else unreachable;
+                };

                 try self.genSetReg(Type.initTag(.usize), .ra, .{ .memory = got_addr });
                 _ = try self.addInst(.{
@@ -1778,6 +1774,8 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
         } else {
             return self.fail("TODO implement calling runtime known function pointer", .{});
         }
+    } else if (self.bin_file.cast(link.File.Coff)) |_| {
+        return self.fail("TODO implement calling in COFF for {}", .{self.target.cpu.arch});
     } else if (self.bin_file.cast(link.File.MachO)) |_| {
         unreachable; // unsupported architecture for MachO
     } else if (self.bin_file.cast(link.File.Plan9)) |_| {
@@ -2592,10 +2590,8 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
         // TODO I'm hacking my way through here by repurposing .memory for storing
         // index to the GOT target symbol index.
         return MCValue{ .memory = decl.link.macho.sym_index };
-    } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
-        const got_atom = coff_file.getGotAtomForSymbol(.{ .sym_index = decl.link.coff.sym_index, .file = null }).?;
-        const got_sym = coff_file.getSymbol(got_atom.getSymbolWithLoc());
-        return MCValue{ .memory = got_sym.value };
+    } else if (self.bin_file.cast(link.File.Coff)) |_| {
+        return self.fail("TODO codegen COFF const Decl pointer", .{});
     } else if (self.bin_file.cast(link.File.Plan9)) |p9| {
         try p9.seeDecl(decl_index);
         const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig
index 60ac26ecc6..e5d47e589a 100644
--- a/src/arch/x86_64/CodeGen.zig
+++ b/src/arch/x86_64/CodeGen.zig
@@ -2657,19 +2657,19 @@ fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue
         .direct_load,
         => |sym_index| {
             const abi_size = @intCast(u32, ptr_ty.abiSize(self.target.*));
-            const flags: u2 = switch (ptr) {
-                .got_load => 0b00,
-                .direct_load => 0b01,
-                else => unreachable,
-            };
             const mod = self.bin_file.options.module.?;
             const fn_owner_decl = mod.declPtr(self.mod_fn.owner_decl);
             const atom_index = if (self.bin_file.tag == link.File.MachO.base_tag)
                 fn_owner_decl.link.macho.sym_index
             else
                 fn_owner_decl.link.coff.sym_index;
+            const flags: u2 = switch (ptr) {
+                .got_load => 0b00,
+                .direct_load => 0b01,
+                else => unreachable,
+            };
             _ = try self.addInst(.{
-                .tag = .lea_pie,
+                .tag = .lea_pic,
                 .ops = Mir.Inst.Ops.encode(.{
                     .reg1 = registerAlias(reg, abi_size),
                     .flags = flags,
@@ -4004,9 +4004,9 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
             if (func_value.castTag(.function)) |func_payload| {
                 const func = func_payload.data;
                 const fn_owner_decl = mod.declPtr(func.owner_decl);
-                const sym_index = fn_owner_decl.link.coff.sym_index;
-                try self.genSetReg(Type.initTag(.usize), .rax, .{ .got_load = sym_index });
-                // callq *%rax
+                try self.genSetReg(Type.initTag(.usize), .rax, .{
+                    .got_load = fn_owner_decl.link.coff.sym_index,
+                });
                 _ = try self.addInst(.{
                     .tag = .call,
                     .ops = Mir.Inst.Ops.encode(.{
@@ -6876,8 +6876,6 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
         const got_addr = got.p_vaddr + decl.link.elf.offset_table_index * ptr_bytes;
         return MCValue{ .memory = got_addr };
     } else if (self.bin_file.cast(link.File.MachO)) |_| {
-        // Because MachO is PIE-always-on, we defer memory address resolution until
-        // the linker has enough info to perform relocations.
         assert(decl.link.macho.sym_index != 0);
         return MCValue{ .got_load = decl.link.macho.sym_index };
     } else if (self.bin_file.cast(link.File.Coff)) |_| {
diff --git a/src/arch/x86_64/Emit.zig b/src/arch/x86_64/Emit.zig
index 9cf75ab7e3..12f3e9118f 100644
--- a/src/arch/x86_64/Emit.zig
+++ b/src/arch/x86_64/Emit.zig
@@ -137,7 +137,7 @@ pub fn lowerMir(emit: *Emit) InnerError!void {
             .fld => try emit.mirFld(inst),

             .lea => try emit.mirLea(inst),
-            .lea_pie => try emit.mirLeaPie(inst),
+            .lea_pic => try emit.mirLeaPic(inst),

             .shl => try emit.mirShift(.shl, inst),
             .sal => try emit.mirShift(.sal, inst),
@@ -338,7 +338,7 @@ fn mirJmpCall(emit: *Emit, tag: Tag, inst: Mir.Inst.Index) InnerError!void {
                 .base = ops.reg1,
             }), emit.code);
         },
-        0b11 => return emit.fail("TODO unused JMP/CALL variant 0b11", .{}),
+        0b11 => return emit.fail("TODO unused variant jmp/call 0b11", .{}),
     }
 }

@@ -784,7 +784,7 @@ fn mirMovabs(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
             // FD
            return lowerToFdEnc(.mov, ops.reg1, imm, emit.code);
         },
-        else => return emit.fail("TODO unused variant: movabs 0b{b}", .{ops.flags}),
+        else => return emit.fail("TODO unused movabs variant", .{}),
     }
 }

@@ -978,12 +978,17 @@ fn mirLea(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
     }
 }

-fn mirLeaPie(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
+fn mirLeaPic(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
     const tag = emit.mir.instructions.items(.tag)[inst];
-    assert(tag == .lea_pie);
+    assert(tag == .lea_pic);
     const ops = emit.mir.instructions.items(.ops)[inst].decode();
     const relocation = emit.mir.instructions.items(.data)[inst].relocation;

+    switch (ops.flags) {
+        0b00, 0b01 => {},
+        else => return emit.fail("TODO unused LEA PIC variants 0b10 and 0b11", .{}),
+    }
+
     // lea reg1, [rip + reloc]
     // RM
     try lowerToRmEnc(
@@ -1000,7 +1005,7 @@ fn mirLeaPie(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
         const reloc_type = switch (ops.flags) {
             0b00 => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_GOT),
             0b01 => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_SIGNED),
-            else => return emit.fail("TODO unused LEA PIE variants 0b10 and 0b11", .{}),
+            else => unreachable,
         };
         const atom = macho_file.atom_by_index_table.get(relocation.atom_index).?;
         log.debug("adding reloc of type {} to local @{d}", .{ reloc_type, relocation.sym_index });
@@ -1015,27 +1020,21 @@ fn mirLeaPie(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
         });
     } else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
         const atom = coff_file.atom_by_index_table.get(relocation.atom_index).?;
-        log.debug("adding reloc to local @{d}", .{relocation.sym_index});
-        const gop = try coff_file.relocs.getOrPut(gpa, atom);
-        if (!gop.found_existing) {
-            gop.value_ptr.* = .{};
-        }
-        try gop.value_ptr.append(gpa, .{
+        try atom.addRelocation(coff_file, .{
             .@"type" = switch (ops.flags) {
-                0b00 => .got_pcrel,
+                0b00 => .got,
                 0b01 => .direct,
-                else => return emit.fail("TODO unused LEA PIE variants 0b10 and 0b11", .{}),
+                else => unreachable,
             },
             .target = .{ .sym_index = relocation.sym_index, .file = null },
             .offset = @intCast(u32, end_offset - 4),
             .addend = 0,
+            .pcrel = true,
+            .length = 2,
             .prev_vaddr = atom.getSymbol(coff_file).value,
         });
     } else {
-        return emit.fail(
-            "TODO implement lea reg, [rip + reloc] for linking backends different than MachO",
-            .{},
-        );
+        return emit.fail("TODO implement lea reg, [rip + reloc] for linking backends different than MachO", .{});
     }
 }

diff --git a/src/arch/x86_64/Mir.zig b/src/arch/x86_64/Mir.zig
index f67b48a271..71aecc5e85 100644
--- a/src/arch/x86_64/Mir.zig
+++ b/src/arch/x86_64/Mir.zig
@@ -178,11 +178,11 @@ pub const Inst = struct {
         lea,

         /// ops flags:  form:
-        ///      0b00    reg1, [rip + reloc] // via GOT emits X86_64_RELOC_GOT relocation
-        ///      0b01    reg1, [rip + reloc] // direct load emits X86_64_RELOC_SIGNED relocation
+        ///      0b00    reg1, [rip + reloc] // via GOT PIC
+        ///      0b01    reg1, [rip + reloc] // direct load PIC
         /// Notes:
         ///   * `Data` contains `relocation`
-        lea_pie,
+        lea_pic,

         /// ops flags:  form:
         ///      0b00    reg1, 1
@@ -242,15 +242,14 @@ pub const Inst = struct {
         imul_complex,

         /// ops flags:  form:
-        ///      0bX0    reg1, imm64
-        ///      0bX1    rax, moffs64
+        ///      0b00    reg1, imm64
+        ///      0b01    rax, moffs64
         /// Notes:
         ///   * If reg1 is 64-bit, the immediate is 64-bit and stored
         ///     within extra data `Imm64`.
-        ///   * For 0bX1, reg1 (or reg2) need to be
+        ///   * For 0b01, reg1 (or reg2) need to be
         ///     a version of rax. If reg1 == .none, then reg2 == .rax,
         ///     or vice versa.
-        /// TODO handle scaling
         movabs,

         /// ops flags:  form:
diff --git a/src/link/Coff.zig b/src/link/Coff.zig
index 0c3fbfd6a0..36ddfc4e2a 100644
--- a/src/link/Coff.zig
+++ b/src/link/Coff.zig
@@ -103,14 +103,16 @@ unnamed_const_atoms: UnnamedConstTable = .{},
 /// this will be a table indexed by index into the list of Atoms.
 relocs: RelocTable = .{},

-const Reloc = struct {
+pub const Reloc = struct {
     @"type": enum {
-        got_pcrel,
+        got,
         direct,
     },
     target: SymbolWithLoc,
     offset: u32,
     addend: u32,
+    pcrel: bool,
+    length: u2,
     prev_vaddr: u32,
 };

@@ -593,15 +595,13 @@ fn createGotAtom(self: *Coff, target: SymbolWithLoc) !*Atom {

     log.debug("allocated GOT atom at 0x{x}", .{sym.value});

-    const gop_relocs = try self.relocs.getOrPut(gpa, atom);
-    if (!gop_relocs.found_existing) {
-        gop_relocs.value_ptr.* = .{};
-    }
-    try gop_relocs.value_ptr.append(gpa, .{
+    try atom.addRelocation(self, .{
         .@"type" = .direct,
         .target = target,
         .offset = 0,
         .addend = 0,
+        .pcrel = false,
+        .length = 3,
         .prev_vaddr = sym.value,
     });

@@ -656,7 +656,7 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {

     for (relocs.items) |*reloc| {
         const target_vaddr = switch (reloc.@"type") {
-            .got_pcrel => blk: {
+            .got => blk: {
                 const got_atom = self.getGotAtomForSymbol(reloc.target) orelse continue;
                 break :blk got_atom.getSymbol(self).value;
             },
@@ -673,21 +673,28 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {
             @tagName(reloc.@"type"),
         });

-        switch (reloc.@"type") {
-            .got_pcrel => {
-                const source_vaddr = source_sym.value + reloc.offset;
-                const disp = target_vaddr_with_addend - source_vaddr - 4;
-                try self.base.file.?.pwriteAll(mem.asBytes(&@intCast(u32, disp)), file_offset + reloc.offset);
-            },
-            .direct => switch (self.ptr_width) {
-                .p32 => try self.base.file.?.pwriteAll(
-                    mem.asBytes(&@intCast(u32, target_vaddr_with_addend + default_image_base_exe)),
+        if (reloc.pcrel) {
+            const source_vaddr = source_sym.value + reloc.offset;
+            const disp = target_vaddr_with_addend - source_vaddr - 4;
+            try self.base.file.?.pwriteAll(mem.asBytes(&@intCast(u32, disp)), file_offset + reloc.offset);
+            return;
+        }
+
+        switch (self.ptr_width) {
+            .p32 => try self.base.file.?.pwriteAll(
+                mem.asBytes(&@intCast(u32, target_vaddr_with_addend + default_image_base_exe)),
+                file_offset + reloc.offset,
+            ),
+            .p64 => switch (reloc.length) {
+                2 => try self.base.file.?.pwriteAll(
+                    mem.asBytes(&@truncate(u32, target_vaddr_with_addend + default_image_base_exe)),
                     file_offset + reloc.offset,
                 ),
-                .p64 => try self.base.file.?.pwriteAll(
+                3 => try self.base.file.?.pwriteAll(
                     mem.asBytes(&(target_vaddr_with_addend + default_image_base_exe)),
                     file_offset + reloc.offset,
                 ),
+                else => unreachable,
             },
         }

@@ -1270,8 +1277,8 @@ fn writeHeader(self: *Coff) !void {
     writer.writeAll(mem.asBytes(&coff_header)) catch unreachable;

     const dll_flags: coff.DllFlags = .{
-        .HIGH_ENTROPY_VA = 0, // TODO handle ASLR
-        .DYNAMIC_BASE = 0, // TODO handle ASLR
+        .HIGH_ENTROPY_VA = 0, //@boolToInt(self.base.options.pie),
+        .DYNAMIC_BASE = 0,
         .TERMINAL_SERVER_AWARE = 1, // We are not a legacy app
         .NX_COMPAT = 1, // We are compatible with Data Execution Prevention
     };
diff --git a/src/link/Coff/Atom.zig b/src/link/Coff/Atom.zig
index 2e59187d2b..6c085a8f58 100644
--- a/src/link/Coff/Atom.zig
+++ b/src/link/Coff/Atom.zig
@@ -6,6 +6,7 @@ const coff = std.coff;
 const Allocator = std.mem.Allocator;

 const Coff = @import("../Coff.zig");
+const Reloc = Coff.Reloc;
 const SymbolWithLoc = Coff.SymbolWithLoc;

 /// Each decl always gets a local symbol with the fully qualified name.
@@ -96,3 +97,14 @@ pub fn freeListEligible(self: Atom, coff_file: *const Coff) bool {
     const surplus = cap - ideal_cap;
     return surplus >= Coff.min_text_capacity;
 }
+
+pub fn addRelocation(self: *Atom, coff_file: *Coff, reloc: Reloc) !void {
+    const gpa = coff_file.base.allocator;
+    // TODO causes a segfault on Windows
+    // log.debug("adding reloc of type {s} to target %{d}", .{ @tagName(reloc.@"type"), reloc.target.sym_index });
+    const gop = try coff_file.relocs.getOrPut(gpa, self);
+    if (!gop.found_existing) {
+        gop.value_ptr.* = .{};
+    }
+    try gop.value_ptr.append(gpa, reloc);
+}
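
Note: every ELF path above computes the callee's GOT slot the same way: the slot for a Decl lives at the GOT segment base plus the Decl's offset-table index times the pointer size, and the backend then loads the callee address through that slot (into x30, lr, or ra) before emitting the call. A standalone sketch of that arithmetic, with hypothetical names; the real code reads got.p_vaddr from the ELF program header and offset_table_index from the Decl's link metadata.

const std = @import("std");

// Sketch only: slot i of the GOT lives at base + i * pointer size.
fn gotSlotVaddr(got_base_vaddr: u64, slot_index: u64, ptr_bytes: u64) u32 {
    return @intCast(u32, got_base_vaddr + slot_index * ptr_bytes);
}

test "GOT slot address on a 64-bit target" {
    // Third slot (index 2) of a GOT based at 0x20_0000 with 8-byte pointers.
    try std.testing.expectEqual(@as(u32, 0x20_0010), gotSlotVaddr(0x20_0000, 2, 8));
}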
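
Note: the lea_pie -> lea_pic rename keeps the 2-bit flags encoding documented in Mir.zig: 0b00 is a GOT-indirect RIP-relative load (X86_64_RELOC_GOT on MachO, .got on COFF) and 0b01 is a direct RIP-relative load (X86_64_RELOC_SIGNED on MachO, .direct on COFF); 0b10 and 0b11 stay unused. A minimal sketch of the flag decoding mirLeaPic performs (the enum here is illustrative, not the actual Mir type):

const RelocKind = enum { got, direct };

fn relocKindFromFlags(flags: u2) ?RelocKind {
    return switch (flags) {
        0b00 => .got, // GOT-indirect, PC-relative
        0b01 => .direct, // direct, PC-relative
        else => null, // 0b10 and 0b11 are currently unused
    };
}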
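
Note: the new Reloc.pcrel and Reloc.length fields drive resolveRelocs: a PC-relative fixup writes a 32-bit displacement measured from the end of the 4-byte immediate being patched (hence the "- 4"), while absolute fixups add the image base and write 4 or 8 bytes depending on length (the values 2 and 3 appear to follow the Mach-O convention of log2 of the patched size). A self-contained sketch of the displacement arithmetic, assuming the relocation offset points at the immediate itself:

const std = @import("std");

// Mirrors `target_vaddr_with_addend - source_vaddr - 4` from resolveRelocs;
// like the patch, it assumes the target sits above the fixup location.
fn pcrelDisp(source_vaddr: u64, target_vaddr: u64) u32 {
    return @intCast(u32, target_vaddr - source_vaddr - 4);
}

test "displacement for a forward reference" {
    // Immediate at 0x1003 referring to an atom placed at 0x2000.
    try std.testing.expectEqual(@as(u32, 0x2000 - 0x1003 - 4), pcrelDisp(0x1003, 0x2000));
}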
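
Note: Atom.addRelocation centralizes the getOrPut-then-append idiom that Emit.mirLeaPic and createGotAtom previously open-coded against the relocs table. A reduced sketch of that map-of-lists pattern (types simplified; the real table is keyed by *Atom and stores Coff.Reloc):

const std = @import("std");

const Reloc = struct { offset: u32, addend: u32 };
const RelocTable = std.AutoHashMapUnmanaged(u32, std.ArrayListUnmanaged(Reloc));

fn addRelocation(gpa: std.mem.Allocator, table: *RelocTable, atom_index: u32, reloc: Reloc) !void {
    const gop = try table.getOrPut(gpa, atom_index);
    if (!gop.found_existing) {
        // First relocation recorded against this atom: start with an empty list.
        gop.value_ptr.* = .{};
    }
    try gop.value_ptr.append(gpa, reloc);
}

test "relocations accumulate per atom" {
    const gpa = std.testing.allocator;
    var table: RelocTable = .{};
    defer {
        var it = table.valueIterator();
        while (it.next()) |list| list.deinit(gpa);
        table.deinit(gpa);
    }
    try addRelocation(gpa, &table, 1, .{ .offset = 0, .addend = 0 });
    try addRelocation(gpa, &table, 1, .{ .offset = 8, .addend = 0 });
    try std.testing.expectEqual(@as(usize, 2), table.get(1).?.items.len);
}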