From 352e27c55ca32fdc31dd01e3e60893775f03a318 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Tue, 6 Feb 2024 12:46:41 +0100 Subject: [PATCH 01/12] macho: move static lib flushing logic into Archive --- src/link/MachO.zig | 18 +----------------- src/link/MachO/Archive.zig | 12 ++++++++++++ 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/link/MachO.zig b/src/link/MachO.zig index c5fe26bc57..d825671b10 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -379,10 +379,6 @@ pub fn deinit(self: *MachO) void { } pub fn flush(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node) link.File.FlushError!void { - // TODO: I think this is just a temp and can be removed once we can emit static archives - if (self.base.isStaticLib() and build_options.have_llvm) { - return self.base.linkAsArchive(arena, prog_node); - } try self.flushModule(arena, prog_node); } @@ -395,8 +391,6 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node if (self.llvm_object) |llvm_object| { try self.base.emitLlvmObject(arena, llvm_object, prog_node); - // TODO: I think this is just a temp and can be removed once we can emit static archives - if (self.base.isStaticLib() and build_options.have_llvm) return; } var sub_prog_node = prog_node.start("MachO Flush", 0); @@ -417,7 +411,7 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node if (comp.verbose_link) try self.dumpArgv(comp); if (self.getZigObject()) |zo| try zo.flushModule(self); - if (self.base.isStaticLib()) return self.flushStaticLib(comp, module_obj_path); + if (self.base.isStaticLib()) return Archive.flush(self, comp, module_obj_path); if (self.base.isObject()) return relocatable.flush(self, comp, module_obj_path); var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); @@ -892,16 +886,6 @@ fn dumpArgv(self: *MachO, comp: *Compilation) !void { Compilation.dump_argv(argv.items); } -fn flushStaticLib(self: *MachO, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void { - _ = comp; - _ = module_obj_path; - - var err = try self.addErrorWithNotes(0); - try err.addMsg(self, "TODO implement flushStaticLib", .{}); - - return error.FlushFailure; -} - pub fn resolveLibSystem( self: *MachO, arena: Allocator, diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig index fd87b8e260..449fd709d9 100644 --- a/src/link/MachO/Archive.zig +++ b/src/link/MachO/Archive.zig @@ -143,7 +143,18 @@ pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: } } +pub fn flush(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void { + _ = comp; + _ = module_obj_path; + + var err = try macho_file.addErrorWithNotes(0); + try err.addMsg(macho_file, "TODO implement flushStaticLib", .{}); + + return error.FlushFailure; +} + const fat = @import("fat.zig"); +const link = @import("../../link.zig"); const log = std.log.scoped(.link); const macho = std.macho; const mem = std.mem; @@ -151,6 +162,7 @@ const std = @import("std"); const Allocator = mem.Allocator; const Archive = @This(); +const Compilation = @import("../../Compilation.zig"); const File = @import("file.zig").File; const MachO = @import("../MachO.zig"); const Object = @import("Object.zig"); From bdbb1dbe1535b727e542c80fe1f7d62a78e527fd Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Tue, 6 Feb 2024 13:56:28 +0100 Subject: [PATCH 02/12] macho: refactor markExports, markImportsExports and claimUnresolved --- src/link/MachO.zig | 79 
+++++++---------------------- src/link/MachO/Archive.zig | 29 ++++++++++- src/link/MachO/file.zig | 92 ++++++++++++++++++++++++++++++++++ src/link/MachO/relocatable.zig | 61 ++++++---------------- 4 files changed, 151 insertions(+), 110 deletions(-) diff --git a/src/link/MachO.zig b/src/link/MachO.zig index d825671b10..9416792bc0 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -379,6 +379,10 @@ pub fn deinit(self: *MachO) void { } pub fn flush(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node) link.File.FlushError!void { + // TODO: I think this is just a temp and can be removed once we can emit static archives + if (self.base.isStaticLib() and build_options.have_llvm) { + return self.base.linkAsArchive(arena, prog_node); + } try self.flushModule(arena, prog_node); } @@ -391,6 +395,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node if (self.llvm_object) |llvm_object| { try self.base.emitLlvmObject(arena, llvm_object, prog_node); + // TODO: I think this is just a temp and can be removed once we can emit static archives + if (self.base.isStaticLib() and build_options.have_llvm) return; } var sub_prog_node = prog_node.start("MachO Flush", 0); @@ -571,7 +577,7 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node }, }; - try self.markImportsAndExports(); + self.markImportsAndExports(); self.deadStripDylibs(); for (self.dylibs.items, 1..) |index, ord| { @@ -1509,46 +1515,11 @@ fn createObjcSections(self: *MachO) !void { } fn claimUnresolved(self: *MachO) error{OutOfMemory}!void { - const gpa = self.base.comp.gpa; - - var objects = try std.ArrayList(File.Index).initCapacity(gpa, self.objects.items.len + 1); - defer objects.deinit(); - if (self.getZigObject()) |zo| objects.appendAssumeCapacity(zo.index); - objects.appendSliceAssumeCapacity(self.objects.items); - - for (objects.items) |index| { - const file = self.getFile(index).?; - - for (file.getSymbols(), 0..) 
|sym_index, i| { - const nlist_idx = @as(Symbol.Index, @intCast(i)); - const nlist = switch (file) { - .object => |x| x.symtab.items(.nlist)[nlist_idx], - .zig_object => |x| x.symtab.items(.nlist)[nlist_idx], - else => unreachable, - }; - if (!nlist.ext()) continue; - if (!nlist.undf()) continue; - - const sym = self.getSymbol(sym_index); - if (sym.getFile(self) != null) continue; - - const is_import = switch (self.undefined_treatment) { - .@"error" => false, - .warn, .suppress => nlist.weakRef(), - .dynamic_lookup => true, - }; - if (is_import) { - sym.value = 0; - sym.atom = 0; - sym.nlist_idx = 0; - sym.file = self.internal_object.?; - sym.flags.weak = false; - sym.flags.weak_ref = nlist.weakRef(); - sym.flags.import = is_import; - sym.visibility = .global; - try self.getInternalObject().?.symbols.append(self.base.comp.gpa, sym_index); - } - } + if (self.getZigObject()) |zo| { + try zo.asFile().claimUnresolved(self); + } + for (self.objects.items) |index| { + try self.getFile(index).?.claimUnresolved(self); } } @@ -1574,26 +1545,12 @@ fn checkDuplicates(self: *MachO) !void { try self.reportDuplicates(dupes); } -fn markImportsAndExports(self: *MachO) error{OutOfMemory}!void { - const gpa = self.base.comp.gpa; - var objects = try std.ArrayList(File.Index).initCapacity(gpa, self.objects.items.len + 1); - defer objects.deinit(); - if (self.getZigObject()) |zo| objects.appendAssumeCapacity(zo.index); - objects.appendSliceAssumeCapacity(self.objects.items); - - for (objects.items) |index| { - for (self.getFile(index).?.getSymbols()) |sym_index| { - const sym = self.getSymbol(sym_index); - const file = sym.getFile(self) orelse continue; - if (sym.visibility != .global) continue; - if (file == .dylib and !sym.flags.abs) { - sym.flags.import = true; - continue; - } - if (file.getIndex() == index) { - sym.flags.@"export" = true; - } - } +fn markImportsAndExports(self: *MachO) void { + if (self.getZigObject()) |zo| { + zo.asFile().markImportsExports(self); + } + for (self.objects.items) |index| { + self.getFile(index).?.markImportsExports(self); } for (self.undefined_symbols.items) |index| { diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig index 449fd709d9..a5535345ef 100644 --- a/src/link/MachO/Archive.zig +++ b/src/link/MachO/Archive.zig @@ -144,8 +144,32 @@ pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: } pub fn flush(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void { - _ = comp; - _ = module_obj_path; + const gpa = comp.gpa; + + var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); + defer positionals.deinit(); + + try positionals.ensureUnusedCapacity(comp.objects.len); + positionals.appendSliceAssumeCapacity(comp.objects); + + for (comp.c_object_table.keys()) |key| { + try positionals.append(.{ .path = key.status.success.object_path }); + } + + if (module_obj_path) |path| try positionals.append(.{ .path = path }); + + for (positionals.items) |obj| { + // TODO: parse for archive meaning don't unpack objects + _ = obj; + } + + if (comp.link_errors.items.len > 0) return error.FlushFailure; + + // First, we flush relocatable object file generated with our backends. 
+ if (macho_file.getZigObject()) |zo| { + zo.resolveSymbols(macho_file); + zo.asFile().claimUnresolvedRelocatable(macho_file); + } var err = try macho_file.addErrorWithNotes(0); try err.addMsg(macho_file, "TODO implement flushStaticLib", .{}); @@ -158,6 +182,7 @@ const link = @import("../../link.zig"); const log = std.log.scoped(.link); const macho = std.macho; const mem = std.mem; +const relocatable = @import("relocatable.zig"); const std = @import("std"); const Allocator = mem.Allocator; diff --git a/src/link/MachO/file.zig b/src/link/MachO/file.zig index 20f71d80fe..bfaa90fba5 100644 --- a/src/link/MachO/file.zig +++ b/src/link/MachO/file.zig @@ -44,6 +44,97 @@ pub const File = union(enum) { } } + pub fn claimUnresolved(file: File, macho_file: *MachO) error{OutOfMemory}!void { + assert(file == .object or file == .zig_object); + + for (file.getSymbols(), 0..) |sym_index, i| { + const nlist_idx = @as(Symbol.Index, @intCast(i)); + const nlist = switch (file) { + .object => |x| x.symtab.items(.nlist)[nlist_idx], + .zig_object => |x| x.symtab.items(.nlist)[nlist_idx], + else => unreachable, + }; + if (!nlist.ext()) continue; + if (!nlist.undf()) continue; + + const sym = macho_file.getSymbol(sym_index); + if (sym.getFile(macho_file) != null) continue; + + const is_import = switch (macho_file.undefined_treatment) { + .@"error" => false, + .warn, .suppress => nlist.weakRef(), + .dynamic_lookup => true, + }; + if (is_import) { + sym.value = 0; + sym.atom = 0; + sym.nlist_idx = 0; + sym.file = macho_file.internal_object.?; + sym.flags.weak = false; + sym.flags.weak_ref = nlist.weakRef(); + sym.flags.import = is_import; + sym.visibility = .global; + try macho_file.getInternalObject().?.symbols.append(macho_file.base.comp.gpa, sym_index); + } + } + } + + pub fn claimUnresolvedRelocatable(file: File, macho_file: *MachO) void { + assert(file == .object or file == .zig_object); + + for (file.getSymbols(), 0..) 
|sym_index, i| { + const nlist_idx = @as(Symbol.Index, @intCast(i)); + const nlist = switch (file) { + .object => |x| x.symtab.items(.nlist)[nlist_idx], + .zig_object => |x| x.symtab.items(.nlist)[nlist_idx], + else => unreachable, + }; + if (!nlist.ext()) continue; + if (!nlist.undf()) continue; + + const sym = macho_file.getSymbol(sym_index); + if (sym.getFile(macho_file) != null) continue; + + sym.value = 0; + sym.atom = 0; + sym.nlist_idx = nlist_idx; + sym.file = file.getIndex(); + sym.flags.weak_ref = nlist.weakRef(); + sym.flags.import = true; + sym.visibility = .global; + } + } + + pub fn markImportsExports(file: File, macho_file: *MachO) void { + assert(file == .object or file == .zig_object); + + for (file.getSymbols()) |sym_index| { + const sym = macho_file.getSymbol(sym_index); + const other_file = sym.getFile(macho_file) orelse continue; + if (sym.visibility != .global) continue; + if (other_file == .dylib and !sym.flags.abs) { + sym.flags.import = true; + continue; + } + if (other_file.getIndex() == file.getIndex()) { + sym.flags.@"export" = true; + } + } + } + + pub fn markExportsRelocatable(file: File, macho_file: *MachO) void { + assert(file == .object or file == .zig_object); + + for (file.getSymbols()) |sym_index| { + const sym = macho_file.getSymbol(sym_index); + const other_file = sym.getFile(macho_file) orelse continue; + if (sym.visibility != .global) continue; + if (other_file.getIndex() == file.getIndex()) { + sym.flags.@"export" = true; + } + } + } + /// Encodes symbol rank so that the following ordering applies: /// * strong in object /// * weak in object @@ -110,6 +201,7 @@ pub const File = union(enum) { pub const HandleIndex = Index; }; +const assert = std.debug.assert; const macho = std.macho; const std = @import("std"); diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig index 5e9eb9f823..4ecd1026e6 100644 --- a/src/link/MachO/relocatable.zig +++ b/src/link/MachO/relocatable.zig @@ -46,8 +46,8 @@ pub fn flush(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u try macho_file.addUndefinedGlobals(); try macho_file.resolveSymbols(); - try markExports(macho_file); - try claimUnresolved(macho_file); + markExports(macho_file); + claimUnresolved(macho_file); try initOutputSections(macho_file); try macho_file.sortSections(); try macho_file.addAtomsToSections(); @@ -86,54 +86,21 @@ pub fn flush(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u try writeHeader(macho_file, ncmds, sizeofcmds); } -fn markExports(macho_file: *MachO) error{OutOfMemory}!void { - var objects = try std.ArrayList(File.Index).initCapacity(macho_file.base.comp.gpa, macho_file.objects.items.len + 1); - defer objects.deinit(); - if (macho_file.getZigObject()) |zo| objects.appendAssumeCapacity(zo.index); - objects.appendSliceAssumeCapacity(macho_file.objects.items); - - for (objects.items) |index| { - for (macho_file.getFile(index).?.getSymbols()) |sym_index| { - const sym = macho_file.getSymbol(sym_index); - const file = sym.getFile(macho_file) orelse continue; - if (sym.visibility != .global) continue; - if (file.getIndex() == index) { - sym.flags.@"export" = true; - } - } +fn markExports(macho_file: *MachO) void { + if (macho_file.getZigObject()) |zo| { + zo.asFile().markExportsRelocatable(macho_file); + } + for (macho_file.objects.items) |index| { + macho_file.getFile(index).?.markExportsRelocatable(macho_file); } } -fn claimUnresolved(macho_file: *MachO) error{OutOfMemory}!void { - var objects = try 
std.ArrayList(File.Index).initCapacity(macho_file.base.comp.gpa, macho_file.objects.items.len + 1); - defer objects.deinit(); - if (macho_file.getZigObject()) |zo| objects.appendAssumeCapacity(zo.index); - objects.appendSliceAssumeCapacity(macho_file.objects.items); - - for (objects.items) |index| { - const file = macho_file.getFile(index).?; - - for (file.getSymbols(), 0..) |sym_index, i| { - const nlist_idx = @as(Symbol.Index, @intCast(i)); - const nlist = switch (file) { - .object => |x| x.symtab.items(.nlist)[nlist_idx], - .zig_object => |x| x.symtab.items(.nlist)[nlist_idx], - else => unreachable, - }; - if (!nlist.ext()) continue; - if (!nlist.undf()) continue; - - const sym = macho_file.getSymbol(sym_index); - if (sym.getFile(macho_file) != null) continue; - - sym.value = 0; - sym.atom = 0; - sym.nlist_idx = nlist_idx; - sym.file = index; - sym.flags.weak_ref = nlist.weakRef(); - sym.flags.import = true; - sym.visibility = .global; - } +pub fn claimUnresolved(macho_file: *MachO) void { + if (macho_file.getZigObject()) |zo| { + zo.asFile().claimUnresolvedRelocatable(macho_file); + } + for (macho_file.objects.items) |index| { + macho_file.getFile(index).?.claimUnresolvedRelocatable(macho_file); } } From 7f01b61679999bcd0ff644632a26e8c35e7541b6 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Tue, 6 Feb 2024 14:09:51 +0100 Subject: [PATCH 03/12] macho: move static lib emitting logic to relocatable.zig --- src/link/MachO.zig | 4 +- src/link/MachO/Archive.zig | 36 ------------------ src/link/MachO/relocatable.zig | 68 +++++++++++++++++++++++++++++++++- 3 files changed, 69 insertions(+), 39 deletions(-) diff --git a/src/link/MachO.zig b/src/link/MachO.zig index 9416792bc0..6b5ed4c0fb 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -417,8 +417,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node if (comp.verbose_link) try self.dumpArgv(comp); if (self.getZigObject()) |zo| try zo.flushModule(self); - if (self.base.isStaticLib()) return Archive.flush(self, comp, module_obj_path); - if (self.base.isObject()) return relocatable.flush(self, comp, module_obj_path); + if (self.base.isStaticLib()) return relocatable.flushStaticLib(self, comp, module_obj_path); + if (self.base.isObject()) return relocatable.flushObject(self, comp, module_obj_path); var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); defer positionals.deinit(); diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig index a5535345ef..0dbc145b87 100644 --- a/src/link/MachO/Archive.zig +++ b/src/link/MachO/Archive.zig @@ -143,51 +143,15 @@ pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: } } -pub fn flush(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void { - const gpa = comp.gpa; - - var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); - defer positionals.deinit(); - - try positionals.ensureUnusedCapacity(comp.objects.len); - positionals.appendSliceAssumeCapacity(comp.objects); - - for (comp.c_object_table.keys()) |key| { - try positionals.append(.{ .path = key.status.success.object_path }); - } - - if (module_obj_path) |path| try positionals.append(.{ .path = path }); - - for (positionals.items) |obj| { - // TODO: parse for archive meaning don't unpack objects - _ = obj; - } - - if (comp.link_errors.items.len > 0) return error.FlushFailure; - - // First, we flush relocatable object file generated with our backends. 
- if (macho_file.getZigObject()) |zo| { - zo.resolveSymbols(macho_file); - zo.asFile().claimUnresolvedRelocatable(macho_file); - } - - var err = try macho_file.addErrorWithNotes(0); - try err.addMsg(macho_file, "TODO implement flushStaticLib", .{}); - - return error.FlushFailure; -} - const fat = @import("fat.zig"); const link = @import("../../link.zig"); const log = std.log.scoped(.link); const macho = std.macho; const mem = std.mem; -const relocatable = @import("relocatable.zig"); const std = @import("std"); const Allocator = mem.Allocator; const Archive = @This(); -const Compilation = @import("../../Compilation.zig"); const File = @import("file.zig").File; const MachO = @import("../MachO.zig"); const Object = @import("Object.zig"); diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig index 4ecd1026e6..1b0aa74d23 100644 --- a/src/link/MachO/relocatable.zig +++ b/src/link/MachO/relocatable.zig @@ -1,4 +1,4 @@ -pub fn flush(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void { +pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void { const gpa = macho_file.base.comp.gpa; var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); @@ -86,6 +86,72 @@ pub fn flush(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u try writeHeader(macho_file, ncmds, sizeofcmds); } +pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void { + const gpa = comp.gpa; + + var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); + defer positionals.deinit(); + + try positionals.ensureUnusedCapacity(comp.objects.len); + positionals.appendSliceAssumeCapacity(comp.objects); + + for (comp.c_object_table.keys()) |key| { + try positionals.append(.{ .path = key.status.success.object_path }); + } + + if (module_obj_path) |path| try positionals.append(.{ .path = path }); + + for (positionals.items) |obj| { + // TODO: parse for archive meaning don't unpack objects + _ = obj; + } + + if (comp.link_errors.items.len > 0) return error.FlushFailure; + + // First, we flush relocatable object file generated with our backends. + if (macho_file.getZigObject()) |zo| { + zo.resolveSymbols(macho_file); + zo.asFile().claimUnresolvedRelocatable(macho_file); + try macho_file.sortSections(); + try macho_file.addAtomsToSections(); + try calcSectionSizes(macho_file); + try createSegment(macho_file); + try allocateSections(macho_file); + allocateSegment(macho_file); + + var off = off: { + const seg = macho_file.segments.items[0]; + const off = math.cast(u32, seg.fileoff + seg.filesize) orelse return error.Overflow; + break :off mem.alignForward(u32, off, @alignOf(macho.relocation_info)); + }; + off = allocateSectionsRelocs(macho_file, off); + + state_log.debug("{}", .{macho_file.dumpState()}); + + try macho_file.calcSymtabSize(); + try writeAtoms(macho_file); + + off = mem.alignForward(u32, off, @alignOf(u64)); + off = try macho_file.writeDataInCode(0, off); + off = mem.alignForward(u32, off, @alignOf(u64)); + off = try macho_file.writeSymtab(off); + off = mem.alignForward(u32, off, @alignOf(u64)); + off = try macho_file.writeStrtab(off); + + // In order to please Apple ld (and possibly other MachO linkers in the wild), + // we will now sanitize segment names of Zig-specific segments. 
+ sanitizeZigSections(macho_file); + + const ncmds, const sizeofcmds = try writeLoadCommands(macho_file); + try writeHeader(macho_file, ncmds, sizeofcmds); + } + + var err = try macho_file.addErrorWithNotes(0); + try err.addMsg(macho_file, "TODO implement flushStaticLib", .{}); + + return error.FlushFailure; +} + fn markExports(macho_file: *MachO) void { if (macho_file.getZigObject()) |zo| { zo.asFile().markExportsRelocatable(macho_file); From 80cafad9d32fed9f6a786f4d87f40b8ee622015e Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Tue, 6 Feb 2024 14:16:00 +0100 Subject: [PATCH 04/12] macho: read-in committed ZigObject to memory from file --- src/link/MachO/ZigObject.zig | 18 ++++++++++++++++++ src/link/MachO/relocatable.zig | 4 ++++ 2 files changed, 22 insertions(+) diff --git a/src/link/MachO/ZigObject.zig b/src/link/MachO/ZigObject.zig index b39905f259..bdcc658e75 100644 --- a/src/link/MachO/ZigObject.zig +++ b/src/link/MachO/ZigObject.zig @@ -1,3 +1,4 @@ +data: std.ArrayListUnmanaged(u8) = .{}, /// Externally owned memory. path: []const u8, index: File.Index, @@ -57,6 +58,7 @@ pub fn init(self: *ZigObject, macho_file: *MachO) !void { } pub fn deinit(self: *ZigObject, allocator: Allocator) void { + self.data.deinit(allocator); self.symtab.deinit(allocator); self.strtab.deinit(allocator); self.symbols.deinit(allocator); @@ -279,6 +281,22 @@ pub fn checkDuplicates(self: *ZigObject, dupes: anytype, macho_file: *MachO) !vo } } +/// This is just a temporary helper function that allows us to re-read what we wrote to file into a buffer. +/// We need this so that we can write to an archive. +/// TODO implement writing ZigObject data directly to a buffer instead. +pub fn readFileContents(self: *ZigObject, macho_file: *MachO) !void { + const gpa = macho_file.base.comp.gpa; + var end_pos: u64 = 0; + for (macho_file.segments.items) |seg| { + end_pos = @max(end_pos, seg.fileoff + seg.filesize); + } + const size = std.math.cast(usize, end_pos) orelse return error.Overflow; + try self.data.resize(gpa, size); + + const amt = try macho_file.base.file.?.preadAll(self.data.items, 0); + if (amt != size) return error.InputOutput; +} + pub fn scanRelocs(self: *ZigObject, macho_file: *MachO) !void { for (self.atoms.items) |atom_index| { const atom = macho_file.getAtom(atom_index) orelse continue; diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig index 1b0aa74d23..37ea17d751 100644 --- a/src/link/MachO/relocatable.zig +++ b/src/link/MachO/relocatable.zig @@ -144,6 +144,10 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ? const ncmds, const sizeofcmds = try writeLoadCommands(macho_file); try writeHeader(macho_file, ncmds, sizeofcmds); + + // TODO we can avoid reading in the file contents we just wrote if we give the linker + // ability to write directly to a buffer. 
+ try zo.readFileContents(macho_file); } var err = try macho_file.addErrorWithNotes(0); From 897a554109baa3288d575cac0833e10edd1a316c Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Wed, 7 Feb 2024 11:37:33 +0100 Subject: [PATCH 05/12] macho: populate output archive symtab --- src/link/MachO.zig | 4 +- src/link/MachO/Archive.zig | 329 +++++++++++++++++++++++++-------- src/link/MachO/Object.zig | 33 +++- src/link/MachO/ZigObject.zig | 24 +++ src/link/MachO/file.zig | 8 + src/link/MachO/relocatable.zig | 31 +++- 6 files changed, 343 insertions(+), 86 deletions(-) diff --git a/src/link/MachO.zig b/src/link/MachO.zig index 6b5ed4c0fb..7789c563d1 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -606,7 +606,9 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node self.allocateSyntheticSymbols(); try self.allocateLinkeditSegment(); - state_log.debug("{}", .{self.dumpState()}); + if (build_options.enable_logging) { + state_log.debug("{}", .{self.dumpState()}); + } try self.initDyldInfoSections(); diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig index 0dbc145b87..2777eb8587 100644 --- a/src/link/MachO/Archive.zig +++ b/src/link/MachO/Archive.zig @@ -1,5 +1,136 @@ objects: std.ArrayListUnmanaged(Object) = .{}, +pub fn isArchive(path: []const u8, fat_arch: ?fat.Arch) !bool { + const file = try std.fs.cwd().openFile(path, .{}); + defer file.close(); + if (fat_arch) |arch| { + try file.seekTo(arch.offset); + } + const magic = file.reader().readBytesNoEof(SARMAG) catch return false; + if (!mem.eql(u8, &magic, ARMAG)) return false; + return true; +} + +pub fn deinit(self: *Archive, allocator: Allocator) void { + self.objects.deinit(allocator); +} + +pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: File.HandleIndex, fat_arch: ?fat.Arch) !void { + const gpa = macho_file.base.comp.gpa; + + var arena = std.heap.ArenaAllocator.init(gpa); + defer arena.deinit(); + + const handle = macho_file.getFileHandle(handle_index); + const offset = if (fat_arch) |ar| ar.offset else 0; + const size = if (fat_arch) |ar| ar.size else (try handle.stat()).size; + try handle.seekTo(offset); + + const reader = handle.reader(); + _ = try reader.readBytesNoEof(SARMAG); + + var pos: usize = SARMAG; + while (true) { + if (pos >= size) break; + if (!mem.isAligned(pos, 2)) { + try handle.seekBy(1); + pos += 1; + } + + const hdr = try reader.readStruct(ar_hdr); + pos += @sizeOf(ar_hdr); + + if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) { + try macho_file.reportParseError(path, "invalid header delimiter: expected '{s}', found '{s}'", .{ + std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag), + }); + return error.MalformedArchive; + } + + var hdr_size = try hdr.size(); + const name = name: { + if (hdr.name()) |n| break :name n; + if (try hdr.nameLength()) |len| { + hdr_size -= len; + const buf = try arena.allocator().alloc(u8, len); + try reader.readNoEof(buf); + pos += len; + const actual_len = mem.indexOfScalar(u8, buf, @as(u8, 0)) orelse len; + break :name buf[0..actual_len]; + } + unreachable; + }; + defer { + _ = handle.seekBy(hdr_size) catch {}; + pos += hdr_size; + } + + if (mem.eql(u8, name, SYMDEF) or + mem.eql(u8, name, SYMDEF64) or + mem.eql(u8, name, SYMDEF_SORTED) or + mem.eql(u8, name, SYMDEF64_SORTED)) continue; + + const object = Object{ + .archive = .{ + .path = try gpa.dupe(u8, path), + .offset = offset + pos, + }, + .path = try gpa.dupe(u8, name), + .file_handle = handle_index, + .index = undefined, + .alive 
= false, + .mtime = hdr.date() catch 0, + }; + + log.debug("extracting object '{s}' from archive '{s}'", .{ object.path, path }); + + try self.objects.append(gpa, object); + } +} + +pub fn writeHeader( + object_name: []const u8, + object_size: u32, + format: Format, + writer: anytype, +) !void { + var hdr: ar_hdr = .{ + .ar_name = undefined, + .ar_date = undefined, + .ar_uid = undefined, + .ar_gid = undefined, + .ar_mode = undefined, + .ar_size = undefined, + .ar_fmag = undefined, + }; + @memset(mem.asBytes(&hdr), 0x20); + inline for (@typeInfo(ar_hdr).Struct.fields) |field| { + var stream = std.io.fixedBufferStream(&@field(hdr, field.name)); + stream.writer().print("0", .{}) catch unreachable; + } + @memcpy(&hdr.ar_fmag, ARFMAG); + + const object_name_len = mem.alignForward(u32, object_name.len + 1, format.ptrWidth()); + const total_object_size = object_size + object_name_len; + + { + var stream = std.io.fixedBufferStream(&hdr.ar_name); + stream.writer().print("#1/{d}", .{object_name_len}) catch unreachable; + } + { + var stream = std.io.fixedBufferStream(&hdr.ar_size); + stream.writer().print("{d}", .{total_object_size}) catch unreachable; + } + + try writer.writeAll(mem.asBytes(&hdr)); + try writer.print("{s}\x00", .{object_name}); + + const padding = object_name_len - object_name.len - 1; + if (padding > 0) { + try writer.writeByteNTimes(0, padding); + } +} + // Archive files start with the ARMAG identifying string. Then follows a // `struct ar_hdr', and as many bytes of member file data as its `ar_size' // member indicates, for each member file. @@ -11,6 +142,11 @@ pub const SARMAG: u4 = 8; /// String in ar_fmag at the end of each header. const ARFMAG: *const [2:0]u8 = "`\n"; +const SYMDEF = "__.SYMDEF"; +const SYMDEF64 = "__.SYMDEF_64"; +const SYMDEF_SORTED = "__.SYMDEF SORTED"; +const SYMDEF64_SORTED = "__.SYMDEF_64 SORTED"; + const ar_hdr = extern struct { /// Member file name, sometimes / terminated. 
ar_name: [16]u8, @@ -58,90 +194,120 @@ const ar_hdr = extern struct { } }; -pub fn isArchive(path: []const u8, fat_arch: ?fat.Arch) !bool { - const file = try std.fs.cwd().openFile(path, .{}); - defer file.close(); - if (fat_arch) |arch| { - try file.seekTo(arch.offset); +pub const ArSymtab = struct { + entries: std.ArrayListUnmanaged(Entry) = .{}, + strtab: StringTable = .{}, + format: Format = .p32, + + pub fn deinit(ar: *ArSymtab, allocator: Allocator) void { + ar.entries.deinit(allocator); + ar.strtab.deinit(allocator); } - const magic = file.reader().readBytesNoEof(SARMAG) catch return false; - if (!mem.eql(u8, &magic, ARMAG)) return false; - return true; -} -pub fn deinit(self: *Archive, allocator: Allocator) void { - self.objects.deinit(allocator); -} - -pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: File.HandleIndex, fat_arch: ?fat.Arch) !void { - const gpa = macho_file.base.comp.gpa; - - var arena = std.heap.ArenaAllocator.init(gpa); - defer arena.deinit(); - - const handle = macho_file.getFileHandle(handle_index); - const offset = if (fat_arch) |ar| ar.offset else 0; - const size = if (fat_arch) |ar| ar.size else (try handle.stat()).size; - try handle.seekTo(offset); - - const reader = handle.reader(); - _ = try reader.readBytesNoEof(Archive.SARMAG); - - var pos: usize = Archive.SARMAG; - while (true) { - if (pos >= size) break; - if (!mem.isAligned(pos, 2)) { - try handle.seekBy(1); - pos += 1; - } - - const hdr = try reader.readStruct(ar_hdr); - pos += @sizeOf(ar_hdr); - - if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) { - try macho_file.reportParseError(path, "invalid header delimiter: expected '{s}', found '{s}'", .{ - std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag), - }); - return error.MalformedArchive; - } - - var hdr_size = try hdr.size(); - const name = name: { - if (hdr.name()) |n| break :name n; - if (try hdr.nameLength()) |len| { - hdr_size -= len; - const buf = try arena.allocator().alloc(u8, len); - try reader.readNoEof(buf); - pos += len; - const actual_len = mem.indexOfScalar(u8, buf, @as(u8, 0)) orelse len; - break :name buf[0..actual_len]; - } - unreachable; - }; - defer { - _ = handle.seekBy(hdr_size) catch {}; - pos += hdr_size; - } - - if (mem.eql(u8, name, "__.SYMDEF") or mem.eql(u8, name, "__.SYMDEF SORTED")) continue; - - const object = Object{ - .archive = .{ - .path = try gpa.dupe(u8, path), - .offset = offset + pos, - }, - .path = try gpa.dupe(u8, name), - .file_handle = handle_index, - .index = undefined, - .alive = false, - .mtime = hdr.date() catch 0, - }; - - log.debug("extracting object '{s}' from archive '{s}'", .{ object.path, path }); - - try self.objects.append(gpa, object); + pub fn sort(ar: *ArSymtab) void { + mem.sort(Entry, ar.entries.items, {}, Entry.lessThan); } -} + + pub fn size(ar: ArSymtab) usize { + const ptr_width = ar.format.ptrWidth(); + return ptr_width + ar.entries.items.len * 2 * ptr_width + ptr_width + mem.alignForward(usize, ar.strtab.buffer.items.len, ptr_width); + } + + pub fn write(ar: ArSymtab, macho_file: *MachO, writer: anytype) !void { + // Header + try writeHeader(SYMDEF, ar.size()); + // Symtab size + try ar.writeInt(ar.entries.items.len * 2); + // Symtab entries + for (ar.entries.items) |entry| { + const file_off = switch (macho_file.getFile(entry.file).?) 
{ + .zig_object => |x| x.output_ar_state.file_off, + .object => |x| x.output_ar_state.file_off, + else => unreachable, + }; + // Name offset + try ar.writeInt(entry.off); + // File offset + try ar.writeInt(file_off); + } + // Strtab size + const strtab_size = mem.alignForward(u64, ar.strtab.buffer.items.len, ar.format.ptrWidth()); + const padding = strtab_size - ar.strtab.buffer.items.len; + try ar.writeInt(strtab_size); + // Strtab + try writer.writeAll(ar.strtab.buffer.items); + if (padding > 0) { + try writer.writeByteNTimes(0, padding); + } + } + + fn writeInt(ar: ArSymtab, value: u64, writer: anytype) !void { + switch (ar.format) { + .p32 => try writer.writeInt(u32, std.math.cast(u32, value) orelse return error.Overflow, .little), + .p64 => try writer.writeInt(u64, value, .little), + } + } + + const FormatContext = struct { + ar: ArSymtab, + macho_file: *MachO, + }; + + pub fn fmt(ar: ArSymtab, macho_file: *MachO) std.fmt.Formatter(format2) { + return .{ .data = .{ .ar = ar, .macho_file = macho_file } }; + } + + fn format2( + ctx: FormatContext, + comptime unused_fmt_string: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, + ) !void { + _ = unused_fmt_string; + _ = options; + const ar = ctx.ar; + const macho_file = ctx.macho_file; + for (ar.entries.items, 0..) |entry, i| { + const name = ar.strtab.getAssumeExists(entry.off); + const file = macho_file.getFile(entry.file).?; + try writer.print(" {d}: {s} in file({d})({})\n", .{ i, name, entry.file, file.fmtPath() }); + } + } + + const Entry = struct { + /// Symbol name offset + off: u32, + /// Exporting file + file: File.Index, + + pub fn lessThan(ctx: void, lhs: Entry, rhs: Entry) bool { + _ = ctx; + if (lhs.off == rhs.off) return lhs.file < rhs.file; + return lhs.off < rhs.off; + } + }; +}; + +const Format = enum { + p32, + p64, + + fn ptrWidth(self: Format) usize { + return switch (self) { + .p32 => @as(usize, 4), + .p64 => 8, + }; + } +}; + +pub const ArState = struct { + /// File offset of the ar_hdr describing the contributing + /// object in the archive. + file_off: u64 = 0, + + /// Total size of the contributing object (excludes ar_hdr and long name with padding). 
+ size: u64 = 0, +}; const fat = @import("fat.zig"); const link = @import("../../link.zig"); @@ -155,3 +321,4 @@ const Archive = @This(); const File = @import("file.zig").File; const MachO = @import("../MachO.zig"); const Object = @import("Object.zig"); +const StringTable = @import("../StringTable.zig"); diff --git a/src/link/MachO/Object.zig b/src/link/MachO/Object.zig index 6a987cb02d..7e91e04dd2 100644 --- a/src/link/MachO/Object.zig +++ b/src/link/MachO/Object.zig @@ -1,4 +1,4 @@ -archive: ?Archive = null, +archive: ?InArchive = null, path: []const u8, file_handle: File.HandleIndex, mtime: u64, @@ -29,8 +29,9 @@ hidden: bool = false, dynamic_relocs: MachO.DynamicRelocs = .{}, output_symtab_ctx: MachO.SymtabCtx = .{}, +output_ar_state: Archive.ArState = .{}, -const Archive = struct { +const InArchive = struct { path: []const u8, offset: u64, }; @@ -1232,6 +1233,33 @@ fn addSection(self: *Object, allocator: Allocator, segname: []const u8, sectname return n_sect; } +pub fn updateArSymtab(self: Object, ar_symtab: *Archive.ArSymtab, macho_file: *MachO) error{OutOfMemory}!void { + const gpa = macho_file.base.comp.gpa; + for (self.symtab.items(.nlist)) |nlist| { + if (!nlist.ext() or (nlist.undf() and !nlist.tentative())) continue; + const off = try ar_symtab.strtab.insert(gpa, self.getString(nlist.n_strx)); + try ar_symtab.entries.append(gpa, .{ .off = off, .file = self.index }); + } +} + +pub fn updateArSize(self: *Object, macho_file: *MachO) !void { + const file = macho_file.getFileHandle(self.file_handle); + const size = (try file.stat()).size; + self.output_ar_state.size = size; +} + +pub fn writeAr(self: Object, macho_file: *MachO, writer: anytype) !void { + // Header + try Archive.writeHeader(self.path, self.output_ar_state.size, writer); + // Data + const file = macho_file.getFileHandle(self.file_handle); + // TODO try using copyRangeAll + const gpa = macho_file.base.comp.gpa; + const data = try file.readToEndAlloc(gpa, self.output_ar_state.size); + defer gpa.free(data); + try writer.writeAll(data); +} + pub fn calcSymtabSize(self: *Object, macho_file: *MachO) !void { const tracy = trace(@src()); defer tracy.end(); @@ -2241,6 +2269,7 @@ const trace = @import("../../tracy.zig").trace; const std = @import("std"); const Allocator = mem.Allocator; +const Archive = @import("Archive.zig"); const Atom = @import("Atom.zig"); const Cie = eh_frame.Cie; const DwarfInfo = @import("DwarfInfo.zig"); diff --git a/src/link/MachO/ZigObject.zig b/src/link/MachO/ZigObject.zig index bdcc658e75..65ae3788ad 100644 --- a/src/link/MachO/ZigObject.zig +++ b/src/link/MachO/ZigObject.zig @@ -48,6 +48,7 @@ relocs: RelocationTable = .{}, dynamic_relocs: MachO.DynamicRelocs = .{}, output_symtab_ctx: MachO.SymtabCtx = .{}, +output_ar_state: Archive.ArState = .{}, pub fn init(self: *ZigObject, macho_file: *MachO) !void { const comp = macho_file.base.comp; @@ -297,6 +298,29 @@ pub fn readFileContents(self: *ZigObject, macho_file: *MachO) !void { if (amt != size) return error.InputOutput; } +pub fn updateArSymtab(self: ZigObject, ar_symtab: *Archive.ArSymtab, macho_file: *MachO) error{OutOfMemory}!void { + const gpa = macho_file.base.comp.gpa; + for (self.symbols.items) |sym_index| { + const sym = macho_file.getSymbol(sym_index); + const file = sym.getFile(macho_file).?; + assert(file.getIndex() == self.index); + if (!sym.flags.@"export") continue; + const off = try ar_symtab.strtab.insert(gpa, sym.getName(macho_file)); + try ar_symtab.entries.append(gpa, .{ .off = off, .file = self.index }); + } +} + +pub fn 
updateArSize(self: *ZigObject) void { + self.output_ar_state.size = self.data.items.len; +} + +pub fn writeAr(self: ZigObject, writer: anytype) !void { + // Header + try Archive.writeHeader(self.path, self.output_ar_state.size, writer); + // Data + try writer.writeAll(self.data.items); +} + pub fn scanRelocs(self: *ZigObject, macho_file: *MachO) !void { for (self.atoms.items) |atom_index| { const atom = macho_file.getAtom(atom_index) orelse continue; diff --git a/src/link/MachO/file.zig b/src/link/MachO/file.zig index bfaa90fba5..d6e21c2f38 100644 --- a/src/link/MachO/file.zig +++ b/src/link/MachO/file.zig @@ -175,6 +175,13 @@ pub const File = union(enum) { }; } + pub fn updateArSymtab(file: File, ar_symtab: *Archive.ArSymtab, macho_file: *MachO) error{OutOfMemory}!void { + return switch (file) { + .dylib, .internal => unreachable, + inline else => |x| x.updateArSymtab(ar_symtab, macho_file), + }; + } + pub fn calcSymtabSize(file: File, macho_file: *MachO) !void { return switch (file) { inline else => |x| x.calcSymtabSize(macho_file), @@ -206,6 +213,7 @@ const macho = std.macho; const std = @import("std"); const Allocator = std.mem.Allocator; +const Archive = @import("Archive.zig"); const Atom = @import("Atom.zig"); const InternalObject = @import("InternalObject.zig"); const MachO = @import("../MachO.zig"); diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig index 37ea17d751..f90988c32e 100644 --- a/src/link/MachO/relocatable.zig +++ b/src/link/MachO/relocatable.zig @@ -64,7 +64,9 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]c }; off = allocateSectionsRelocs(macho_file, off); - state_log.debug("{}", .{macho_file.dumpState()}); + if (build_options.enable_logging) { + state_log.debug("{}", .{macho_file.dumpState()}); + } try macho_file.calcSymtabSize(); try writeAtoms(macho_file); @@ -111,6 +113,7 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ? // First, we flush relocatable object file generated with our backends. if (macho_file.getZigObject()) |zo| { zo.resolveSymbols(macho_file); + zo.asFile().markExportsRelocatable(macho_file); zo.asFile().claimUnresolvedRelocatable(macho_file); try macho_file.sortSections(); try macho_file.addAtomsToSections(); @@ -126,7 +129,9 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ? }; off = allocateSectionsRelocs(macho_file, off); - state_log.debug("{}", .{macho_file.dumpState()}); + if (build_options.enable_logging) { + state_log.debug("{}", .{macho_file.dumpState()}); + } try macho_file.calcSymtabSize(); try writeAtoms(macho_file); @@ -150,6 +155,26 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ? 
try zo.readFileContents(macho_file); } + var files = std.ArrayList(File.Index).init(gpa); + defer files.deinit(); + try files.ensureTotalCapacityPrecise(macho_file.objects.items.len + 1); + if (macho_file.getZigObject()) |zo| files.appendAssumeCapacity(zo.index); + for (macho_file.objects.items) |index| files.appendAssumeCapacity(index); + + // Update ar symtab from parsed objects + var ar_symtab: Archive.ArSymtab = .{}; + defer ar_symtab.deinit(gpa); + + for (files.items) |index| { + try macho_file.getFile(index).?.updateArSymtab(&ar_symtab, macho_file); + } + + ar_symtab.sort(); + + if (build_options.enable_logging) { + state_log.debug("ar_symtab\n{}\n", .{ar_symtab.fmt(macho_file)}); + } + var err = try macho_file.addErrorWithNotes(0); try err.addMsg(macho_file, "TODO implement flushStaticLib", .{}); @@ -646,6 +671,7 @@ fn writeHeader(macho_file: *MachO, ncmds: usize, sizeofcmds: usize) !void { } const assert = std.debug.assert; +const build_options = @import("build_options"); const eh_frame = @import("eh_frame.zig"); const link = @import("../../link.zig"); const load_commands = @import("load_commands.zig"); @@ -657,6 +683,7 @@ const state_log = std.log.scoped(.link_state); const std = @import("std"); const trace = @import("../../tracy.zig").trace; +const Archive = @import("Archive.zig"); const Atom = @import("Atom.zig"); const Compilation = @import("../../Compilation.zig"); const File = @import("file.zig").File; From efa1c6124d167b3144c4d4b15ebf384130d35abd Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Wed, 7 Feb 2024 12:20:38 +0100 Subject: [PATCH 06/12] macho: emit an archive --- src/link/MachO.zig | 4 +-- src/link/MachO/Archive.zig | 63 ++++++++++++++++---------------- src/link/MachO/Object.zig | 7 ++-- src/link/MachO/ZigObject.zig | 5 +-- src/link/MachO/file.zig | 16 +++++++++ src/link/MachO/relocatable.zig | 66 ++++++++++++++++++++++++++++++++-- 6 files changed, 119 insertions(+), 42 deletions(-) diff --git a/src/link/MachO.zig b/src/link/MachO.zig index 7789c563d1..f19e1c9ea7 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -380,7 +380,7 @@ pub fn deinit(self: *MachO) void { pub fn flush(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node) link.File.FlushError!void { // TODO: I think this is just a temp and can be removed once we can emit static archives - if (self.base.isStaticLib() and build_options.have_llvm) { + if (self.base.isStaticLib() and build_options.have_llvm and self.base.comp.config.use_llvm) { return self.base.linkAsArchive(arena, prog_node); } try self.flushModule(arena, prog_node); @@ -396,7 +396,7 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node if (self.llvm_object) |llvm_object| { try self.base.emitLlvmObject(arena, llvm_object, prog_node); // TODO: I think this is just a temp and can be removed once we can emit static archives - if (self.base.isStaticLib() and build_options.have_llvm) return; + if (self.base.isStaticLib() and build_options.have_llvm and self.base.comp.config.use_llvm) return; } var sub_prog_node = prog_node.start("MachO Flush", 0); diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig index 2777eb8587..83cf80f05e 100644 --- a/src/link/MachO/Archive.zig +++ b/src/link/MachO/Archive.zig @@ -90,7 +90,7 @@ pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: pub fn writeHeader( object_name: []const u8, - object_size: u32, + object_size: usize, format: Format, writer: anytype, ) !void { @@ -110,7 +110,7 @@ pub fn writeHeader( } 
@memcpy(&hdr.ar_fmag, ARFMAG); - const object_name_len = mem.alignForward(u32, object_name.len + 1, format.ptrWidth()); + const object_name_len = mem.alignForward(usize, object_name.len + 1, ptrWidth(format)); const total_object_size = object_size + object_name_len; { @@ -142,12 +142,12 @@ pub const SARMAG: u4 = 8; /// String in ar_fmag at the end of each header. const ARFMAG: *const [2:0]u8 = "`\n"; -const SYMDEF = "__.SYMDEF"; -const SYMDEF64 = "__.SYMDEF_64"; -const SYMDEF_SORTED = "__.SYMDEF SORTED"; -const SYMDEF64_SORTED = "__.SYMDEF_64 SORTED"; +pub const SYMDEF = "__.SYMDEF"; +pub const SYMDEF64 = "__.SYMDEF_64"; +pub const SYMDEF_SORTED = "__.SYMDEF SORTED"; +pub const SYMDEF64_SORTED = "__.SYMDEF_64 SORTED"; -const ar_hdr = extern struct { +pub const ar_hdr = extern struct { /// Member file name, sometimes / terminated. ar_name: [16]u8, @@ -197,7 +197,6 @@ const ar_hdr = extern struct { pub const ArSymtab = struct { entries: std.ArrayListUnmanaged(Entry) = .{}, strtab: StringTable = .{}, - format: Format = .p32, pub fn deinit(ar: *ArSymtab, allocator: Allocator) void { ar.entries.deinit(allocator); @@ -208,16 +207,16 @@ pub const ArSymtab = struct { mem.sort(Entry, ar.entries.items, {}, Entry.lessThan); } - pub fn size(ar: ArSymtab) usize { - const ptr_width = ar.format.ptrWidth(); + pub fn size(ar: ArSymtab, format: Format) usize { + const ptr_width = ptrWidth(format); return ptr_width + ar.entries.items.len * 2 * ptr_width + ptr_width + mem.alignForward(usize, ar.strtab.buffer.items.len, ptr_width); } - pub fn write(ar: ArSymtab, macho_file: *MachO, writer: anytype) !void { + pub fn write(ar: ArSymtab, format: Format, macho_file: *MachO, writer: anytype) !void { // Header - try writeHeader(SYMDEF, ar.size()); + try writeHeader(SYMDEF, ar.size(format), format, writer); // Symtab size - try ar.writeInt(ar.entries.items.len * 2); + try writeInt(format, ar.entries.items.len * 2, writer); // Symtab entries for (ar.entries.items) |entry| { const file_off = switch (macho_file.getFile(entry.file).?) 
{ @@ -226,14 +225,14 @@ pub const ArSymtab = struct { else => unreachable, }; // Name offset - try ar.writeInt(entry.off); + try writeInt(format, entry.off, writer); // File offset - try ar.writeInt(file_off); + try writeInt(format, file_off, writer); } // Strtab size - const strtab_size = mem.alignForward(u64, ar.strtab.buffer.items.len, ar.format.ptrWidth()); + const strtab_size = mem.alignForward(u64, ar.strtab.buffer.items.len, ptrWidth(format)); const padding = strtab_size - ar.strtab.buffer.items.len; - try ar.writeInt(strtab_size); + try writeInt(format, strtab_size, writer); // Strtab try writer.writeAll(ar.strtab.buffer.items); if (padding > 0) { @@ -241,13 +240,6 @@ pub const ArSymtab = struct { } } - fn writeInt(ar: ArSymtab, value: u64, writer: anytype) !void { - switch (ar.format) { - .p32 => try writer.writeInt(u32, std.math.cast(u32, value) orelse return error.Overflow, .little), - .p64 => try writer.writeInt(u64, value, .little), - } - } - const FormatContext = struct { ar: ArSymtab, macho_file: *MachO, @@ -288,18 +280,25 @@ pub const ArSymtab = struct { }; }; -const Format = enum { +pub const Format = enum { p32, p64, - - fn ptrWidth(self: Format) usize { - return switch (self) { - .p32 => @as(usize, 4), - .p64 => 8, - }; - } }; +pub fn ptrWidth(format: Format) usize { + return switch (format) { + .p32 => @as(usize, 4), + .p64 => 8, + }; +} + +pub fn writeInt(format: Format, value: u64, writer: anytype) !void { + switch (format) { + .p32 => try writer.writeInt(u32, std.math.cast(u32, value) orelse return error.Overflow, .little), + .p64 => try writer.writeInt(u64, value, .little), + } +} + pub const ArState = struct { /// File offset of the ar_hdr describing the contributing /// object in the archive. diff --git a/src/link/MachO/Object.zig b/src/link/MachO/Object.zig index 7e91e04dd2..9d436d0197 100644 --- a/src/link/MachO/Object.zig +++ b/src/link/MachO/Object.zig @@ -1248,14 +1248,15 @@ pub fn updateArSize(self: *Object, macho_file: *MachO) !void { self.output_ar_state.size = size; } -pub fn writeAr(self: Object, macho_file: *MachO, writer: anytype) !void { +pub fn writeAr(self: Object, ar_format: Archive.Format, macho_file: *MachO, writer: anytype) !void { // Header - try Archive.writeHeader(self.path, self.output_ar_state.size, writer); + const size = std.math.cast(usize, self.output_ar_state.size) orelse return error.Overflow; + try Archive.writeHeader(self.path, size, ar_format, writer); // Data const file = macho_file.getFileHandle(self.file_handle); // TODO try using copyRangeAll const gpa = macho_file.base.comp.gpa; - const data = try file.readToEndAlloc(gpa, self.output_ar_state.size); + const data = try file.readToEndAlloc(gpa, size); defer gpa.free(data); try writer.writeAll(data); } diff --git a/src/link/MachO/ZigObject.zig b/src/link/MachO/ZigObject.zig index 65ae3788ad..1017203bbe 100644 --- a/src/link/MachO/ZigObject.zig +++ b/src/link/MachO/ZigObject.zig @@ -314,9 +314,10 @@ pub fn updateArSize(self: *ZigObject) void { self.output_ar_state.size = self.data.items.len; } -pub fn writeAr(self: ZigObject, writer: anytype) !void { +pub fn writeAr(self: ZigObject, ar_format: Archive.Format, writer: anytype) !void { // Header - try Archive.writeHeader(self.path, self.output_ar_state.size, writer); + const size = std.math.cast(usize, self.output_ar_state.size) orelse return error.Overflow; + try Archive.writeHeader(self.path, size, ar_format, writer); // Data try writer.writeAll(self.data.items); } diff --git a/src/link/MachO/file.zig b/src/link/MachO/file.zig 
index d6e21c2f38..60c7a70f26 100644
--- a/src/link/MachO/file.zig
+++ b/src/link/MachO/file.zig
@@ -182,6 +182,22 @@ pub const File = union(enum) {
         };
     }
 
+    pub fn updateArSize(file: File, macho_file: *MachO) !void {
+        return switch (file) {
+            .dylib, .internal => unreachable,
+            .zig_object => |x| x.updateArSize(),
+            .object => |x| x.updateArSize(macho_file),
+        };
+    }
+
+    pub fn writeAr(file: File, ar_format: Archive.Format, macho_file: *MachO, writer: anytype) !void {
+        return switch (file) {
+            .dylib, .internal => unreachable,
+            .zig_object => |x| x.writeAr(ar_format, writer),
+            .object => |x| x.writeAr(ar_format, macho_file, writer),
+        };
+    }
+
     pub fn calcSymtabSize(file: File, macho_file: *MachO) !void {
         return switch (file) {
             inline else => |x| x.calcSymtabSize(macho_file),
diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig
index f90988c32e..d7d4469f7f 100644
--- a/src/link/MachO/relocatable.zig
+++ b/src/link/MachO/relocatable.zig
@@ -161,6 +161,9 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
     if (macho_file.getZigObject()) |zo| files.appendAssumeCapacity(zo.index);
     for (macho_file.objects.items) |index| files.appendAssumeCapacity(index);
 
+    const format: Archive.Format = .p32;
+    const ptr_width = Archive.ptrWidth(format);
+
     // Update ar symtab from parsed objects
     var ar_symtab: Archive.ArSymtab = .{};
     defer ar_symtab.deinit(gpa);
@@ -171,14 +174,71 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
 
     ar_symtab.sort();
 
+    // Update sizes of contributing objects
+    for (files.items) |index| {
+        try macho_file.getFile(index).?.updateArSize(macho_file);
+    }
+
+    // Update file offsets of contributing objects
+    const total_size: usize = blk: {
+        var pos: usize = Archive.SARMAG;
+        pos += @sizeOf(Archive.ar_hdr) + Archive.SYMDEF.len + 1;
+        pos = mem.alignForward(usize, pos, ptr_width);
+        pos += ar_symtab.size(format);
+
+        for (files.items) |index| {
+            const file = macho_file.getFile(index).?;
+            const state = switch (file) {
+                .zig_object => |x| &x.output_ar_state,
+                .object => |x| &x.output_ar_state,
+                else => unreachable,
+            };
+            const path = switch (file) {
+                .zig_object => |x| x.path,
+                .object => |x| x.path,
+                else => unreachable,
+            };
+            pos = mem.alignForward(usize, pos, ptr_width);
+            state.file_off = pos;
+            pos += @sizeOf(Archive.ar_hdr) + path.len + 1;
+            pos = mem.alignForward(usize, pos, ptr_width);
+            pos += math.cast(usize, state.size) orelse return error.Overflow;
+        }
+
+        break :blk pos;
+    };
+
     if (build_options.enable_logging) {
         state_log.debug("ar_symtab\n{}\n", .{ar_symtab.fmt(macho_file)});
     }
 
-    var err = try macho_file.addErrorWithNotes(0);
-    try err.addMsg(macho_file, "TODO implement flushStaticLib", .{});
+    var buffer = std.ArrayList(u8).init(gpa);
+    defer buffer.deinit();
+    try buffer.ensureTotalCapacityPrecise(total_size);
+    const writer = buffer.writer();
 
-    return error.FlushFailure;
+    // Write magic
+    try writer.writeAll(Archive.ARMAG);
+
+    // Write symtab
+    try ar_symtab.write(format, macho_file, writer);
+
+    // Write object files
+    for (files.items) |index| {
+        const aligned = mem.alignForward(usize, buffer.items.len, ptr_width);
+        const padding = aligned - buffer.items.len;
+        if (padding > 0) {
+            try writer.writeByteNTimes(0, padding);
+        }
+        try macho_file.getFile(index).?.writeAr(format, macho_file, writer);
+    }
+
+    assert(buffer.items.len == total_size);
+
+    try macho_file.base.file.?.setEndPos(total_size);
+    try macho_file.base.file.?.pwriteAll(buffer.items, 0);
+
+    if (comp.link_errors.items.len > 0) return error.FlushFailure;
 }
 
 fn markExports(macho_file: *MachO) void {

From 35ac066f1d550445519aceb098d7e3f517f98c55 Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Wed, 7 Feb 2024 12:27:49 +0100
Subject: [PATCH 07/12] macho: fix writing SYMDEF symtab size

---
 src/link/MachO/Archive.zig | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig
index 83cf80f05e..a65531064d 100644
--- a/src/link/MachO/Archive.zig
+++ b/src/link/MachO/Archive.zig
@@ -213,10 +213,11 @@ pub const ArSymtab = struct {
     }
 
     pub fn write(ar: ArSymtab, format: Format, macho_file: *MachO, writer: anytype) !void {
+        const ptr_width = ptrWidth(format);
         // Header
         try writeHeader(SYMDEF, ar.size(format), format, writer);
         // Symtab size
-        try writeInt(format, ar.entries.items.len * 2, writer);
+        try writeInt(format, ar.entries.items.len * 2 * ptr_width, writer);
         // Symtab entries
         for (ar.entries.items) |entry| {
             const file_off = switch (macho_file.getFile(entry.file).?) {
@@ -230,7 +231,7 @@ pub const ArSymtab = struct {
             try writeInt(format, file_off, writer);
         }
         // Strtab size
-        const strtab_size = mem.alignForward(u64, ar.strtab.buffer.items.len, ptrWidth(format));
+        const strtab_size = mem.alignForward(u64, ar.strtab.buffer.items.len, ptr_width);
         const padding = strtab_size - ar.strtab.buffer.items.len;
         try writeInt(format, strtab_size, writer);
         // Strtab

From 82144a9073aa06f037e248f332608f94caed59b9 Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Wed, 7 Feb 2024 18:05:20 +0100
Subject: [PATCH 08/12] macho: fix invalid ZigObject size calculation

---
 src/link/MachO/ZigObject.zig   | 8 +-------
 src/link/MachO/relocatable.zig | 2 +-
 2 files changed, 2 insertions(+), 8 deletions(-)

diff --git a/src/link/MachO/ZigObject.zig b/src/link/MachO/ZigObject.zig
index 1017203bbe..ecd2470733 100644
--- a/src/link/MachO/ZigObject.zig
+++ b/src/link/MachO/ZigObject.zig
@@ -285,15 +285,9 @@ pub fn checkDuplicates(self: *ZigObject, dupes: anytype, macho_file: *MachO) !vo
 /// This is just a temporary helper function that allows us to re-read what we wrote to file into a buffer.
 /// We need this so that we can write to an archive.
 /// TODO implement writing ZigObject data directly to a buffer instead.
-pub fn readFileContents(self: *ZigObject, macho_file: *MachO) !void {
+pub fn readFileContents(self: *ZigObject, size: usize, macho_file: *MachO) !void {
     const gpa = macho_file.base.comp.gpa;
-    var end_pos: u64 = 0;
-    for (macho_file.segments.items) |seg| {
-        end_pos = @max(end_pos, seg.fileoff + seg.filesize);
-    }
-    const size = std.math.cast(usize, end_pos) orelse return error.Overflow;
     try self.data.resize(gpa, size);
-
     const amt = try macho_file.base.file.?.preadAll(self.data.items, 0);
     if (amt != size) return error.InputOutput;
 }
diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig
index d7d4469f7f..c190c1fdcc 100644
--- a/src/link/MachO/relocatable.zig
+++ b/src/link/MachO/relocatable.zig
@@ -152,7 +152,7 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
         // TODO we can avoid reading in the file contents we just wrote if we give the linker
         // ability to write directly to a buffer.
-        try zo.readFileContents(macho_file);
+        try zo.readFileContents(off, macho_file);
     }
 
     var files = std.ArrayList(File.Index).init(gpa);

From f9eb14ddcfdc508fd03f35e9d89a8f55baf7d11b Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Wed, 7 Feb 2024 19:08:15 +0100
Subject: [PATCH 09/12] macho: parse input object files specifically for incl in archive

---
 src/link/MachO.zig             |  4 +-
 src/link/MachO/Object.zig      | 76 ++++++++++++++++++++++++++++++
 src/link/MachO/relocatable.zig | 86 +++++++++++++++++++++++++++++++++-
 3 files changed, 162 insertions(+), 4 deletions(-)

diff --git a/src/link/MachO.zig b/src/link/MachO.zig
index f19e1c9ea7..ead945a2be 100644
--- a/src/link/MachO.zig
+++ b/src/link/MachO.zig
@@ -926,7 +926,7 @@ pub fn resolveLibSystem(
     });
 }
 
-const ParseError = error{
+pub const ParseError = error{
     MalformedObject,
     MalformedArchive,
     MalformedDylib,
@@ -1003,7 +1003,7 @@ fn parseObject(self: *MachO, path: []const u8) ParseError!void {
     try object.parse(self);
 }
 
-fn parseFatLibrary(self: *MachO, path: []const u8) !fat.Arch {
+pub fn parseFatLibrary(self: *MachO, path: []const u8) !fat.Arch {
     var buffer: [2]fat.Arch = undefined;
     const fat_archs = try fat.parseArchs(path, &buffer);
     const cpu_arch = self.getTarget().cpu.arch;
diff --git a/src/link/MachO/Object.zig b/src/link/MachO/Object.zig
index 9d436d0197..8436a8b853 100644
--- a/src/link/MachO/Object.zig
+++ b/src/link/MachO/Object.zig
@@ -1233,6 +1233,82 @@ fn addSection(self: *Object, allocator: Allocator, segname: []const u8, sectname
     return n_sect;
 }
 
+pub fn parseAr(self: *Object, macho_file: *MachO) !void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const gpa = macho_file.base.comp.gpa;
+    const offset = if (self.archive) |ar| ar.offset else 0;
+    const handle = macho_file.getFileHandle(self.file_handle);
+
+    var header_buffer: [@sizeOf(macho.mach_header_64)]u8 = undefined;
+    {
+        const amt = try handle.preadAll(&header_buffer, offset);
+        if (amt != @sizeOf(macho.mach_header_64)) return error.InputOutput;
+    }
+    self.header = @as(*align(1) const macho.mach_header_64, @ptrCast(&header_buffer)).*;
+
+    const this_cpu_arch: std.Target.Cpu.Arch = switch (self.header.?.cputype) {
+        macho.CPU_TYPE_ARM64 => .aarch64,
+        macho.CPU_TYPE_X86_64 => .x86_64,
+        else => |x| {
+            try macho_file.reportParseError2(self.index, "unknown cpu architecture: {d}", .{x});
+            return error.InvalidCpuArch;
+        },
+    };
+    if (macho_file.getTarget().cpu.arch != this_cpu_arch) {
+        try macho_file.reportParseError2(self.index, "invalid cpu architecture: {s}", .{@tagName(this_cpu_arch)});
+        return error.InvalidCpuArch;
+    }
+
+    const lc_buffer = try gpa.alloc(u8, self.header.?.sizeofcmds);
+    defer gpa.free(lc_buffer);
+    {
+        const amt = try handle.preadAll(lc_buffer, offset + @sizeOf(macho.mach_header_64));
+        if (amt != self.header.?.sizeofcmds) return error.InputOutput;
+    }
+
+    var it = LoadCommandIterator{
+        .ncmds = self.header.?.ncmds,
+        .buffer = lc_buffer,
+    };
+    while (it.next()) |lc| switch (lc.cmd()) {
+        .SYMTAB => {
+            const cmd = lc.cast(macho.symtab_command).?;
+            try self.strtab.resize(gpa, cmd.strsize);
+            {
+                const amt = try handle.preadAll(self.strtab.items, cmd.stroff + offset);
+                if (amt != self.strtab.items.len) return error.InputOutput;
+            }
+
+            const symtab_buffer = try gpa.alloc(u8, cmd.nsyms * @sizeOf(macho.nlist_64));
+            defer gpa.free(symtab_buffer);
+            {
+                const amt = try handle.preadAll(symtab_buffer, cmd.symoff + offset);
+                if (amt != symtab_buffer.len) return error.InputOutput;
+            }
+            const symtab = @as([*]align(1) const macho.nlist_64, @ptrCast(symtab_buffer.ptr))[0..cmd.nsyms];
+            try self.symtab.ensureUnusedCapacity(gpa, symtab.len);
+            for (symtab) |nlist| {
+                self.symtab.appendAssumeCapacity(.{
+                    .nlist = nlist,
+                    .atom = 0,
+                    .size = 0,
+                });
+            }
+        },
+        .BUILD_VERSION,
+        .VERSION_MIN_MACOSX,
+        .VERSION_MIN_IPHONEOS,
+        .VERSION_MIN_TVOS,
+        .VERSION_MIN_WATCHOS,
+        => if (self.platform == null) {
+            self.platform = MachO.Platform.fromLoadCommand(lc);
+        },
+        else => {},
+    };
+}
+
 pub fn updateArSymtab(self: Object, ar_symtab: *Archive.ArSymtab, macho_file: *MachO) error{OutOfMemory}!void {
     const gpa = macho_file.base.comp.gpa;
     for (self.symtab.items(.nlist)) |nlist| {
diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig
index c190c1fdcc..8f5bf97696 100644
--- a/src/link/MachO/relocatable.zig
+++ b/src/link/MachO/relocatable.zig
@@ -104,8 +104,19 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
     if (module_obj_path) |path| try positionals.append(.{ .path = path });
 
     for (positionals.items) |obj| {
-        // TODO: parse for archive meaning don't unpack objects
-        _ = obj;
+        parsePositional(macho_file, obj.path) catch |err| switch (err) {
+            error.MalformedObject,
+            error.MalformedArchive,
+            error.InvalidCpuArch,
+            error.InvalidTarget,
+            => continue, // already reported
+            error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an object file", .{}),
+            else => |e| try macho_file.reportParseError(
+                obj.path,
+                "unexpected error: parsing input file failed with error {s}",
+                .{@errorName(e)},
+            ),
+        };
     }
 
     if (comp.link_errors.items.len > 0) return error.FlushFailure;
@@ -241,6 +252,75 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
     if (comp.link_errors.items.len > 0) return error.FlushFailure;
 }
 
+fn parsePositional(macho_file: *MachO, path: []const u8) MachO.ParseError!void {
+    const tracy = trace(@src());
+    defer tracy.end();
+    if (try Object.isObject(path)) {
+        try parseObject(macho_file, path);
+    } else if (try fat.isFatLibrary(path)) {
+        const fat_arch = try macho_file.parseFatLibrary(path);
+        if (try Archive.isArchive(path, fat_arch)) {
+            try parseArchive(macho_file, path, fat_arch);
+        } else return error.UnknownFileType;
+    } else if (try Archive.isArchive(path, null)) {
+        try parseArchive(macho_file, path, null);
+    } else return error.UnknownFileType;
+}
+
+fn parseObject(macho_file: *MachO, path: []const u8) MachO.ParseError!void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const gpa = macho_file.base.comp.gpa;
+    const file = try std.fs.cwd().openFile(path, .{});
+    errdefer file.close();
+    const handle = try macho_file.addFileHandle(file);
+    const mtime: u64 = mtime: {
+        const stat = file.stat() catch break :mtime 0;
+        break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
+    };
+    const index = @as(File.Index, @intCast(try macho_file.files.addOne(gpa)));
+    macho_file.files.set(index, .{ .object = .{
+        .path = try gpa.dupe(u8, path),
+        .file_handle = handle,
+        .mtime = mtime,
+        .index = index,
+    } });
+    try macho_file.objects.append(gpa, index);
+
+    const object = macho_file.getFile(index).?.object;
+    try object.parseAr(macho_file);
+}
+
+fn parseArchive(macho_file: *MachO, path: []const u8, fat_arch: ?fat.Arch) MachO.ParseError!void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const gpa = macho_file.base.comp.gpa;
+
+    const file = try std.fs.cwd().openFile(path, .{});
+    errdefer file.close();
+    const handle = try macho_file.addFileHandle(file);
+
+    var archive = Archive{};
+    defer archive.deinit(gpa);
+    try archive.parse(macho_file, path, handle, fat_arch);
+
+    var has_parse_error = false;
+    for (archive.objects.items) |extracted| {
+        const index = @as(File.Index, @intCast(try macho_file.files.addOne(gpa)));
+        macho_file.files.set(index, .{ .object = extracted });
+        const object = &macho_file.files.items(.data)[index].object;
+        object.index = index;
+        object.parseAr(macho_file) catch |err| switch (err) {
+            error.InvalidCpuArch => has_parse_error = true,
+            else => |e| return e,
+        };
+        try macho_file.objects.append(gpa, index);
+    }
+    if (has_parse_error) return error.MalformedArchive;
+}
+
 fn markExports(macho_file: *MachO) void {
     if (macho_file.getZigObject()) |zo| {
         zo.asFile().markExportsRelocatable(macho_file);
     }
@@ -733,6 +813,7 @@ fn writeHeader(macho_file: *MachO, ncmds: usize, sizeofcmds: usize) !void {
 const assert = std.debug.assert;
 const build_options = @import("build_options");
 const eh_frame = @import("eh_frame.zig");
+const fat = @import("fat.zig");
 const link = @import("../../link.zig");
 const load_commands = @import("load_commands.zig");
 const log = std.log.scoped(.link);
@@ -748,4 +829,5 @@ const Atom = @import("Atom.zig");
 const Compilation = @import("../../Compilation.zig");
 const File = @import("file.zig").File;
 const MachO = @import("../MachO.zig");
+const Object = @import("Object.zig");
 const Symbol = @import("Symbol.zig");

From e3b6d347b29da1862a81463b6f0fa928111532ec Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Wed, 7 Feb 2024 19:16:37 +0100
Subject: [PATCH 10/12] macho: remove fallback to llvm-ar

---
 src/link/MachO.zig | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/src/link/MachO.zig b/src/link/MachO.zig
index ead945a2be..84de97f4e9 100644
--- a/src/link/MachO.zig
+++ b/src/link/MachO.zig
@@ -379,10 +379,6 @@ pub fn deinit(self: *MachO) void {
 }
 
 pub fn flush(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node) link.File.FlushError!void {
-    // TODO: I think this is just a temp and can be removed once we can emit static archives
-    if (self.base.isStaticLib() and build_options.have_llvm and self.base.comp.config.use_llvm) {
-        return self.base.linkAsArchive(arena, prog_node);
-    }
     try self.flushModule(arena, prog_node);
 }
 
@@ -395,8 +391,6 @@ pub fn flushModule(self: *MachO, arena: Allocator, prog_node: *std.Progress.Node
     if (self.llvm_object) |llvm_object| {
         try self.base.emitLlvmObject(arena, llvm_object, prog_node);
-        // TODO: I think this is just a temp and can be removed once we can emit static archives
-        if (self.base.isStaticLib() and build_options.have_llvm and self.base.comp.config.use_llvm) return;
     }
 
     var sub_prog_node = prog_node.start("MachO Flush", 0);

From 114518c6b0361fccef34a07d20c13f434bce1ef7 Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Wed, 7 Feb 2024 21:28:20 +0100
Subject: [PATCH 11/12] macho: fix 32bit builds

---
 src/link/MachO/Archive.zig | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig
index a65531064d..aba31d1742 100644
--- a/src/link/MachO/Archive.zig
+++ b/src/link/MachO/Archive.zig
@@ -231,7 +231,7 @@ pub const ArSymtab = struct {
             try writeInt(format, file_off, writer);
         }
         // Strtab size
-        const strtab_size = mem.alignForward(u64, ar.strtab.buffer.items.len, ptr_width);
+        const strtab_size = mem.alignForward(usize, ar.strtab.buffer.items.len, ptr_width);
         const padding = strtab_size - ar.strtab.buffer.items.len;
         try writeInt(format, strtab_size, writer);
         // Strtab

From 272fc2df2e0ba79866ff56bacacece2f60dec10b Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Wed, 7 Feb 2024 23:25:15 +0100
Subject: [PATCH 12/12] macho: use preadAll to read in object files when emitting archive

---
 src/link/MachO/Object.zig | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/link/MachO/Object.zig b/src/link/MachO/Object.zig
index 8436a8b853..ad7ba1c240 100644
--- a/src/link/MachO/Object.zig
+++ b/src/link/MachO/Object.zig
@@ -1332,8 +1332,10 @@ pub fn writeAr(self: Object, ar_format: Archive.Format, macho_file: *MachO, writ
     const file = macho_file.getFileHandle(self.file_handle);
     // TODO try using copyRangeAll
     const gpa = macho_file.base.comp.gpa;
-    const data = try file.readToEndAlloc(gpa, size);
+    const data = try gpa.alloc(u8, size);
     defer gpa.free(data);
+    const amt = try file.preadAll(data, 0);
+    if (amt != size) return error.InputOutput;
     try writer.writeAll(data);
 }
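
The final patch above replaces readToEndAlloc with an exact-size allocation plus preadAll, so the object payload is read with positional I/O of a known length instead of re-deriving the size and moving the file cursor. Below is a minimal, self-contained sketch of that pattern only; the input path "input.o" is hypothetical and the snippet is not part of the patch series.

const std = @import("std");

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    const gpa = gpa_state.allocator();

    // Hypothetical input; any readable file works for the demonstration.
    const file = try std.fs.cwd().openFile("input.o", .{});
    defer file.close();

    // Allocate exactly `size` bytes and read them from offset 0 with preadAll,
    // then treat a short read as an error, mirroring the pattern in writeAr.
    const size = std.math.cast(usize, (try file.stat()).size) orelse return error.Overflow;
    const data = try gpa.alloc(u8, size);
    defer gpa.free(data);
    const amt = try file.preadAll(data, 0);
    if (amt != size) return error.InputOutput;

    std.debug.print("read {d} bytes\n", .{amt});
}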
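
The file-offset bookkeeping added to flushStaticLib earlier in the series lays the archive out as the global magic, then the __.SYMDEF symbol-table member, then each object member, where every member offset is aligned to the pointer width of the chosen format and every member is prefixed by an ar_hdr plus a NUL-terminated name. The following is a standalone sketch of that offset arithmetic only, under made-up assumptions: the constants SARMAG, the 60-byte header size, and the member names and sizes are stand-ins, not the definitions from src/link/MachO/Archive.zig.

const std = @import("std");

// Stand-ins for the archive constants used by the linker
// (Archive.SARMAG, @sizeOf(Archive.ar_hdr), Archive.SYMDEF).
const SARMAG: usize = 8; // length of the "!<arch>\n" magic
const ar_hdr_size: usize = 60;
const symdef_name = "__.SYMDEF";

pub fn main() void {
    // Two hypothetical members with made-up payload sizes.
    const names = [_][]const u8{ "a.o", "b.o" };
    const sizes = [_]usize{ 1234, 777 };
    const ptr_width: usize = 4; // the .p32 format uses 32-bit entries
    const symtab_size: usize = 0; // ar_symtab.size(format) would go here

    var pos: usize = SARMAG;
    // Symbol table member: header + NUL-terminated name + the table itself.
    pos += ar_hdr_size + symdef_name.len + 1;
    pos = std.mem.alignForward(usize, pos, ptr_width);
    pos += symtab_size;

    for (names, sizes) |name, size| {
        pos = std.mem.alignForward(usize, pos, ptr_width);
        std.debug.print("{s} header at offset {d}\n", .{ name, pos });
        pos += ar_hdr_size + name.len + 1;
        pos = std.mem.alignForward(usize, pos, ptr_width);
        pos += size;
    }
    std.debug.print("total archive size: {d}\n", .{pos});
}

When the members are actually written, the gap introduced by each alignment step is filled with zero bytes, which is what the writeByteNTimes call in the flushStaticLib patch does.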