diff --git a/src/Compilation.zig b/src/Compilation.zig
index 4ad4d3edfa..833e3b39ca 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -654,6 +654,11 @@ pub const ClangPreprocessorMode = enum {
 pub const SystemLib = link.SystemLib;
 pub const CacheMode = link.CacheMode;

+pub const LinkObject = struct {
+    path: []const u8,
+    must_link: bool = false,
+};
+
 pub const InitOptions = struct {
     zig_lib_directory: Directory,
     local_cache_directory: Directory,
@@ -698,7 +703,7 @@ pub const InitOptions = struct {
     lib_dirs: []const []const u8 = &[0][]const u8{},
     rpath_list: []const []const u8 = &[0][]const u8{},
     c_source_files: []const CSourceFile = &[0]CSourceFile{},
-    link_objects: []const []const u8 = &[0][]const u8{},
+    link_objects: []LinkObject = &[0]LinkObject{},
     framework_dirs: []const []const u8 = &[0][]const u8{},
     frameworks: []const []const u8 = &[0][]const u8{},
     system_lib_names: []const []const u8 = &.{},
@@ -1056,7 +1061,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
             if (options.system_lib_names.len != 0) break :x true;
             for (options.link_objects) |obj| {
-                switch (classifyFileExt(obj)) {
+                switch (classifyFileExt(obj.path)) {
                     .shared_library => break :x true,
                     else => continue,
                 }
@@ -1459,7 +1464,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
         if (options.c_source_files.len >= 1) {
             hash.addBytes(options.c_source_files[0].src_path);
         } else if (options.link_objects.len >= 1) {
-            hash.addBytes(options.link_objects[0]);
+            hash.addBytes(options.link_objects[0].path);
         }

         const digest = hash.final();
@@ -2265,7 +2270,11 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
     try man.addOptionalFile(comp.bin_file.options.linker_script);
     try man.addOptionalFile(comp.bin_file.options.version_script);
-    try man.addListOfFiles(comp.bin_file.options.objects);
+
+    for (comp.bin_file.options.objects) |obj| {
+        _ = try man.addFile(obj.path, null);
+        man.hash.add(obj.must_link);
+    }

     for (comp.c_object_table.keys()) |key| {
         _ = try man.addFile(key.src.src_path, null);
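[Note, not part of the patch] From here on, every link input is a Compilation.LinkObject: a path plus a must_link flag that defaults to false. A minimal self-contained sketch of how such entries are built (made-up file names, same 0.9-era std.ArrayList API the patch itself uses):

    const std = @import("std");

    // Local copy of Compilation.LinkObject from the hunk above, so this
    // sketch compiles on its own with `zig test`.
    const LinkObject = struct {
        path: []const u8,
        must_link: bool = false,
    };

    test "collecting link objects" {
        var link_objects = std.ArrayList(LinkObject).init(std.testing.allocator);
        defer link_objects.deinit();

        // A plain object file keeps the default lazy-archive behavior.
        try link_objects.append(.{ .path = "main.o" });
        // An archive the user wants fully loaded (e.g. via -force_load).
        try link_objects.append(.{ .path = "libfoo.a", .must_link = true });

        try std.testing.expect(!link_objects.items[0].must_link);
        try std.testing.expect(link_objects.items[1].must_link);
    }
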
diff --git a/src/link.zig b/src/link.zig
index a4b990cf6b..3dc75c77e8 100644
--- a/src/link.zig
+++ b/src/link.zig
@@ -155,7 +155,7 @@ pub const Options = struct {
     soname: ?[]const u8,
     llvm_cpu_features: ?[*:0]const u8,
-    objects: []const []const u8,
+    objects: []Compilation.LinkObject,
     framework_dirs: []const []const u8,
     frameworks: []const []const u8,
     system_libs: std.StringArrayHashMapUnmanaged(SystemLib),
@@ -755,7 +755,10 @@ pub const File = struct {
             // We are about to obtain this lock, so here we give other processes a chance first.
             base.releaseLock();
-            try man.addListOfFiles(base.options.objects);
+            for (base.options.objects) |obj| {
+                _ = try man.addFile(obj.path, null);
+                man.hash.add(obj.must_link);
+            }
             for (comp.c_object_table.keys()) |key| {
                 _ = try man.addFile(key.status.success.object_path, null);
             }
@@ -792,8 +795,8 @@ pub const File = struct {
         var object_files = try std.ArrayList([*:0]const u8).initCapacity(base.allocator, num_object_files);
         defer object_files.deinit();

-        for (base.options.objects) |obj_path| {
-            object_files.appendAssumeCapacity(try arena.dupeZ(u8, obj_path));
+        for (base.options.objects) |obj| {
+            object_files.appendAssumeCapacity(try arena.dupeZ(u8, obj.path));
         }
         for (comp.c_object_table.keys()) |key| {
             object_files.appendAssumeCapacity(try arena.dupeZ(u8, key.status.success.object_path));
diff --git a/src/link/Coff.zig b/src/link/Coff.zig
index ea9c457716..26b8c78a28 100644
--- a/src/link/Coff.zig
+++ b/src/link/Coff.zig
@@ -943,7 +943,10 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
         comptime assert(Compilation.link_hash_implementation_version == 1);
-        try man.addListOfFiles(self.base.options.objects);
+        for (self.base.options.objects) |obj| {
+            _ = try man.addFile(obj.path, null);
+            man.hash.add(obj.must_link);
+        }
         for (comp.c_object_table.keys()) |key| {
             _ = try man.addFile(key.status.success.object_path, null);
         }
@@ -1005,7 +1008,7 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
         // build-obj. See also the corresponding TODO in linkAsArchive.
         const the_object_path = blk: {
             if (self.base.options.objects.len != 0)
-                break :blk self.base.options.objects[0];
+                break :blk self.base.options.objects[0].path;

             if (comp.c_object_table.count() != 0)
                 break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -1110,7 +1113,10 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
             try argv.append(try allocPrint(arena, "-LIBPATH:{s}", .{lib_dir}));
         }

-        try argv.appendSlice(self.base.options.objects);
+        try argv.ensureUnusedCapacity(self.base.options.objects.len);
+        for (self.base.options.objects) |obj| {
+            argv.appendAssumeCapacity(obj.path);
+        }

         for (comp.c_object_table.keys()) |key| {
             try argv.append(key.status.success.object_path);
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index 36fe97f69b..d0f9d7851d 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -1384,7 +1384,10 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
         try man.addOptionalFile(self.base.options.linker_script);
         try man.addOptionalFile(self.base.options.version_script);
-        try man.addListOfFiles(self.base.options.objects);
+        for (self.base.options.objects) |obj| {
+            _ = try man.addFile(obj.path, null);
+            man.hash.add(obj.must_link);
+        }
         for (comp.c_object_table.keys()) |key| {
             _ = try man.addFile(key.status.success.object_path, null);
         }
@@ -1469,7 +1472,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
         // build-obj. See also the corresponding TODO in linkAsArchive.
         const the_object_path = blk: {
             if (self.base.options.objects.len != 0)
-                break :blk self.base.options.objects[0];
+                break :blk self.base.options.objects[0].path;

             if (comp.c_object_table.count() != 0)
                 break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -1678,7 +1681,10 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
         }

         // Positional arguments to the linker such as object files.
-        try argv.appendSlice(self.base.options.objects);
+        try argv.ensureUnusedCapacity(self.base.options.objects.len);
+        for (self.base.options.objects) |obj| {
+            argv.appendAssumeCapacity(obj.path);
+        }

         for (comp.c_object_table.keys()) |key| {
             try argv.append(key.status.success.object_path);
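[Note, not part of the patch] The manifest hunks above replace addListOfFiles with a loop that adds each object file and also hashes its must_link flag, so the same archive linked with and without -force_load cannot share a cache entry. A self-contained illustration of that idea, with std.hash.Wyhash standing in for the real Cache.Manifest hasher:

    const std = @import("std");

    test "must_link participates in the cache hash" {
        const path = "libfoo.a";

        // Hash the path plus must_link = false ...
        var lazy = std.hash.Wyhash.init(0);
        lazy.update(path);
        lazy.update(&[_]u8{@boolToInt(false)});

        // ... and the same path plus must_link = true.
        var forced = std.hash.Wyhash.init(0);
        forced.update(path);
        forced.update(&[_]u8{@boolToInt(true)});

        // Different flags produce different digests, so toggling -force_load
        // invalidates the cached link.
        try std.testing.expect(lazy.final() != forced.final());
    }
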
diff --git a/src/link/MachO.zig b/src/link/MachO.zig
index 7dd674eed9..26cbe60c8e 100644
--- a/src/link/MachO.zig
+++ b/src/link/MachO.zig
@@ -141,6 +141,9 @@ objc_selrefs_section_index: ?u16 = null,
 objc_classrefs_section_index: ?u16 = null,
 objc_data_section_index: ?u16 = null,

+rustc_section_index: ?u16 = null,
+rustc_section_size: u64 = 0,
+
 bss_file_offset: u32 = 0,
 tlv_bss_file_offset: u32 = 0,
@@ -496,7 +499,10 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
         comptime assert(Compilation.link_hash_implementation_version == 1);
-        try man.addListOfFiles(self.base.options.objects);
+        for (self.base.options.objects) |obj| {
+            _ = try man.addFile(obj.path, null);
+            man.hash.add(obj.must_link);
+        }
         for (comp.c_object_table.keys()) |key| {
             _ = try man.addFile(key.status.success.object_path, null);
         }
@@ -568,8 +574,9 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
         // here. TODO: think carefully about how we can avoid this redundant operation when doing
         // build-obj. See also the corresponding TODO in linkAsArchive.
         const the_object_path = blk: {
-            if (self.base.options.objects.len != 0)
-                break :blk self.base.options.objects[0];
+            if (self.base.options.objects.len != 0) {
+                break :blk self.base.options.objects[0].path;
+            }

             if (comp.c_object_table.count() != 0)
                 break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -678,8 +685,19 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
         // Positional arguments to the linker such as object files and static archives.
         var positionals = std.ArrayList([]const u8).init(arena);
+        try positionals.ensureUnusedCapacity(self.base.options.objects.len);

-        try positionals.appendSlice(self.base.options.objects);
+        var must_link_archives = std.StringArrayHashMap(void).init(arena);
+        try must_link_archives.ensureUnusedCapacity(self.base.options.objects.len);
+
+        for (self.base.options.objects) |obj| {
+            if (must_link_archives.contains(obj.path)) continue;
+            if (obj.must_link) {
+                _ = must_link_archives.getOrPutAssumeCapacity(obj.path);
+            } else {
+                _ = positionals.appendAssumeCapacity(obj.path);
+            }
+        }

         for (comp.c_object_table.keys()) |key| {
             try positionals.append(key.status.success.object_path);
@@ -886,12 +904,17 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
                 try argv.append("dynamic_lookup");
             }
+            for (must_link_archives.keys()) |lib| {
+                try argv.append(try std.fmt.allocPrint(arena, "-force_load {s}", .{lib}));
+            }
+
             Compilation.dump_argv(argv.items);
         }

         var dependent_libs = std.fifo.LinearFifo(Dylib.Id, .Dynamic).init(self.base.allocator);
         defer dependent_libs.deinit();
         try self.parseInputFiles(positionals.items, self.base.options.sysroot, &dependent_libs);
+        try self.parseAndForceLoadStaticArchives(must_link_archives.keys());
         try self.parseLibs(libs.items, self.base.options.sysroot, &dependent_libs);
         try self.parseDependentLibs(self.base.options.sysroot, &dependent_libs);
     }
@@ -993,6 +1016,11 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
         try self.writeAtoms();
     }

+    if (self.rustc_section_index) |id| {
+        const seg = &self.load_commands.items[self.data_segment_cmd_index.?].segment;
+        const sect = &seg.sections.items[id];
+        sect.size = self.rustc_section_size;
+    }
     if (self.bss_section_index) |idx| {
         const seg = &self.load_commands.items[self.data_segment_cmd_index.?].segment;
         const sect = &seg.sections.items[idx];
@@ -1195,7 +1223,7 @@ fn parseObject(self: *MachO, path: []const u8) !bool {
     return true;
 }

-fn parseArchive(self: *MachO, path: []const u8) !bool {
+fn parseArchive(self: *MachO, path: []const u8, force_load: bool) !bool {
     const file = fs.cwd().openFile(path, .{}) catch |err| switch (err) {
         error.FileNotFound => return false,
         else => |e| return e,
     };
@@ -1218,7 +1246,23 @@ fn parseArchive(self: *MachO, path: []const u8) !bool {
         else => |e| return e,
     };

-    try self.archives.append(self.base.allocator, archive);
+    if (force_load) {
+        defer archive.deinit(self.base.allocator);
+        // Get all offsets from the ToC
+        var offsets = std.AutoArrayHashMap(u32, void).init(self.base.allocator);
+        defer offsets.deinit();
+        for (archive.toc.values()) |offs| {
+            for (offs.items) |off| {
+                _ = try offsets.getOrPut(off);
+            }
+        }
+        for (offsets.keys()) |off| {
+            const object = try self.objects.addOne(self.base.allocator);
+            object.* = try archive.parseObject(self.base.allocator, self.base.options.target, off);
+        }
+    } else {
+        try self.archives.append(self.base.allocator, archive);
+    }

     return true;
 }
@@ -1303,7 +1347,7 @@ fn parseInputFiles(self: *MachO, files: []const []const u8, syslibroot: ?[]const
         log.debug("parsing input file path '{s}'", .{full_path});

         if (try self.parseObject(full_path)) continue;
-        if (try self.parseArchive(full_path)) continue;
+        if (try self.parseArchive(full_path, false)) continue;
         if (try self.parseDylib(full_path, .{
             .syslibroot = syslibroot,
             .dependent_libs = dependent_libs,
@@ -1313,6 +1357,21 @@ fn parseInputFiles(self: *MachO, files: []const []const u8, syslibroot: ?[]const
     }
 }

+fn parseAndForceLoadStaticArchives(self: *MachO, files: []const []const u8) !void {
+    for (files) |file_name| {
+        const full_path = full_path: {
+            var buffer: [fs.MAX_PATH_BYTES]u8 = undefined;
+            const path = try fs.realpath(file_name, &buffer);
+            break :full_path try self.base.allocator.dupe(u8, path);
+        };
+        defer self.base.allocator.free(full_path);
+        log.debug("parsing and force loading static archive '{s}'", .{full_path});
+
+        if (try self.parseArchive(full_path, true)) continue;
+        log.warn("unknown filetype: expected static archive: '{s}'", .{file_name});
+    }
+}
+
 fn parseLibs(self: *MachO, libs: []const []const u8, syslibroot: ?[]const u8, dependent_libs: anytype) !void {
     for (libs) |lib| {
         log.debug("parsing lib path '{s}'", .{lib});
@@ -1320,7 +1379,7 @@ fn parseLibs(self: *MachO, libs: []const []const u8, syslibroot: ?[]const u8, de
             .syslibroot = syslibroot,
             .dependent_libs = dependent_libs,
         })) continue;
-        if (try self.parseArchive(lib)) continue;
+        if (try self.parseArchive(lib, false)) continue;

         log.warn("unknown filetype for a library: '{s}'", .{lib});
     }
@@ -1886,6 +1945,24 @@ pub fn getMatchingSection(self: *MachO, sect: macho.section_64) !?MatchingSectio
                     .seg = self.data_segment_cmd_index.?,
                     .sect = self.objc_data_section_index.?,
                 };
+            } else if (mem.eql(u8, sectname, ".rustc")) {
+                if (self.rustc_section_index == null) {
+                    self.rustc_section_index = try self.initSection(
+                        self.data_segment_cmd_index.?,
+                        ".rustc",
+                        sect.size,
+                        sect.@"align",
+                        .{},
+                    );
+                    // We need to preserve the section size for rustc to properly
+                    // decompress the metadata.
+                    self.rustc_section_size = sect.size;
+                }
+
+                break :blk .{
+                    .seg = self.data_segment_cmd_index.?,
+                    .sect = self.rustc_section_index.?,
+                };
             } else {
                 if (self.data_section_index == null) {
                     self.data_section_index = try self.initSection(
@@ -5212,6 +5289,7 @@ fn sortSections(self: *MachO) !void {
     // __DATA segment
     const indices = &[_]*?u16{
+        &self.rustc_section_index,
         &self.la_symbol_ptr_section_index,
         &self.objc_const_section_index,
         &self.objc_selrefs_section_index,
diff --git a/src/link/MachO/Atom.zig b/src/link/MachO/Atom.zig
index a2f1b385dc..2b16bc8cb0 100644
--- a/src/link/MachO/Atom.zig
+++ b/src/link/MachO/Atom.zig
@@ -419,6 +419,7 @@ pub fn parseRelocs(self: *Atom, relocs: []macho.relocation_info, context: RelocC
             .X86_64_RELOC_BRANCH => {
                 // TODO rewrite relocation
                 try addStub(target, context);
+                addend = mem.readIntLittle(i32, self.code.items[offset..][0..4]);
             },
             .X86_64_RELOC_GOT, .X86_64_RELOC_GOT_LOAD => {
                 // TODO rewrite relocation
@@ -1003,7 +1004,7 @@ pub fn resolveRelocs(self: *Atom, macho_file: *MachO) !void {
             .X86_64_RELOC_BRANCH => {
                 const displacement = try math.cast(
                     i32,
-                    @intCast(i64, target_addr) - @intCast(i64, source_addr) - 4,
+                    @intCast(i64, target_addr) - @intCast(i64, source_addr) - 4 + rel.addend,
                 );
                 mem.writeIntLittle(u32, self.code.items[rel.offset..][0..4], @bitCast(u32, displacement));
             },
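[Note, not part of the patch] The two Atom.zig hunks cooperate: the addend already encoded in a branch instruction's 32-bit immediate is read out when the relocation is parsed and added back when the displacement is resolved. A self-contained sketch of that arithmetic, written against the same 0.9-era builtins and std calls used above:

    const std = @import("std");
    const math = std.math;
    const mem = std.mem;

    test "branch relocation keeps its addend" {
        // Four little-endian bytes as they would appear in self.code at the
        // relocation offset: an addend of 16 baked into the call immediate.
        var code = [_]u8{ 0x10, 0x00, 0x00, 0x00 };
        const addend = mem.readIntLittle(i32, code[0..4]);
        try std.testing.expectEqual(@as(i32, 16), addend);

        const source_addr: u64 = 0x1000;
        const target_addr: u64 = 0x2000;
        // Same expression as resolveRelocs above, with the addend re-applied.
        const displacement = try math.cast(
            i32,
            @intCast(i64, target_addr) - @intCast(i64, source_addr) - 4 + addend,
        );
        mem.writeIntLittle(u32, code[0..4], @bitCast(u32, displacement));
        try std.testing.expectEqual(@as(i32, 0x100c), mem.readIntLittle(i32, code[0..4]));
    }
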
diff --git a/src/link/MachO/Object.zig b/src/link/MachO/Object.zig
index 12ca463760..70cdda6c2f 100644
--- a/src/link/MachO/Object.zig
+++ b/src/link/MachO/Object.zig
@@ -409,7 +409,7 @@ pub fn parseIntoAtoms(self: *Object, allocator: Allocator, macho_file: *MachO) !
     } else blk: {
         var iundefsym: usize = sorted_all_nlists.items.len;
         while (iundefsym > 0) : (iundefsym -= 1) {
-            const nlist = sorted_all_nlists.items[iundefsym];
+            const nlist = sorted_all_nlists.items[iundefsym - 1];
             if (nlist.nlist.sect()) break;
         }
         break :blk iundefsym;
diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig
index 99311b8441..34723cdb27 100644
--- a/src/link/Wasm.zig
+++ b/src/link/Wasm.zig
@@ -1128,7 +1128,10 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
         comptime assert(Compilation.link_hash_implementation_version == 1);
-        try man.addListOfFiles(self.base.options.objects);
+        for (self.base.options.objects) |obj| {
+            _ = try man.addFile(obj.path, null);
+            man.hash.add(obj.must_link);
+        }
         for (comp.c_object_table.keys()) |key| {
             _ = try man.addFile(key.status.success.object_path, null);
         }
@@ -1181,7 +1184,7 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
         // build-obj. See also the corresponding TODO in linkAsArchive.
         const the_object_path = blk: {
             if (self.base.options.objects.len != 0)
-                break :blk self.base.options.objects[0];
+                break :blk self.base.options.objects[0].path;

             if (comp.c_object_table.count() != 0)
                 break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -1346,7 +1349,10 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
         }

         // Positional arguments to the linker such as object files.
-        try argv.appendSlice(self.base.options.objects);
+        try argv.ensureUnusedCapacity(self.base.options.objects.len);
+        for (self.base.options.objects) |obj| {
+            argv.appendAssumeCapacity(obj.path);
+        }

         for (comp.c_object_table.keys()) |key| {
             try argv.append(key.status.success.object_path);
diff --git a/src/main.zig b/src/main.zig
index 7756f48e26..c924b14b69 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -705,7 +705,7 @@ fn buildOutputType(
     var c_source_files = std.ArrayList(Compilation.CSourceFile).init(gpa);
     defer c_source_files.deinit();

-    var link_objects = std.ArrayList([]const u8).init(gpa);
+    var link_objects = std.ArrayList(Compilation.LinkObject).init(gpa);
     defer link_objects.deinit();

     var framework_dirs = std.ArrayList([]const u8).init(gpa);
@@ -1238,7 +1238,7 @@ fn buildOutputType(
                 }
             } else switch (Compilation.classifyFileExt(arg)) {
                 .object, .static_library, .shared_library => {
-                    try link_objects.append(arg);
+                    try link_objects.append(.{ .path = arg });
                 },
                 .assembly, .c, .cpp, .h, .ll, .bc, .m, .mm => {
                     try c_source_files.append(.{
@@ -1309,7 +1309,7 @@ fn buildOutputType(
                 switch (file_ext) {
                     .assembly, .c, .cpp, .ll, .bc, .h, .m, .mm => try c_source_files.append(.{ .src_path = it.only_arg }),
                     .unknown, .shared_library, .object, .static_library => {
-                        try link_objects.append(it.only_arg);
+                        try link_objects.append(.{ .path = it.only_arg });
                     },
                     .zig => {
                         if (root_src_file) |other| {
@@ -1748,6 +1748,15 @@ fn buildOutputType(
                     fatal("expected linker arg after '{s}'", .{arg});
                 }
                 install_name = linker_args.items[i];
+            } else if (mem.eql(u8, arg, "-force_load")) {
+                i += 1;
+                if (i >= linker_args.items.len) {
+                    fatal("expected linker arg after '{s}'", .{arg});
+                }
+                try link_objects.append(.{
+                    .path = linker_args.items[i],
+                    .must_link = true,
+                });
             } else {
                 warn("unsupported linker arg: {s}", .{arg});
             }
@@ -1842,7 +1851,7 @@ fn buildOutputType(
             const basename = fs.path.basename(c_source_files.items[0].src_path);
             break :blk basename[0 .. basename.len - fs.path.extension(basename).len];
         } else if (link_objects.items.len >= 1) {
-            const basename = fs.path.basename(link_objects.items[0]);
+            const basename = fs.path.basename(link_objects.items[0].path);
             break :blk basename[0 .. basename.len - fs.path.extension(basename).len];
         } else if (emit_bin == .yes) {
             const basename = fs.path.basename(emit_bin.yes);
@@ -2045,7 +2054,7 @@ fn buildOutputType(
                     test_path.items, @errorName(e),
                 }),
             };
-            try link_objects.append(try arena.dupe(u8, test_path.items));
+            try link_objects.append(.{ .path = try arena.dupe(u8, test_path.items) });
             break;
         } else {
             var search_paths = std.ArrayList(u8).init(arena);
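[Note, not part of the patch] The main.zig hunk above turns a `-force_load <path>` pair in the collected linker args into a LinkObject with must_link set; on the cc-compatible command line such args would normally arrive via -Wl,-force_load,<archive> (that plumbing is assumed here, not shown in the patch). A self-contained sketch of the same handling:

    const std = @import("std");

    const LinkObject = struct {
        path: []const u8,
        must_link: bool = false,
    };

    test "-force_load yields a must_link object" {
        // Stand-in for linker_args after the driver has split them out.
        const linker_args = [_][]const u8{ "-force_load", "libfoo.a" };

        var link_objects = std.ArrayList(LinkObject).init(std.testing.allocator);
        defer link_objects.deinit();

        var i: usize = 0;
        while (i < linker_args.len) : (i += 1) {
            const arg = linker_args[i];
            if (std.mem.eql(u8, arg, "-force_load")) {
                // The flag consumes the next linker arg as the archive path.
                i += 1;
                if (i >= linker_args.len) return error.MissingLinkerArg;
                try link_objects.append(.{
                    .path = linker_args[i],
                    .must_link = true,
                });
            }
        }

        try std.testing.expectEqualStrings("libfoo.a", link_objects.items[0].path);
        try std.testing.expect(link_objects.items[0].must_link);
    }

On the MachO side, such entries are routed through parseAndForceLoadStaticArchives above, which pulls every member object out of the archive instead of registering it for lazy symbol resolution.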