move a large chunk of linker logic away from "options"

These options are only supposed to be provided to the initialization
functions, resolved there, and then the computed values stored in the
appropriate place (the base struct or the object-format-specific
structs).

Many more to go...
Andrew Kelley 2023-12-11 21:16:49 -07:00
parent 12de7e3472
commit 2bef0715c7
18 changed files with 1245 additions and 962 deletions
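To illustrate the pattern the commit message describes, here is a minimal sketch (not part of this diff; the OpenOptions/Base names and the default values are hypothetical): optional knobs are accepted only by the open/init function, resolved there once with `orelse` defaults, and the resolved values are stored on the base struct for the rest of the linker to read.

const std = @import("std");

// Hypothetical, pared-down mirror of the pattern used in this commit:
// optional settings live only in an "open options" struct, the init
// function resolves them exactly once, and the computed values are
// stored on the base struct (or an object-format-specific struct).
const OpenOptions = struct {
    stack_size: ?u64 = null,
    gc_sections: ?bool = null,
    optimize_mode: std.builtin.OptimizeMode = .Debug,
};

const Base = struct {
    stack_size: u64,
    gc_sections: bool,

    fn open(options: OpenOptions) Base {
        return .{
            // Resolve each option here, not at every use site.
            .stack_size = options.stack_size orelse 16 * 1024 * 1024,
            .gc_sections = options.gc_sections orelse
                (options.optimize_mode != .Debug),
        };
    }
};

pub fn main() void {
    const base = Base.open(.{ .optimize_mode = .ReleaseFast });
    // Later code reads the resolved fields directly instead of
    // re-deriving them from the options.
    std.debug.print("stack={d} gc={}\n", .{ base.stack_size, base.gc_sections });
}

The hunks below apply the same resolution in each object format's open/createEmpty function (for example `options.stack_size orelse 16777216` and `options.gc_sections orelse (optimize_mode != .Debug)`).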

View File

@ -69,6 +69,8 @@ root_name: [:0]const u8,
cache_mode: CacheMode,
include_compiler_rt: bool,
objects: []Compilation.LinkObject,
/// Needed only for passing -F args to clang.
framework_dirs: []const []const u8,
/// These are *always* dynamically linked. Static libraries will be
/// provided as positional arguments.
system_libs: std.StringArrayHashMapUnmanaged(SystemLib),
@ -133,6 +135,7 @@ verbose_llvm_ir: ?[]const u8,
verbose_llvm_bc: ?[]const u8,
verbose_cimport: bool,
verbose_llvm_cpu_features: bool,
verbose_link: bool,
disable_c_depfile: bool,
time_report: bool,
stack_report: bool,
@ -220,6 +223,8 @@ emit_llvm_bc: ?EmitLoc,
work_queue_wait_group: WaitGroup = .{},
astgen_wait_group: WaitGroup = .{},
llvm_opt_bisect_limit: c_int,
pub const Emit = struct {
/// Where the output will go.
directory: Directory,
@ -340,7 +345,7 @@ const Job = union(enum) {
/// one of WASI libc static objects
wasi_libc_crt_file: wasi_libc.CRTFile,
/// The value is the index into `link.File.Options.system_libs`.
/// The value is the index into `system_libs`.
windows_import_lib: usize,
};
@ -819,6 +824,20 @@ pub const cache_helpers = struct {
addEmitLoc(hh, optional_emit_loc orelse return);
}
pub fn addOptionalDebugFormat(hh: *Cache.HashHelper, x: ?link.File.DebugFormat) void {
hh.add(x != null);
addDebugFormat(hh, x orelse return);
}
pub fn addDebugFormat(hh: *Cache.HashHelper, x: link.File.DebugFormat) void {
const tag: @typeInfo(link.File.DebugFormat).Union.tag_type.? = x;
hh.add(tag);
switch (x) {
.strip, .code_view => {},
.dwarf => |f| hh.add(f),
}
}
pub fn hashCSource(self: *Cache.Manifest, c_source: CSourceFile) !void {
_ = try self.addFile(c_source.src_path, null);
// Hash the extra flags, with special care to call addFile for file parameters.
@ -846,7 +865,7 @@ pub const ClangPreprocessorMode = enum {
stdout,
};
pub const Framework = link.Framework;
pub const Framework = link.File.MachO.Framework;
pub const SystemLib = link.SystemLib;
pub const CacheMode = link.CacheMode;
@ -952,7 +971,7 @@ pub const InitOptions = struct {
linker_print_gc_sections: bool = false,
linker_print_icf_sections: bool = false,
linker_print_map: bool = false,
linker_opt_bisect_limit: i32 = -1,
llvm_opt_bisect_limit: i32 = -1,
each_lib_rpath: ?bool = null,
build_id: ?std.zig.BuildId = null,
disable_c_depfile: bool = false,
@ -994,7 +1013,7 @@ pub const InitOptions = struct {
hash_style: link.HashStyle = .both,
entry: ?[]const u8 = null,
force_undefined_symbols: std.StringArrayHashMapUnmanaged(void) = .{},
stack_size_override: ?u64 = null,
stack_size: ?u64 = null,
image_base_override: ?u64 = null,
version: ?std.SemanticVersion = null,
compatibility_version: ?std.SemanticVersion = null,
@ -1007,7 +1026,7 @@ pub const InitOptions = struct {
test_name_prefix: ?[]const u8 = null,
test_runner_path: ?[]const u8 = null,
subsystem: ?std.Target.SubSystem = null,
dwarf_format: ?std.dwarf.Format = null,
debug_format: ?link.File.DebugFormat = null,
/// (Zig compiler development) Enable dumping linker's state as JSON.
enable_link_snapshots: bool = false,
/// (Darwin) Install name of the dylib
@ -1297,7 +1316,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
cache.hash.add(options.config.link_libcpp);
cache.hash.add(options.config.link_libunwind);
cache.hash.add(output_mode);
cache.hash.addOptional(options.dwarf_format);
cache_helpers.addOptionalDebugFormat(&cache.hash, options.debug_format);
cache_helpers.addOptionalEmitLoc(&cache.hash, options.emit_bin);
cache_helpers.addOptionalEmitLoc(&cache.hash, options.emit_implib);
cache_helpers.addOptionalEmitLoc(&cache.hash, options.emit_docs);
@ -1596,6 +1615,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.verbose_llvm_bc = options.verbose_llvm_bc,
.verbose_cimport = options.verbose_cimport,
.verbose_llvm_cpu_features = options.verbose_llvm_cpu_features,
.verbose_link = options.verbose_link,
.disable_c_depfile = options.disable_c_depfile,
.owned_link_dir = owned_link_dir,
.color = options.color,
@ -1617,6 +1637,8 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.libc_installation = libc_dirs.libc_installation,
.include_compiler_rt = include_compiler_rt,
.objects = options.link_objects,
.framework_dirs = options.framework_dirs,
.llvm_opt_bisect_limit = options.llvm_opt_bisect_limit,
};
if (bin_file_emit) |emit| {
@ -1636,7 +1658,6 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.z_max_page_size = options.linker_z_max_page_size,
.darwin_sdk_layout = libc_dirs.darwin_sdk_layout,
.frameworks = options.frameworks,
.framework_dirs = options.framework_dirs,
.wasi_emulated_libs = options.wasi_emulated_libs,
.lib_dirs = options.lib_dirs,
.rpath_list = options.rpath_list,
@ -1659,13 +1680,12 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.print_gc_sections = options.linker_print_gc_sections,
.print_icf_sections = options.linker_print_icf_sections,
.print_map = options.linker_print_map,
.opt_bisect_limit = options.linker_opt_bisect_limit,
.tsaware = options.linker_tsaware,
.nxcompat = options.linker_nxcompat,
.dynamicbase = options.linker_dynamicbase,
.major_subsystem_version = options.major_subsystem_version,
.minor_subsystem_version = options.minor_subsystem_version,
.stack_size_override = options.stack_size_override,
.stack_size = options.stack_size,
.image_base_override = options.image_base_override,
.version_script = options.version_script,
.gc_sections = options.linker_gc_sections,
@ -1674,7 +1694,6 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.rdynamic = options.rdynamic,
.soname = options.soname,
.compatibility_version = options.compatibility_version,
.verbose_link = options.verbose_link,
.dll_export_fns = dll_export_fns,
.skip_linker_dependencies = options.skip_linker_dependencies,
.parent_compilation_link_libc = options.parent_compilation_link_libc,
@ -1682,7 +1701,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.build_id = build_id,
.disable_lld_caching = options.disable_lld_caching or cache_mode == .whole,
.subsystem = options.subsystem,
.dwarf_format = options.dwarf_format,
.debug_format = options.debug_format,
.hash_style = options.hash_style,
.enable_link_snapshots = options.enable_link_snapshots,
.install_name = options.install_name,
@ -1826,7 +1845,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
// When linking mingw-w64 there are some import libs we always need.
for (mingw.always_link_libs) |name| {
try comp.bin_file.options.system_libs.put(comp.gpa, name, .{
try comp.system_libs.put(comp.gpa, name, .{
.needed = false,
.weak = false,
.path = null,
@ -1835,7 +1854,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
}
// Generate Windows import libs.
if (target.os.tag == .windows) {
const count = comp.bin_file.options.system_libs.count();
const count = comp.system_libs.count();
try comp.work_queue.ensureUnusedCapacity(count);
for (0..count) |i| {
comp.work_queue.writeItemAssumeCapacity(.{ .windows_import_lib = i });
@ -2450,7 +2469,7 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
cache_helpers.addOptionalEmitLoc(&man.hash, comp.emit_llvm_ir);
cache_helpers.addOptionalEmitLoc(&man.hash, comp.emit_llvm_bc);
man.hash.addOptional(comp.bin_file.options.stack_size_override);
man.hash.add(comp.bin_file.stack_size);
man.hash.addOptional(comp.bin_file.options.image_base_override);
man.hash.addOptional(comp.bin_file.options.gc_sections);
man.hash.add(comp.bin_file.options.eh_frame_hdr);
@ -2460,7 +2479,7 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
man.hash.addListOfBytes(comp.bin_file.options.rpath_list);
man.hash.addListOfBytes(comp.bin_file.options.symbol_wrap_set.keys());
man.hash.add(comp.bin_file.options.each_lib_rpath);
man.hash.add(comp.bin_file.options.build_id);
man.hash.add(comp.bin_file.build_id);
man.hash.add(comp.bin_file.options.skip_linker_dependencies);
man.hash.add(comp.bin_file.options.z_nodelete);
man.hash.add(comp.bin_file.options.z_notext);
@ -2488,9 +2507,9 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
}
man.hash.addOptionalBytes(comp.bin_file.options.soname);
man.hash.addOptional(comp.bin_file.options.version);
try link.hashAddSystemLibs(man, comp.bin_file.options.system_libs);
try link.hashAddSystemLibs(man, comp.system_libs);
man.hash.addListOfBytes(comp.bin_file.options.force_undefined_symbols.keys());
man.hash.addOptional(comp.bin_file.options.allow_shlib_undefined);
man.hash.addOptional(comp.bin_file.allow_shlib_undefined);
man.hash.add(comp.bin_file.options.bind_global_refs_locally);
man.hash.add(comp.bin_file.options.tsan);
man.hash.addOptionalBytes(comp.bin_file.options.sysroot);
@ -2505,7 +2524,7 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
man.hash.addOptional(comp.bin_file.options.global_base);
// Mach-O specific stuff
man.hash.addListOfBytes(comp.bin_file.options.framework_dirs);
man.hash.addListOfBytes(comp.framework_dirs);
try link.hashAddFrameworks(man, comp.bin_file.options.frameworks);
try man.addOptionalFile(comp.bin_file.options.entitlements);
man.hash.addOptional(comp.bin_file.options.pagezero_size);
@ -3561,7 +3580,7 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
const named_frame = tracy.namedFrame("windows_import_lib");
defer named_frame.end();
const link_lib = comp.bin_file.options.system_libs.keys()[index];
const link_lib = comp.system_libs.keys()[index];
mingw.buildImportLib(comp, link_lib) catch |err| {
// TODO Surface more error details.
comp.lockAndSetMiscFailure(
@ -4964,7 +4983,7 @@ pub fn addCCArgs(
try argv.appendSlice(&.{ "-iframework", framework_dir });
}
for (comp.bin_file.options.framework_dirs) |framework_dir| {
for (comp.framework_dirs) |framework_dir| {
try argv.appendSlice(&.{ "-F", framework_dir });
}
@ -5219,22 +5238,21 @@ pub fn addCCArgs(
},
}
if (!comp.bin_file.options.strip) {
switch (target.ofmt) {
.coff => {
// -g is required here because -gcodeview doesn't trigger debug info
// generation, it only changes the type of information generated.
try argv.appendSlice(&.{ "-g", "-gcodeview" });
},
.elf, .macho => {
try argv.append("-gdwarf-4");
if (comp.bin_file.options.dwarf_format) |f| switch (f) {
.@"32" => try argv.append("-gdwarf32"),
.@"64" => try argv.append("-gdwarf64"),
};
},
else => try argv.append("-g"),
}
try argv.ensureUnusedCapacity(2);
switch (comp.bin_file.debug_format) {
.strip => {},
.code_view => {
// -g is required here because -gcodeview doesn't trigger debug info
// generation, it only changes the type of information generated.
argv.appendSliceAssumeCapacity(&.{ "-g", "-gcodeview" });
},
.dwarf => |f| {
argv.appendAssumeCapacity("-gdwarf-4");
switch (f) {
.@"32" => argv.appendAssumeCapacity("-gdwarf32"),
.@"64" => argv.appendAssumeCapacity("-gdwarf64"),
}
},
}
if (target_util.llvmMachineAbi(target)) |mabi| {
@ -6306,7 +6324,7 @@ pub fn addLinkLib(comp: *Compilation, lib_name: []const u8) !void {
// This happens when an `extern "foo"` function is referenced.
// If we haven't seen this library yet and we're targeting Windows, we need
// to queue up a work item to produce the DLL import library for this.
const gop = try comp.bin_file.options.system_libs.getOrPut(comp.gpa, lib_name);
const gop = try comp.system_libs.getOrPut(comp.gpa, lib_name);
if (!gop.found_existing and comp.getTarget().os.tag == .windows) {
gop.value_ptr.* = .{
.needed = true,
@ -6314,7 +6332,7 @@ pub fn addLinkLib(comp: *Compilation, lib_name: []const u8) !void {
.path = null,
};
try comp.work_queue.writeItem(.{
.windows_import_lib = comp.bin_file.options.system_libs.count() - 1,
.windows_import_lib = comp.system_libs.count() - 1,
});
}
}

View File

@ -137,7 +137,11 @@ pub fn resolve(options: Options) !Config {
const use_llvm = b: {
// If emitting to LLVM bitcode object format, must use LLVM backend.
if (options.emit_llvm_ir or options.emit_llvm_bc) {
if (options.use_llvm == false) return error.EmittingLlvmModuleRequiresLlvmBackend;
if (options.use_llvm == false)
return error.EmittingLlvmModuleRequiresLlvmBackend;
if (!target_util.hasLlvmSupport(target, target.ofmt))
return error.LlvmLacksTargetSupport;
break :b true;
}
@ -147,6 +151,12 @@ pub fn resolve(options: Options) !Config {
break :b false;
}
// If Zig does not support the target, then we can't use it.
if (target_util.zigBackend(target, false) == .other) {
if (options.use_llvm == false) return error.ZigLacksTargetSupport;
break :b true;
}
if (options.use_llvm) |x| break :b x;
// If we have no zig code to compile, no need for LLVM.
@ -166,16 +176,23 @@ pub fn resolve(options: Options) !Config {
break :b !target_util.selfHostedBackendIsAsRobustAsLlvm(target);
};
if (!use_lib_llvm and use_llvm and options.emit_bin) {
// Explicit request to use LLVM to produce an object file, but without
// using LLVM libraries. Impossible.
return error.EmittingBinaryRequiresLlvmLibrary;
if (options.emit_bin) {
if (!use_lib_llvm and use_llvm) {
// Explicit request to use LLVM to produce an object file, but without
// using LLVM libraries. Impossible.
return error.EmittingBinaryRequiresLlvmLibrary;
}
if (target_util.zigBackend(target, use_llvm) == .other) {
// There is no compiler backend available for this target.
return error.ZigLacksTargetSupport;
}
}
// Make a decision on whether to use LLD or our own linker.
const use_lld = b: {
if (target.isDarwin()) {
if (options.use_lld == true) return error.LldIncompatibleOs;
if (!target_util.hasLldSupport(target.ofmt)) {
if (options.use_lld == true) return error.LldIncompatibleObjectFormat;
break :b false;
}
@ -184,11 +201,6 @@ pub fn resolve(options: Options) !Config {
break :b false;
}
if (target.ofmt == .c) {
if (options.use_lld == true) return error.LldIncompatibleObjectFormat;
break :b false;
}
if (options.lto == true) {
if (options.use_lld == false) return error.LtoRequiresLld;
break :b true;

View File

@ -854,15 +854,21 @@ pub const Object = struct {
pub const DITypeMap = std.AutoArrayHashMapUnmanaged(InternPool.Index, AnnotatedDITypePtr);
pub fn create(arena: Allocator, options: link.File.OpenOptions) !*Object {
const gpa = options.comp.gpa;
const llvm_target_triple = try targetTriple(arena, options.target);
if (build_options.only_c) unreachable;
const comp = options.comp;
const gpa = comp.gpa;
const target = comp.root_mod.resolved_target.result;
const llvm_target_triple = try targetTriple(arena, target);
const strip = comp.root_mod.strip;
const optimize_mode = comp.root_mod.optimize_mode;
const pic = comp.root_mod.pic;
var builder = try Builder.init(.{
.allocator = gpa,
.use_lib_llvm = options.use_lib_llvm,
.strip = options.strip or !options.use_lib_llvm, // TODO
.name = options.root_name,
.target = options.target,
.use_lib_llvm = comp.config.use_lib_llvm,
.strip = strip or !comp.config.use_lib_llvm, // TODO
.name = comp.root_name,
.target = target,
.triple = llvm_target_triple,
});
errdefer builder.deinit();
@ -870,10 +876,18 @@ pub const Object = struct {
var target_machine: if (build_options.have_llvm) *llvm.TargetMachine else void = undefined;
var target_data: if (build_options.have_llvm) *llvm.TargetData else void = undefined;
if (builder.useLibLlvm()) {
if (!options.strip) {
switch (options.target.ofmt) {
.coff => builder.llvm.module.?.addModuleCodeViewFlag(),
else => builder.llvm.module.?.addModuleDebugInfoFlag(options.dwarf_format == std.dwarf.Format.@"64"),
debug_info: {
const debug_format = options.debug_format orelse b: {
if (strip) break :b .strip;
break :b switch (target.ofmt) {
.coff => .code_view,
else => .{ .dwarf = .@"32" },
};
};
switch (debug_format) {
.strip => break :debug_info,
.code_view => builder.llvm.module.?.addModuleCodeViewFlag(),
.dwarf => |f| builder.llvm.module.?.addModuleDebugInfoFlag(f == .@"64"),
}
builder.llvm.di_builder = builder.llvm.module.?.createDIBuilder(true);
@ -892,8 +906,8 @@ pub const Object = struct {
// TODO: the only concern I have with this is WASI as either host or target, should
// we leave the paths as relative then?
const compile_unit_dir_z = blk: {
if (options.module) |mod| m: {
const d = try mod.root_mod.root.joinStringZ(arena, "");
if (comp.module) |zcu| m: {
const d = try zcu.root_mod.root.joinStringZ(arena, "");
if (d.len == 0) break :m;
if (std.fs.path.isAbsolute(d)) break :blk d;
break :blk std.fs.realpathAlloc(arena, d) catch d;
@ -903,9 +917,9 @@ pub const Object = struct {
builder.llvm.di_compile_unit = builder.llvm.di_builder.?.createCompileUnit(
DW.LANG.C99,
builder.llvm.di_builder.?.createFile(options.root_name, compile_unit_dir_z),
builder.llvm.di_builder.?.createFile(comp.root_name, compile_unit_dir_z),
producer.slice(&builder).?,
options.optimize_mode != .Debug,
optimize_mode != .Debug,
"", // flags
0, // runtime version
"", // split name
@ -914,19 +928,19 @@ pub const Object = struct {
);
}
const opt_level: llvm.CodeGenOptLevel = if (options.optimize_mode == .Debug)
const opt_level: llvm.CodeGenOptLevel = if (optimize_mode == .Debug)
.None
else
.Aggressive;
const reloc_mode: llvm.RelocMode = if (options.pic)
const reloc_mode: llvm.RelocMode = if (pic)
.PIC
else if (options.link_mode == .Dynamic)
else if (comp.config.link_mode == .Dynamic)
llvm.RelocMode.DynamicNoPIC
else
.Static;
const code_model: llvm.CodeModel = switch (options.machine_code_model) {
const code_model: llvm.CodeModel = switch (comp.root_mod.code_model) {
.default => .Default,
.tiny => .Tiny,
.small => .Small,
@ -941,15 +955,15 @@ pub const Object = struct {
target_machine = llvm.TargetMachine.create(
builder.llvm.target.?,
builder.target_triple.slice(&builder).?,
if (options.target.cpu.model.llvm_name) |s| s.ptr else null,
options.llvm_cpu_features,
if (target.cpu.model.llvm_name) |s| s.ptr else null,
comp.root_mod.resolved_target.llvm_cpu_features.?,
opt_level,
reloc_mode,
code_model,
options.function_sections,
options.data_sections,
options.function_sections orelse false,
options.data_sections orelse false,
float_abi,
if (target_util.llvmMachineAbi(options.target)) |s| s.ptr else null,
if (target_util.llvmMachineAbi(target)) |s| s.ptr else null,
);
errdefer target_machine.dispose();
@ -958,15 +972,15 @@ pub const Object = struct {
builder.llvm.module.?.setModuleDataLayout(target_data);
if (options.pic) builder.llvm.module.?.setModulePICLevel();
if (options.pie) builder.llvm.module.?.setModulePIELevel();
if (pic) builder.llvm.module.?.setModulePICLevel();
if (comp.config.pie) builder.llvm.module.?.setModulePIELevel();
if (code_model != .Default) builder.llvm.module.?.setModuleCodeModel(code_model);
if (options.opt_bisect_limit >= 0) {
builder.llvm.context.setOptBisectLimit(std.math.lossyCast(c_int, options.opt_bisect_limit));
if (comp.llvm_opt_bisect_limit >= 0) {
builder.llvm.context.setOptBisectLimit(comp.llvm_opt_bisect_limit);
}
builder.data_layout = try builder.fmt("{}", .{DataLayoutBuilder{ .target = options.target }});
builder.data_layout = try builder.fmt("{}", .{DataLayoutBuilder{ .target = target }});
if (std.debug.runtime_safety) {
const rep = target_data.stringRep();
defer llvm.disposeMessage(rep);
@ -981,13 +995,13 @@ pub const Object = struct {
obj.* = .{
.gpa = gpa,
.builder = builder,
.module = options.module.?,
.module = comp.module.?,
.di_map = .{},
.di_builder = if (builder.useLibLlvm()) builder.llvm.di_builder else null, // TODO
.di_compile_unit = if (builder.useLibLlvm()) builder.llvm.di_compile_unit else null,
.target_machine = target_machine,
.target_data = target_data,
.target = options.target,
.target = target,
.decl_map = .{},
.anon_decl_map = .{},
.named_enum_map = .{},

View File

@ -32,13 +32,6 @@ pub const SystemLib = struct {
path: ?[]const u8,
};
/// When adding a new field, remember to update `hashAddFrameworks`.
pub const Framework = struct {
needed: bool = false,
weak: bool = false,
path: []const u8,
};
pub const SortSection = enum { name, alignment };
pub const CacheMode = enum { incremental, whole };
@ -56,14 +49,6 @@ pub fn hashAddSystemLibs(
}
}
pub fn hashAddFrameworks(man: *Cache.Manifest, hm: []const Framework) !void {
for (hm) |value| {
man.hash.add(value.needed);
man.hash.add(value.weak);
_ = try man.addFile(value.path, null);
}
}
pub const producer_string = if (builtin.is_test) "zig test" else "zig " ++ build_options.version;
pub const HashStyle = enum { sysv, gnu, both };
@ -81,6 +66,19 @@ pub const File = struct {
/// When linking with LLD, this linker code will output an object file only at
/// this location, and then this path can be placed on the LLD linker line.
intermediary_basename: ?[]const u8 = null,
disable_lld_caching: bool,
gc_sections: bool,
build_id: std.zig.BuildId,
rpath_list: []const []const u8,
/// List of symbols forced as undefined in the symbol table
/// thus forcing their resolution by the linker.
/// Corresponds to `-u <symbol>` for ELF/MachO and `/include:<symbol>` for COFF/PE.
force_undefined_symbols: std.StringArrayHashMapUnmanaged(void),
allow_shlib_undefined: bool,
stack_size: u64,
debug_format: DebugFormat,
function_sections: bool,
data_sections: bool,
/// Prevents other processes from clobbering files in the output directory
/// of this linking operation.
@ -88,6 +86,12 @@ pub const File = struct {
child_pid: ?std.ChildProcess.Id = null,
pub const DebugFormat = union(enum) {
strip,
dwarf: std.dwarf.Format,
code_view,
};
pub const OpenOptions = struct {
comp: *Compilation,
emit: Compilation.Emit,
@ -97,7 +101,7 @@ pub const File = struct {
/// Virtual address of the entry point procedure relative to image base.
entry_addr: ?u64,
stack_size_override: ?u64,
stack_size: ?u64,
image_base_override: ?u64,
function_sections: bool,
data_sections: bool,
@ -128,7 +132,6 @@ pub const File = struct {
max_memory: ?u64,
export_symbol_names: []const []const u8,
global_base: ?u64,
verbose_link: bool,
dll_export_fns: bool,
skip_linker_dependencies: bool,
parent_compilation_link_libc: bool,
@ -139,7 +142,7 @@ pub const File = struct {
sort_section: ?SortSection,
major_subsystem_version: ?u32,
minor_subsystem_version: ?u32,
gc_sections: ?bool = null,
gc_sections: ?bool,
allow_shlib_undefined: ?bool,
subsystem: ?std.Target.SubSystem,
version_script: ?[]const u8,
@ -147,11 +150,7 @@ pub const File = struct {
print_gc_sections: bool,
print_icf_sections: bool,
print_map: bool,
opt_bisect_limit: i32,
/// List of symbols forced as undefined in the symbol table
/// thus forcing their resolution by the linker.
/// Corresponds to `-u <symbol>` for ELF/MachO and `/include:<symbol>` for COFF/PE.
force_undefined_symbols: std.StringArrayHashMapUnmanaged(void),
/// Use a wrapper function for symbol. Any undefined reference to symbol
/// will be resolved to __wrap_symbol. Any undefined reference to
@ -163,7 +162,7 @@ pub const File = struct {
compatibility_version: ?std.SemanticVersion,
dwarf_format: ?std.dwarf.Format,
debug_format: ?DebugFormat,
// TODO: remove this. libraries are resolved by the frontend.
lib_dirs: []const []const u8,
@ -184,8 +183,7 @@ pub const File = struct {
headerpad_max_install_names: bool,
/// (Darwin) remove dylibs that are unreachable by the entry point or exported symbols
dead_strip_dylibs: bool,
framework_dirs: []const []const u8,
frameworks: []const Framework,
frameworks: []const MachO.Framework,
darwin_sdk_layout: ?MachO.SdkLayout,
/// (Windows) PDB source path prefix to instruct the linker how to resolve relative
@ -228,7 +226,7 @@ pub const File = struct {
.coff, .elf, .macho, .plan9, .wasm => {
if (build_options.only_c) unreachable;
if (base.file != null) return;
const emit = base.options.emit orelse return;
const emit = base.emit;
if (base.child_pid) |pid| {
if (builtin.os.tag == .windows) {
base.cast(Coff).?.ptraceAttach(pid) catch |err| {
@ -256,10 +254,13 @@ pub const File = struct {
}
}
}
const use_lld = build_options.have_llvm and base.comp.config.use_lld;
const output_mode = base.comp.config.output_mode;
const link_mode = base.comp.config.link_mode;
base.file = try emit.directory.handle.createFile(emit.sub_path, .{
.truncate = false,
.read = true,
.mode = determineMode(base.options),
.mode = determineMode(use_lld, output_mode, link_mode),
});
},
.c, .spirv, .nvptx => {},
@ -267,9 +268,13 @@ pub const File = struct {
}
pub fn makeExecutable(base: *File) !void {
switch (base.options.output_mode) {
const output_mode = base.comp.config.output_mode;
const link_mode = base.comp.config.link_mode;
const use_lld = build_options.have_llvm and base.comp.config.use_lld;
switch (output_mode) {
.Obj => return,
.Lib => switch (base.options.link_mode) {
.Lib => switch (link_mode) {
.Static => return,
.Dynamic => {},
},
@ -278,7 +283,6 @@ pub const File = struct {
switch (base.tag) {
.elf => if (base.file) |f| {
if (build_options.only_c) unreachable;
const use_lld = build_options.have_llvm and base.options.use_lld;
if (base.intermediary_basename != null and use_lld) {
// The file we have open is not the final file that we want to
// make executable, so we don't have to close it.
@ -596,7 +600,7 @@ pub const File = struct {
return @fieldParentPtr(C, "base", base).flush(comp, prog_node);
}
if (comp.clang_preprocessor_mode == .yes) {
const emit = base.options.emit orelse return; // -fno-emit-bin
const emit = base.emit;
// TODO: avoid extra link step when it's just 1 object file (the `zig cc -c` case)
// Until then, we do `lld -r -o output.o input.o` even though the output is the same
// as the input. For the preprocessing case (`zig cc -E -o foo`) we copy the file
@ -610,8 +614,10 @@ pub const File = struct {
return;
}
const use_lld = build_options.have_llvm and base.options.use_lld;
if (use_lld and base.options.output_mode == .Lib and base.options.link_mode == .Static) {
const use_lld = build_options.have_llvm and base.comp.config.use_lld;
const output_mode = base.comp.config.output_mode;
const link_mode = base.comp.config.link_mode;
if (use_lld and output_mode == .Lib and link_mode == .Static) {
return base.linkAsArchive(comp, prog_node);
}
switch (base.tag) {
@ -845,8 +851,6 @@ pub const File = struct {
}
pub fn linkAsArchive(base: *File, comp: *Compilation, prog_node: *std.Progress.Node) FlushError!void {
const emit = base.options.emit orelse return;
const tracy = trace(@src());
defer tracy.end();
@ -854,22 +858,23 @@ pub const File = struct {
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const directory = emit.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{emit.sub_path});
const directory = base.emit.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{base.emit.sub_path});
const full_out_path_z = try arena.dupeZ(u8, full_out_path);
const opt_zcu = base.comp.module;
// If there is no Zig code to compile, then we should skip flushing the output file
// because it will not be part of the linker line anyway.
const module_obj_path: ?[]const u8 = if (base.options.module != null) blk: {
const zcu_obj_path: ?[]const u8 = if (opt_zcu != null) blk: {
try base.flushModule(comp, prog_node);
const dirname = fs.path.dirname(full_out_path_z) orelse ".";
break :blk try fs.path.join(arena, &.{ dirname, base.intermediary_basename.? });
} else null;
log.debug("module_obj_path={s}", .{if (module_obj_path) |s| s else "(null)"});
log.debug("zcu_obj_path={s}", .{if (zcu_obj_path) |s| s else "(null)"});
const compiler_rt_path: ?[]const u8 = if (base.options.include_compiler_rt)
const compiler_rt_path: ?[]const u8 = if (base.comp.include_compiler_rt)
comp.compiler_rt_obj.?.full_object_path
else
null;
@ -881,17 +886,19 @@ pub const File = struct {
const id_symlink_basename = "llvm-ar.id";
var man: Cache.Manifest = undefined;
defer if (!base.options.disable_lld_caching) man.deinit();
defer if (!base.disable_lld_caching) man.deinit();
const objects = base.comp.objects;
var digest: [Cache.hex_digest_len]u8 = undefined;
if (!base.options.disable_lld_caching) {
if (!base.disable_lld_caching) {
man = comp.cache_parent.obtain();
// We are about to obtain this lock, so here we give other processes a chance first.
base.releaseLock();
for (base.options.objects) |obj| {
for (objects) |obj| {
_ = try man.addFile(obj.path, null);
man.hash.add(obj.must_link);
man.hash.add(obj.loption);
@ -904,7 +911,7 @@ pub const File = struct {
_ = try man.addFile(key.status.success.res_path, null);
}
}
try man.addOptionalFile(module_obj_path);
try man.addOptionalFile(zcu_obj_path);
try man.addOptionalFile(compiler_rt_path);
// We don't actually care whether it's a cache hit or miss; we just need the digest and the lock.
@ -934,11 +941,11 @@ pub const File = struct {
}
const win32_resource_table_len = if (build_options.only_core_functionality) 0 else comp.win32_resource_table.count();
const num_object_files = base.options.objects.len + comp.c_object_table.count() + win32_resource_table_len + 2;
const num_object_files = objects.len + comp.c_object_table.count() + win32_resource_table_len + 2;
var object_files = try std.ArrayList([*:0]const u8).initCapacity(base.allocator, num_object_files);
defer object_files.deinit();
for (base.options.objects) |obj| {
for (objects) |obj| {
object_files.appendAssumeCapacity(try arena.dupeZ(u8, obj.path));
}
for (comp.c_object_table.keys()) |key| {
@ -949,14 +956,14 @@ pub const File = struct {
object_files.appendAssumeCapacity(try arena.dupeZ(u8, key.status.success.res_path));
}
}
if (module_obj_path) |p| {
if (zcu_obj_path) |p| {
object_files.appendAssumeCapacity(try arena.dupeZ(u8, p));
}
if (compiler_rt_path) |p| {
object_files.appendAssumeCapacity(try arena.dupeZ(u8, p));
}
if (base.options.verbose_link) {
if (comp.verbose_link) {
std.debug.print("ar rcs {s}", .{full_out_path_z});
for (object_files.items) |arg| {
std.debug.print(" {s}", .{arg});
@ -972,7 +979,7 @@ pub const File = struct {
const bad = llvm_bindings.WriteArchive(full_out_path_z, object_files.items.ptr, object_files.items.len, os_tag);
if (bad) return error.UnableToWriteArchive;
if (!base.options.disable_lld_caching) {
if (!base.disable_lld_caching) {
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
log.warn("failed to save archive hash digest file: {s}", .{@errorName(err)});
};
@ -1090,6 +1097,34 @@ pub const File = struct {
}
}
pub fn isStatic(self: File) bool {
return self.base.options.link_mode == .Static;
}
pub fn isObject(self: File) bool {
const output_mode = self.comp.config.output_mode;
return output_mode == .Obj;
}
pub fn isExe(self: File) bool {
const output_mode = self.comp.config.output_mode;
return output_mode == .Exe;
}
pub fn isStaticLib(self: File) bool {
const output_mode = self.comp.config.output_mode;
return output_mode == .Lib and self.isStatic();
}
pub fn isRelocatable(self: File) bool {
return self.isObject() or self.isStaticLib();
}
pub fn isDynLib(self: File) bool {
const output_mode = self.comp.config.output_mode;
return output_mode == .Lib and !self.isStatic();
}
pub const C = @import("link/C.zig");
pub const Coff = @import("link/Coff.zig");
pub const Plan9 = @import("link/Plan9.zig");

View File

@ -5,6 +5,7 @@ const Allocator = std.mem.Allocator;
const fs = std.fs;
const C = @This();
const build_options = @import("build_options");
const Module = @import("../Module.zig");
const InternPool = @import("../InternPool.zig");
const Alignment = InternPool.Alignment;
@ -91,28 +92,40 @@ pub fn addString(this: *C, s: []const u8) Allocator.Error!String {
};
}
pub fn openPath(gpa: Allocator, sub_path: []const u8, options: link.Options) !*C {
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*C {
assert(options.target.ofmt == .c);
const optimize_mode = options.comp.root_mod.optimize_mode;
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
const use_llvm = options.comp.config.use_llvm;
if (options.use_llvm) return error.LLVMHasNoCBackend;
if (options.use_lld) return error.LLDHasNoCBackend;
// These are caught by `Compilation.Config.resolve`.
assert(!use_lld);
assert(!use_llvm);
const file = try options.emit.?.directory.handle.createFile(sub_path, .{
const emit = options.emit;
const file = try emit.directory.handle.createFile(emit.sub_path, .{
// Truncation is done on `flush`.
.truncate = false,
.mode = link.determineMode(options),
});
errdefer file.close();
const c_file = try gpa.create(C);
errdefer gpa.destroy(c_file);
const c_file = try arena.create(C);
c_file.* = .{
.base = .{
.tag = .c,
.options = options,
.comp = options.comp,
.emit = emit,
.gc_sections = options.gc_sections orelse optimize_mode != .Debug,
.stack_size = options.stack_size orelse 16777216,
.allow_shlib_undefined = options.allow_shlib_undefined orelse false,
.file = file,
.allocator = gpa,
.disable_lld_caching = options.disable_lld_caching,
.build_id = options.build_id,
.rpath_list = options.rpath_list,
.force_undefined_symbols = options.force_undefined_symbols,
},
};
@ -120,7 +133,7 @@ pub fn openPath(gpa: Allocator, sub_path: []const u8, options: link.Options) !*C
}
pub fn deinit(self: *C) void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
for (self.decl_table.values()) |*db| {
db.deinit(gpa);
@ -141,7 +154,7 @@ pub fn deinit(self: *C) void {
}
pub fn freeDecl(self: *C, decl_index: InternPool.DeclIndex) void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
if (self.decl_table.fetchSwapRemove(decl_index)) |kv| {
var decl_block = kv.value;
decl_block.deinit(gpa);
@ -155,7 +168,7 @@ pub fn updateFunc(
air: Air,
liveness: Liveness,
) !void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const func = module.funcInfo(func_index);
const decl_index = func.owner_decl;
@ -223,7 +236,7 @@ pub fn updateFunc(
}
fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const anon_decl = self.anon_decls.keys()[i];
const fwd_decl = &self.fwd_decl_buf;
@ -285,7 +298,7 @@ pub fn updateDecl(self: *C, module: *Module, decl_index: InternPool.DeclIndex) !
const tracy = trace(@src());
defer tracy.end();
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const gop = try self.decl_table.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
@ -352,7 +365,8 @@ pub fn flush(self: *C, comp: *Compilation, prog_node: *std.Progress.Node) !void
}
fn abiDefines(self: *C, target: std.Target) !std.ArrayList(u8) {
var defines = std.ArrayList(u8).init(self.base.allocator);
const gpa = self.base.comp.gpa;
var defines = std.ArrayList(u8).init(gpa);
errdefer defines.deinit();
const writer = defines.writer();
switch (target.abi) {
@ -371,7 +385,7 @@ pub fn flushModule(self: *C, _: *Compilation, prog_node: *std.Progress.Node) !vo
sub_prog_node.activate();
defer sub_prog_node.end();
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const module = self.base.options.module.?;
{
@ -520,7 +534,7 @@ fn flushCTypes(
pass: codegen.DeclGen.Pass,
decl_ctypes: codegen.CType.Store,
) FlushDeclError!void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const mod = self.base.options.module.?;
const decl_ctypes_len = decl_ctypes.count();
@ -601,7 +615,7 @@ fn flushCTypes(
}
fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const fwd_decl = &self.lazy_fwd_decl_buf;
const code = &self.lazy_code_buf;
@ -643,7 +657,7 @@ fn flushLazyFn(
ctypes: *codegen.CType.Store,
lazy_fn: codegen.LazyFnMap.Entry,
) FlushDeclError!void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const fwd_decl = &self.lazy_fwd_decl_buf;
const code = &self.lazy_code_buf;
@ -683,7 +697,7 @@ fn flushLazyFn(
}
fn flushLazyFns(self: *C, f: *Flush, lazy_fns: codegen.LazyFnMap) FlushDeclError!void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
try f.lazy_fns.ensureUnusedCapacity(gpa, @intCast(lazy_fns.count()));
var it = lazy_fns.iterator();
@ -702,7 +716,7 @@ fn flushDeclBlock(
export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void),
extern_symbol_name: InternPool.OptionalNullTerminatedString,
) FlushDeclError!void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
try self.flushLazyFns(f, decl_block.lazy_fns);
try f.all_buffers.ensureUnusedCapacity(gpa, 1);
fwd_decl: {

View File

@ -232,7 +232,7 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Coff {
errdefer self.base.destroy();
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
const use_llvm = build_options.have_llvm and options.comp.config.use_llvm;
const use_llvm = options.comp.config.use_llvm;
if (use_lld and use_llvm) {
// LLVM emits the object file; LLD links it into the final product.
@ -353,6 +353,7 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Coff {
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*Coff {
const target = options.comp.root_mod.resolved_target.result;
const optimize_mode = options.comp.root_mod.optimize_mode;
const ptr_width: PtrWidth = switch (target.ptrBitWidth()) {
0...32 => .p32,
33...64 => .p64,
@ -367,14 +368,24 @@ pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*Coff {
.tag = .coff,
.comp = options.comp,
.emit = options.emit,
.stack_size = options.stack_size orelse 16777216,
.gc_sections = options.gc_sections orelse (optimize_mode != .Debug),
.allow_shlib_undefined = options.allow_shlib_undefined orelse false,
.file = null,
.disable_lld_caching = options.disable_lld_caching,
.build_id = options.build_id,
.rpath_list = options.rpath_list,
.force_undefined_symbols = options.force_undefined_symbols,
.debug_format = options.debug_format orelse .code_view,
.function_sections = options.function_sections,
.data_sections = options.data_sections,
},
.ptr_width = ptr_width,
.page_size = page_size,
.data_directories = comptime mem.zeroes([coff.IMAGE_NUMBEROF_DIRECTORY_ENTRIES]coff.ImageDataDirectory),
};
const use_llvm = build_options.have_llvm and options.comp.config.use_llvm;
const use_llvm = options.comp.config.use_llvm;
if (use_llvm and options.comp.config.have_zcu) {
self.llvm_object = try LlvmObject.create(arena, options);
}
@ -1494,8 +1505,6 @@ pub fn updateExports(
if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, exports);
if (self.base.options.emit == null) return;
const gpa = self.base.comp.gpa;
const metadata = switch (exported) {
@ -1645,13 +1654,7 @@ fn resolveGlobalSymbol(self: *Coff, current: SymbolWithLoc) !void {
}
pub fn flush(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {
if (self.base.options.emit == null) {
if (self.llvm_object) |llvm_object| {
return try llvm_object.flushModule(comp, prog_node);
}
return;
}
const use_lld = build_options.have_llvm and self.base.options.use_lld;
const use_lld = build_options.have_llvm and self.base.comp.config.use_lld;
if (use_lld) {
return lld.linkWithLLD(self, comp, prog_node);
}

View File

@ -25,8 +25,8 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
const directory = self.base.emit.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path});
// If there is no Zig code to compile, then we should skip flushing the output file because it
// will not be part of the linker line anyway.
@ -50,6 +50,7 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
const is_exe_or_dyn_lib = is_dyn_lib or self.base.options.output_mode == .Exe;
const link_in_crt = self.base.options.link_libc and is_exe_or_dyn_lib;
const target = self.base.options.target;
const optimize_mode = self.base.comp.root_mod.optimize_mode;
// See link/Elf.zig for comments on how this mechanism works.
const id_symlink_basename = "lld.id";
@ -79,7 +80,7 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
}
try man.addOptionalFile(module_obj_path);
man.hash.addOptionalBytes(self.base.options.entry);
man.hash.addOptional(self.base.options.stack_size_override);
man.hash.add(self.base.stack_size);
man.hash.addOptional(self.base.options.image_base_override);
man.hash.addListOfBytes(self.base.options.lib_dirs);
man.hash.add(self.base.options.skip_linker_dependencies);
@ -93,14 +94,14 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
}
}
}
try link.hashAddSystemLibs(&man, self.base.options.system_libs);
try link.hashAddSystemLibs(&man, self.base.comp.system_libs);
man.hash.addListOfBytes(self.base.options.force_undefined_symbols.keys());
man.hash.addOptional(self.base.options.subsystem);
man.hash.add(self.base.options.is_test);
man.hash.add(self.base.options.tsaware);
man.hash.add(self.base.options.nxcompat);
man.hash.add(self.base.options.dynamicbase);
man.hash.addOptional(self.base.options.allow_shlib_undefined);
man.hash.addOptional(self.base.allow_shlib_undefined);
// strip does not need to go into the linker hash because it is part of the hash namespace
man.hash.addOptional(self.base.options.major_subsystem_version);
man.hash.addOptional(self.base.options.minor_subsystem_version);
@ -185,15 +186,14 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
try argv.append(try allocPrint(arena, "-VERSION:{}.{}", .{ version.major, version.minor }));
}
if (self.base.options.lto) {
switch (self.base.options.optimize_mode) {
switch (optimize_mode) {
.Debug => {},
.ReleaseSmall => try argv.append("-OPT:lldlto=2"),
.ReleaseFast, .ReleaseSafe => try argv.append("-OPT:lldlto=3"),
}
}
if (self.base.options.output_mode == .Exe) {
const stack_size = self.base.options.stack_size_override orelse 16777216;
try argv.append(try allocPrint(arena, "-STACK:{d}", .{stack_size}));
try argv.append(try allocPrint(arena, "-STACK:{d}", .{self.base.stack_size}));
}
if (self.base.options.image_base_override) |image_base| {
try argv.append(try std.fmt.allocPrint(arena, "-BASE:{d}", .{image_base}));
@ -232,10 +232,8 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
if (!self.base.options.dynamicbase) {
try argv.append("-dynamicbase:NO");
}
if (self.base.options.allow_shlib_undefined) |allow_shlib_undefined| {
if (allow_shlib_undefined) {
try argv.append("-FORCE:UNRESOLVED");
}
if (self.base.allow_shlib_undefined) {
try argv.append("-FORCE:UNRESOLVED");
}
try argv.append(try allocPrint(arena, "-OUT:{s}", .{full_out_path}));
@ -419,7 +417,7 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
try argv.append(try comp.get_libc_crt_file(arena, "uuid.lib"));
for (mingw.always_link_libs) |name| {
if (!self.base.options.system_libs.contains(name)) {
if (!self.base.comp.system_libs.contains(name)) {
const lib_basename = try allocPrint(arena, "{s}.lib", .{name});
try argv.append(try comp.get_libc_crt_file(arena, lib_basename));
}
@ -429,7 +427,7 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
.Dynamic => "",
.Static => "lib",
};
const d_str = switch (self.base.options.optimize_mode) {
const d_str = switch (optimize_mode) {
.Debug => "d",
else => "",
};
@ -489,8 +487,8 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
if (comp.compiler_rt_lib) |lib| try argv.append(lib.full_object_path);
}
try argv.ensureUnusedCapacity(self.base.options.system_libs.count());
for (self.base.options.system_libs.keys()) |key| {
try argv.ensureUnusedCapacity(self.base.comp.system_libs.count());
for (self.base.comp.system_libs.keys()) |key| {
const lib_basename = try allocPrint(arena, "{s}.lib", .{key});
if (comp.crt_files.get(lib_basename)) |crt_file| {
argv.appendAssumeCapacity(crt_file.full_object_path);
@ -516,7 +514,7 @@ pub fn linkWithLLD(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
return error.DllImportLibraryNotFound;
}
if (self.base.options.verbose_link) {
if (self.base.comp.verbose_link) {
// Skip over our own name so that the LLD linker name is the first argv item.
Compilation.dump_argv(argv.items[1..]);
}

View File

@ -206,7 +206,10 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Elf {
assert(target.ofmt == .elf);
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
const use_llvm = build_options.have_llvm and options.comp.config.use_llvm;
const use_llvm = options.comp.config.use_llvm;
const opt_zcu = options.comp.module;
const output_mode = options.comp.config.output_mode;
const link_mode = options.comp.config.link_mode;
const self = try createEmpty(arena, options);
errdefer self.base.destroy();
@ -216,8 +219,8 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Elf {
return self;
}
const is_obj = options.output_mode == .Obj;
const is_obj_or_ar = is_obj or (options.output_mode == .Lib and options.link_mode == .Static);
const is_obj = output_mode == .Obj;
const is_obj_or_ar = is_obj or (output_mode == .Lib and link_mode == .Static);
const sub_path = if (!use_lld) options.emit.sub_path else p: {
// Open a temporary object file, not the final output file because we
@ -229,10 +232,10 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Elf {
break :p o_file_path;
};
self.base.file = try options.emit.?.directory.handle.createFile(sub_path, .{
self.base.file = try options.emit.directory.handle.createFile(sub_path, .{
.truncate = false,
.read = true,
.mode = link.determineMode(options),
.mode = link.File.determineMode(use_lld, output_mode, link_mode),
});
const gpa = options.comp.gpa;
@ -292,24 +295,34 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Elf {
});
}
if (options.module != null and !options.use_llvm) {
const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
self.files.set(index, .{ .zig_object = .{
.index = index,
.path = try std.fmt.allocPrint(arena, "{s}.o", .{std.fs.path.stem(
options.module.?.main_mod.root_src_path,
)}),
} });
self.zig_object_index = index;
try self.zigObjectPtr().?.init(self);
try self.initMetadata();
if (opt_zcu) |zcu| {
if (!use_llvm) {
const index: File.Index = @intCast(try self.files.addOne(gpa));
self.files.set(index, .{ .zig_object = .{
.index = index,
.path = try std.fmt.allocPrint(arena, "{s}.o", .{std.fs.path.stem(
zcu.main_mod.root_src_path,
)}),
} });
self.zig_object_index = index;
try self.zigObjectPtr().?.init(self);
try self.initMetadata(.{
.symbol_count_hint = options.symbol_count_hint,
.program_code_size_hint = options.program_code_size_hint,
});
}
}
return self;
}
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*Elf {
const use_llvm = options.comp.config.use_llvm;
const optimize_mode = options.comp.root_mod.optimize_mode;
const target = options.comp.root_mod.resolved_target.result;
const output_mode = options.comp.config.output_mode;
const link_mode = options.comp.config.link_mode;
const is_native_os = options.comp.root_mod.resolved_target.is_native_os;
const ptr_width: PtrWidth = switch (target.ptrBitWidth()) {
0...32 => .p32,
33...64 => .p64,
@ -322,7 +335,7 @@ pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*Elf {
.sparc64 => 0x2000,
else => 0x1000,
};
const is_dyn_lib = options.output_mode == .Lib and options.link_mode == .Dynamic;
const is_dyn_lib = output_mode == .Lib and link_mode == .Dynamic;
const default_sym_version: elf.Elf64_Versym = if (is_dyn_lib or options.rdynamic)
elf.VER_NDX_GLOBAL
else
@ -333,13 +346,23 @@ pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*Elf {
.tag = .elf,
.comp = options.comp,
.emit = options.emit,
.gc_sections = options.gc_sections orelse (optimize_mode != .Debug and output_mode != .Obj),
.stack_size = options.stack_size orelse 16777216,
.allow_shlib_undefined = options.allow_shlib_undefined orelse !is_native_os,
.file = null,
.disable_lld_caching = options.disable_lld_caching,
.build_id = options.build_id,
.rpath_list = options.rpath_list,
.force_undefined_symbols = options.force_undefined_symbols,
.debug_format = options.debug_format orelse .{ .dwarf = .@"32" },
.function_sections = options.function_sections,
.data_sections = options.data_sections,
},
.ptr_width = ptr_width,
.page_size = page_size,
.default_sym_version = default_sym_version,
};
if (options.use_llvm and options.comp.config.have_zcu) {
if (use_llvm and options.comp.config.have_zcu) {
self.llvm_object = try LlvmObject.create(arena, options);
}
@ -504,8 +527,13 @@ fn findFreeSpace(self: *Elf, object_size: u64, min_alignment: u64) u64 {
return start;
}
pub const InitMetadataOptions = struct {
symbol_count_hint: u64,
program_code_size_hint: u64,
};
/// TODO move to ZigObject
pub fn initMetadata(self: *Elf) !void {
pub fn initMetadata(self: *Elf, options: InitMetadataOptions) !void {
const gpa = self.base.comp.gpa;
const ptr_size = self.ptrWidthBytes();
const target = self.base.comp.root_mod.resolved_target.result;
@ -515,7 +543,7 @@ pub fn initMetadata(self: *Elf) !void {
const fillSection = struct {
fn fillSection(elf_file: *Elf, shdr: *elf.Elf64_Shdr, size: u64, phndx: ?u16) void {
if (elf_file.isRelocatable()) {
if (elf_file.base.isRelocatable()) {
const off = elf_file.findFreeSpace(size, shdr.sh_addralign);
shdr.sh_offset = off;
shdr.sh_size = size;
@ -530,9 +558,9 @@ pub fn initMetadata(self: *Elf) !void {
comptime assert(number_of_zig_segments == 5);
if (!self.isRelocatable()) {
if (!self.base.isRelocatable()) {
if (self.phdr_zig_load_re_index == null) {
const filesz = self.base.options.program_code_size_hint;
const filesz = options.program_code_size_hint;
const off = self.findFreeSpace(filesz, self.page_size);
self.phdr_zig_load_re_index = try self.addPhdr(.{
.type = elf.PT_LOAD,
@ -549,7 +577,7 @@ pub fn initMetadata(self: *Elf) !void {
// We really only need ptr alignment but since we are using PROGBITS, linux requires
// page align.
const alignment = if (is_linux) self.page_size else @as(u16, ptr_size);
const filesz = @as(u64, ptr_size) * self.base.options.symbol_count_hint;
const filesz = @as(u64, ptr_size) * options.symbol_count_hint;
const off = self.findFreeSpace(filesz, alignment);
self.phdr_zig_got_index = try self.addPhdr(.{
.type = elf.PT_LOAD,
@ -613,8 +641,8 @@ pub fn initMetadata(self: *Elf) !void {
.offset = std.math.maxInt(u64),
});
const shdr = &self.shdrs.items[self.zig_text_section_index.?];
fillSection(self, shdr, self.base.options.program_code_size_hint, self.phdr_zig_load_re_index);
if (self.isRelocatable()) {
fillSection(self, shdr, options.program_code_size_hint, self.phdr_zig_load_re_index);
if (self.base.isRelocatable()) {
const rela_shndx = try self.addRelaShdr(".rela.text.zig", self.zig_text_section_index.?);
try self.output_rela_sections.putNoClobber(gpa, self.zig_text_section_index.?, .{
.shndx = rela_shndx,
@ -630,7 +658,7 @@ pub fn initMetadata(self: *Elf) !void {
try self.last_atom_and_free_list_table.putNoClobber(gpa, self.zig_text_section_index.?, .{});
}
if (self.zig_got_section_index == null and !self.isRelocatable()) {
if (self.zig_got_section_index == null and !self.base.isRelocatable()) {
self.zig_got_section_index = try self.addSection(.{
.name = ".got.zig",
.type = elf.SHT_PROGBITS,
@ -661,7 +689,7 @@ pub fn initMetadata(self: *Elf) !void {
});
const shdr = &self.shdrs.items[self.zig_data_rel_ro_section_index.?];
fillSection(self, shdr, 1024, self.phdr_zig_load_ro_index);
if (self.isRelocatable()) {
if (self.base.isRelocatable()) {
const rela_shndx = try self.addRelaShdr(
".rela.data.rel.ro.zig",
self.zig_data_rel_ro_section_index.?,
@ -690,7 +718,7 @@ pub fn initMetadata(self: *Elf) !void {
});
const shdr = &self.shdrs.items[self.zig_data_section_index.?];
fillSection(self, shdr, 1024, self.phdr_zig_load_rw_index);
if (self.isRelocatable()) {
if (self.base.isRelocatable()) {
const rela_shndx = try self.addRelaShdr(
".rela.data.zig",
self.zig_data_section_index.?,
@ -930,13 +958,7 @@ pub fn markDirty(self: *Elf, shdr_index: u16) void {
}
pub fn flush(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {
if (self.base.options.emit == null) {
if (self.llvm_object) |llvm_object| {
try llvm_object.flushModule(comp, prog_node);
}
return;
}
const use_lld = build_options.have_llvm and self.base.options.use_lld;
const use_lld = build_options.have_llvm and self.base.comp.config.use_lld;
if (use_lld) {
return self.linkWithLLD(comp, prog_node);
}
@ -950,7 +972,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
if (self.llvm_object) |llvm_object| {
try llvm_object.flushModule(comp, prog_node);
const use_lld = build_options.have_llvm and self.base.options.use_lld;
const use_lld = build_options.have_llvm and self.base.comp.config.use_lld;
if (use_lld) return;
}
@ -959,13 +981,14 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
sub_prog_node.activate();
defer sub_prog_node.end();
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const target = self.base.comp.root_mod.resolved_target.result;
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
const link_mode = self.base.comp.config.link_mode;
const directory = self.base.emit.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path});
const module_obj_path: ?[]const u8 = if (self.base.intermediary_basename) |path| blk: {
if (fs.path.dirname(full_out_path)) |dirname| {
break :blk try fs.path.join(arena, &.{ dirname, path });
@ -973,10 +996,10 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
break :blk path;
}
} else null;
const gc_sections = self.base.options.gc_sections orelse false;
const gc_sections = self.base.gc_sections;
// --verbose-link
if (self.base.options.verbose_link) try self.dumpArgv(comp);
if (self.base.comp.verbose_link) try self.dumpArgv(comp);
const csu = try CsuObjects.init(arena, self.base.options, comp);
const compiler_rt_path: ?[]const u8 = blk: {
@ -986,8 +1009,8 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
};
if (self.zigObjectPtr()) |zig_object| try zig_object.flushModule(self);
if (self.isStaticLib()) return self.flushStaticLib(comp, module_obj_path);
if (self.isObject()) return self.flushObject(comp, module_obj_path);
if (self.base.isStaticLib()) return self.flushStaticLib(comp, module_obj_path);
if (self.base.isObject()) return self.flushObject(comp, module_obj_path);
// Here we will parse input positional and library files (if referenced).
// This will roughly match in any linker backend we support.
@ -1011,7 +1034,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
if (module_obj_path) |path| try positionals.append(.{ .path = path });
// rpaths
var rpath_table = std.StringArrayHashMap(void).init(self.base.allocator);
var rpath_table = std.StringArrayHashMap(void).init(gpa);
defer rpath_table.deinit();
for (self.base.options.rpath_list) |rpath| {
@ -1019,10 +1042,10 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
}
if (self.base.options.each_lib_rpath) {
var test_path = std.ArrayList(u8).init(self.base.allocator);
var test_path = std.ArrayList(u8).init(gpa);
defer test_path.deinit();
for (self.base.options.lib_dirs) |lib_dir_path| {
for (self.base.options.system_libs.keys()) |link_lib| {
for (self.base.comp.system_libs.keys()) |link_lib| {
if (!(try self.accessLibPath(&test_path, null, lib_dir_path, link_lib, .Dynamic)))
continue;
_ = try rpath_table.put(lib_dir_path, {});
@ -1064,8 +1087,8 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
var system_libs = std.ArrayList(SystemLib).init(arena);
try system_libs.ensureUnusedCapacity(self.base.options.system_libs.values().len);
for (self.base.options.system_libs.values()) |lib_info| {
try system_libs.ensureUnusedCapacity(self.base.comp.system_libs.values().len);
for (self.base.comp.system_libs.values()) |lib_info| {
system_libs.appendAssumeCapacity(.{ .needed = lib_info.needed, .path = lib_info.path.? });
}
@ -1127,7 +1150,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
.path = try comp.get_libc_crt_file(arena, "libc_nonshared.a"),
});
} else if (target.isMusl()) {
const path = try comp.get_libc_crt_file(arena, switch (self.base.options.link_mode) {
const path = try comp.get_libc_crt_file(arena, switch (link_mode) {
.Static => "libc.a",
.Dynamic => "libc.so",
});
@ -1224,7 +1247,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
if (self.entry_index == null) {
const entry: ?[]const u8 = entry: {
if (self.base.options.entry) |entry| break :entry entry;
if (!self.isDynLib()) break :entry "_start";
if (!self.base.isDynLib()) break :entry "_start";
break :entry null;
};
self.entry_index = if (entry) |name| self.globalByName(name) else null;
@ -1301,7 +1324,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
try self.writeAtoms();
try self.writeSyntheticSections();
if (self.entry_index == null and self.isExe()) {
if (self.entry_index == null and self.base.isExe()) {
log.debug("flushing. no_entry_point_found = true", .{});
self.error_flags.no_entry_point_found = true;
} else {
@ -1531,13 +1554,15 @@ pub fn flushObject(self: *Elf, comp: *Compilation, module_obj_path: ?[]const u8)
/// --verbose-link output
fn dumpArgv(self: *Elf, comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
const gpa = self.base.comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const target = self.base.comp.root_mod.resolved_target.result;
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
const link_mode = self.base.comp.config.link_mode;
const directory = self.base.emit.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path});
const module_obj_path: ?[]const u8 = if (self.base.intermediary_basename) |path| blk: {
if (fs.path.dirname(full_out_path)) |dirname| {
break :blk try fs.path.join(arena, &.{ dirname, path });
@ -1545,7 +1570,6 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
break :blk path;
}
} else null;
const gc_sections = self.base.options.gc_sections orelse false;
const csu = try CsuObjects.init(arena, self.base.options, comp);
const compiler_rt_path: ?[]const u8 = blk: {
@ -1558,20 +1582,20 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
try argv.append("zig");
if (self.isStaticLib()) {
if (self.base.isStaticLib()) {
try argv.append("ar");
} else {
try argv.append("ld");
}
if (self.isObject()) {
if (self.base.isObject()) {
try argv.append("-r");
}
try argv.append("-o");
try argv.append(full_out_path);
if (self.isRelocatable()) {
if (self.base.isRelocatable()) {
for (self.base.options.objects) |obj| {
try argv.append(obj.path);
}
@ -1591,7 +1615,7 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
}
}
if (self.isDynLib()) {
if (self.base.isDynLib()) {
if (self.base.options.soname) |name| {
try argv.append("-soname");
try argv.append(name);
@ -1624,16 +1648,16 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
}
}
if (self.base.options.stack_size_override) |ss| {
try argv.append("-z");
try argv.append(try std.fmt.allocPrint(arena, "stack-size={d}", .{ss}));
}
try argv.appendSlice(&.{
"-z",
try std.fmt.allocPrint(arena, "stack-size={d}", .{self.base.stack_size}),
});
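// For example, a 16 MiB stack (16777216 bytes) is dumped as the argument pair
// "-z" "stack-size=16777216".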
if (self.base.options.image_base_override) |image_base| {
try argv.append(try std.fmt.allocPrint(arena, "--image-base={d}", .{image_base}));
}
if (gc_sections) {
if (self.base.gc_sections) {
try argv.append("--gc-sections");
}
@ -1666,11 +1690,11 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
if (self.isStatic()) {
try argv.append("-static");
} else if (self.isDynLib()) {
} else if (self.base.isDynLib()) {
try argv.append("-shared");
}
if (self.base.options.pie and self.isExe()) {
if (self.base.options.pie and self.base.isExe()) {
try argv.append("-pie");
}
@ -1741,11 +1765,11 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
// Shared libraries.
// Worst-case, we need an --as-needed argument for every lib, as well
// as one before and one after.
try argv.ensureUnusedCapacity(self.base.options.system_libs.keys().len * 2 + 2);
try argv.ensureUnusedCapacity(self.base.comp.system_libs.keys().len * 2 + 2);
argv.appendAssumeCapacity("--as-needed");
var as_needed = true;
for (self.base.options.system_libs.values()) |lib_info| {
for (self.base.comp.system_libs.values()) |lib_info| {
const lib_as_needed = !lib_info.needed;
switch ((@as(u2, @intFromBool(lib_as_needed)) << 1) | @intFromBool(as_needed)) {
0b00, 0b11 => {},
@ -1780,7 +1804,7 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
// libc dep
if (self.base.options.link_libc) {
if (self.base.options.libc_installation != null) {
const needs_grouping = self.base.options.link_mode == .Static;
const needs_grouping = link_mode == .Static;
if (needs_grouping) try argv.append("--start-group");
try argv.appendSlice(target_util.libcFullLinkFlags(target));
if (needs_grouping) try argv.append("--end-group");
@ -1793,7 +1817,7 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
}
try argv.append(try comp.get_libc_crt_file(arena, "libc_nonshared.a"));
} else if (target.isMusl()) {
try argv.append(try comp.get_libc_crt_file(arena, switch (self.base.options.link_mode) {
try argv.append(try comp.get_libc_crt_file(arena, switch (link_mode) {
.Static => "libc.a",
.Dynamic => "libc.so",
}));
@ -2006,6 +2030,7 @@ fn accessLibPath(
lib_name: []const u8,
link_mode: ?std.builtin.LinkMode,
) !bool {
const gpa = self.base.comp.gpa;
const sep = fs.path.sep_str;
const target = self.base.comp.root_mod.resolved_target.result;
test_path.clearRetainingCapacity();
@ -2021,7 +2046,7 @@ fn accessLibPath(
suffix,
});
if (checked_paths) |cpaths| {
try cpaths.append(try self.base.allocator.dupe(u8, test_path.items));
try cpaths.append(try gpa.dupe(u8, test_path.items));
}
fs.cwd().access(test_path.items, .{}) catch |err| switch (err) {
error.FileNotFound => return false,
@ -2150,7 +2175,7 @@ fn markImportsExports(self: *Elf) void {
}
if (file_ptr.index() == file_index) {
global.flags.@"export" = true;
if (elf_file.isDynLib() and vis != .PROTECTED) {
if (elf_file.base.isDynLib() and vis != .PROTECTED) {
global.flags.import = true;
}
}
@ -2158,7 +2183,7 @@ fn markImportsExports(self: *Elf) void {
}
}.mark;
if (!self.isDynLib()) {
if (!self.base.isDynLib()) {
for (self.shared_objects.items) |index| {
for (self.file(index).?.globals()) |global_index| {
const global = self.symbol(global_index);
@ -2274,12 +2299,13 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
const tracy = trace(@src());
defer tracy.end();
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
const gpa = self.base.comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
const directory = self.base.emit.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path});
// If there is no Zig code to compile, then we should skip flushing the output file because it
// will not be part of the linker line anyway.
@ -2298,16 +2324,15 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
sub_prog_node.context.refresh();
defer sub_prog_node.end();
const is_obj = self.base.options.output_mode == .Obj;
const is_lib = self.base.options.output_mode == .Lib;
const is_dyn_lib = self.base.options.link_mode == .Dynamic and is_lib;
const is_exe_or_dyn_lib = is_dyn_lib or self.base.options.output_mode == .Exe;
const output_mode = self.base.comp.config.output_mode;
const is_obj = output_mode == .Obj;
const is_lib = output_mode == .Lib;
const link_mode = self.base.comp.config.link_mode;
const is_dyn_lib = link_mode == .Dynamic and is_lib;
const is_exe_or_dyn_lib = is_dyn_lib or output_mode == .Exe;
const have_dynamic_linker = self.base.options.link_libc and
self.base.options.link_mode == .Dynamic and is_exe_or_dyn_lib;
link_mode == .Dynamic and is_exe_or_dyn_lib;
const target = self.base.comp.root_mod.resolved_target.result;
const gc_sections = self.base.options.gc_sections orelse !is_obj;
const stack_size = self.base.options.stack_size_override orelse 16777216;
const allow_shlib_undefined = self.base.options.allow_shlib_undefined orelse !self.base.options.is_native_os;
const compiler_rt_path: ?[]const u8 = blk: {
if (comp.compiler_rt_lib) |x| break :blk x.full_object_path;
if (comp.compiler_rt_obj) |x| break :blk x.full_object_path;
@ -2354,7 +2379,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
// installation sources because they are always a product of the compiler version + target information.
man.hash.addOptionalBytes(self.base.options.entry);
man.hash.addOptional(self.base.options.image_base_override);
man.hash.add(gc_sections);
man.hash.add(self.base.gc_sections);
man.hash.addOptional(self.base.options.sort_section);
man.hash.add(self.base.options.eh_frame_hdr);
man.hash.add(self.base.options.emit_relocs);
@ -2362,9 +2387,9 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
man.hash.addListOfBytes(self.base.options.lib_dirs);
man.hash.addListOfBytes(self.base.options.rpath_list);
man.hash.add(self.base.options.each_lib_rpath);
if (self.base.options.output_mode == .Exe) {
man.hash.add(stack_size);
man.hash.add(self.base.options.build_id);
if (output_mode == .Exe) {
man.hash.add(self.base.stack_size);
man.hash.add(self.base.build_id);
}
man.hash.addListOfBytes(self.base.options.symbol_wrap_set.keys());
man.hash.add(self.base.options.skip_linker_dependencies);
@ -2390,9 +2415,9 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
}
man.hash.addOptionalBytes(self.base.options.soname);
man.hash.addOptional(self.base.options.version);
try link.hashAddSystemLibs(&man, self.base.options.system_libs);
try link.hashAddSystemLibs(&man, self.base.comp.system_libs);
man.hash.addListOfBytes(self.base.options.force_undefined_symbols.keys());
man.hash.add(allow_shlib_undefined);
man.hash.add(self.base.allow_shlib_undefined);
man.hash.add(self.base.options.bind_global_refs_locally);
man.hash.add(self.base.options.compress_debug_sections);
man.hash.add(self.base.options.tsan);
@ -2432,7 +2457,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
// copy when generating relocatables. Normally, we would expect `lld -r` to work.
// However, because LLD wants to resolve BPF relocations which it shouldn't, it fails
// before even generating the relocatable.
if (self.base.options.output_mode == .Obj and
if (output_mode == .Obj and
(self.base.options.lto or target.isBpfFreestanding()))
{
// In this case we must do a simple file copy
@ -2459,7 +2484,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
}
} else {
// Create an LLD command line and invoke it.
var argv = std.ArrayList([]const u8).init(self.base.allocator);
var argv = std.ArrayList([]const u8).init(gpa);
defer argv.deinit();
// We will invoke ourselves as a child process to gain access to LLD.
// This is necessary because LLD does not behave properly as a library -
@ -2503,15 +2528,17 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
.both => {}, // this is the default
}
if (self.base.options.output_mode == .Exe) {
try argv.append("-z");
try argv.append(try std.fmt.allocPrint(arena, "stack-size={d}", .{stack_size}));
if (output_mode == .Exe) {
try argv.appendSlice(&.{
"-z",
try std.fmt.allocPrint(arena, "stack-size={d}", .{self.base.stack_size}),
});
switch (self.base.options.build_id) {
switch (self.base.build_id) {
.none => {},
.fast, .uuid, .sha1, .md5 => {
try argv.append(try std.fmt.allocPrint(arena, "--build-id={s}", .{
@tagName(self.base.options.build_id),
@tagName(self.base.build_id),
}));
},
.hexstring => |hs| {
@ -2536,7 +2563,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
try argv.append(arg);
}
if (gc_sections) {
if (self.base.gc_sections) {
try argv.append("--gc-sections");
}
@ -2615,7 +2642,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
try argv.append(arg);
}
if (self.base.options.link_mode == .Static) {
if (link_mode == .Static) {
if (target.cpu.arch.isArmOrThumb()) {
try argv.append("-Bstatic");
} else {
@ -2625,7 +2652,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
try argv.append("-shared");
}
if (self.base.options.pie and self.base.options.output_mode == .Exe) {
if (self.base.options.pie and output_mode == .Exe) {
try argv.append("-pie");
}
@ -2648,7 +2675,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
if (csu.crtbegin) |v| try argv.append(v);
// rpaths
var rpath_table = std.StringHashMap(void).init(self.base.allocator);
var rpath_table = std.StringHashMap(void).init(gpa);
defer rpath_table.deinit();
for (self.base.options.rpath_list) |rpath| {
if ((try rpath_table.fetchPut(rpath, {})) == null) {
@ -2664,7 +2691,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
if (self.base.options.each_lib_rpath) {
var test_path = std.ArrayList(u8).init(arena);
for (self.base.options.lib_dirs) |lib_dir_path| {
for (self.base.options.system_libs.keys()) |link_lib| {
for (self.base.comp.system_libs.keys()) |link_lib| {
if (!(try self.accessLibPath(&test_path, null, lib_dir_path, link_lib, .Dynamic)))
continue;
if ((try rpath_table.fetchPut(lib_dir_path, {})) == null) {
@ -2763,8 +2790,8 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
// Shared libraries.
if (is_exe_or_dyn_lib) {
const system_libs = self.base.options.system_libs.keys();
const system_libs_values = self.base.options.system_libs.values();
const system_libs = self.base.comp.system_libs.keys();
const system_libs_values = self.base.comp.system_libs.values();
// Worst-case, we need an --as-needed argument for every lib, as well
// as one before and one after.
@ -2813,7 +2840,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
self.error_flags.missing_libc = false;
if (self.base.options.link_libc) {
if (self.base.options.libc_installation != null) {
const needs_grouping = self.base.options.link_mode == .Static;
const needs_grouping = link_mode == .Static;
if (needs_grouping) try argv.append("--start-group");
try argv.appendSlice(target_util.libcFullLinkFlags(target));
if (needs_grouping) try argv.append("--end-group");
@ -2826,7 +2853,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
}
try argv.append(try comp.get_libc_crt_file(arena, "libc_nonshared.a"));
} else if (target.isMusl()) {
try argv.append(try comp.get_libc_crt_file(arena, switch (self.base.options.link_mode) {
try argv.append(try comp.get_libc_crt_file(arena, switch (link_mode) {
.Static => "libc.a",
.Dynamic => "libc.so",
}));
@ -2847,7 +2874,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
if (csu.crtend) |v| try argv.append(v);
if (csu.crtn) |v| try argv.append(v);
if (allow_shlib_undefined) {
if (self.base.allow_shlib_undefined) {
try argv.append("--allow-shlib-undefined");
}
@ -2861,7 +2888,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
try argv.append("-Bsymbolic");
}
if (self.base.options.verbose_link) {
if (self.base.comp.verbose_link) {
// Skip over our own name so that the LLD linker name is the first argv item.
Compilation.dump_argv(argv.items[1..]);
}
@ -3087,10 +3114,12 @@ fn writeElfHeader(self: *Elf) !void {
assert(index == 16);
const elf_type: elf.ET = switch (self.base.options.output_mode) {
const output_mode = self.base.comp.config.output_mode;
const link_mode = self.base.comp.config.link_mode;
const elf_type: elf.ET = switch (output_mode) {
.Exe => if (self.base.options.pie) .DYN else .EXEC,
.Obj => .REL,
.Lib => switch (self.base.options.link_mode) {
.Lib => switch (link_mode) {
.Static => @as(elf.ET, .REL),
.Dynamic => .DYN,
},
@ -3216,7 +3245,6 @@ pub fn updateExports(
@panic("Attempted to compile for object format that was disabled by build configuration");
}
if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, exports);
if (self.base.options.emit == null) return;
return self.zigObjectPtr().?.updateExports(self, mod, exported, exports);
}
@ -3280,6 +3308,8 @@ fn addLinkerDefinedSymbols(self: *Elf) !void {
}
fn allocateLinkerDefinedSymbols(self: *Elf) void {
const link_mode = self.base.comp.config.link_mode;
// _DYNAMIC
if (self.dynamic_section_index) |shndx| {
const shdr = &self.shdrs.items[shndx];
@ -3362,7 +3392,7 @@ fn allocateLinkerDefinedSymbols(self: *Elf) void {
// __rela_iplt_start, __rela_iplt_end
if (self.rela_dyn_section_index) |shndx| blk: {
if (self.base.options.link_mode != .Static or self.base.options.pie) break :blk;
if (link_mode != .Static or self.base.options.pie) break :blk;
const shdr = &self.shdrs.items[shndx];
const end_addr = shdr.sh_addr + shdr.sh_size;
const start_addr = end_addr - self.calcNumIRelativeRelocs() * @sizeOf(elf.Elf64_Rela);
@ -3531,7 +3561,7 @@ fn initSyntheticSections(self: *Elf) !void {
});
}
if (self.isDynLib() or self.shared_objects.items.len > 0 or self.base.options.pie) {
if (self.base.isDynLib() or self.shared_objects.items.len > 0 or self.base.options.pie) {
self.dynstrtab_section_index = try self.addSection(.{
.name = ".dynstr",
.flags = elf.SHF_ALLOC,
@ -3716,7 +3746,7 @@ fn initSpecialPhdrs(self: *Elf) !void {
self.phdr_gnu_stack_index = try self.addPhdr(.{
.type = elf.PT_GNU_STACK,
.flags = elf.PF_W | elf.PF_R,
.memsz = self.base.options.stack_size_override orelse 0,
.memsz = self.base.stack_size,
.@"align" = 1,
});
@ -3822,7 +3852,7 @@ fn setDynamicSection(self: *Elf, rpaths: []const []const u8) !void {
try self.dynamic.addNeeded(shared_object, self);
}
if (self.isDynLib()) {
if (self.base.isDynLib()) {
if (self.base.options.soname) |soname| {
try self.dynamic.setSoname(soname, self);
}
@ -3837,8 +3867,9 @@ fn sortDynamicSymtab(self: *Elf) void {
}
fn setVersionSymtab(self: *Elf) !void {
const gpa = self.base.comp.gpa;
if (self.versym_section_index == null) return;
try self.versym.resize(self.base.allocator, self.dynsym.count());
try self.versym.resize(gpa, self.dynsym.count());
self.versym.items[0] = elf.VER_NDX_LOCAL;
for (self.dynsym.entries.items, 1..) |entry, i| {
const sym = self.symbol(entry.symbol_index);
@ -5597,38 +5628,14 @@ const CsuObjects = struct {
};
pub fn calcImageBase(self: Elf) u64 {
if (self.isDynLib()) return 0;
if (self.isExe() and self.base.options.pie) return 0;
if (self.base.isDynLib()) return 0;
if (self.base.isExe() and self.base.options.pie) return 0;
return self.base.options.image_base_override orelse switch (self.ptr_width) {
.p32 => 0x1000,
.p64 => 0x1000000,
};
}
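// Worked example: a non-PIE 64-bit executable with no image base override gets
// the default base 0x1000000, while dynamic libraries and PIE executables are
// linked at 0 and left to the loader to relocate.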
pub fn isStatic(self: Elf) bool {
return self.base.options.link_mode == .Static;
}
pub fn isObject(self: Elf) bool {
return self.base.options.output_mode == .Obj;
}
pub fn isExe(self: Elf) bool {
return self.base.options.output_mode == .Exe;
}
pub fn isStaticLib(self: Elf) bool {
return self.base.options.output_mode == .Lib and self.isStatic();
}
pub fn isRelocatable(self: Elf) bool {
return self.isObject() or self.isStaticLib();
}
pub fn isDynLib(self: Elf) bool {
return self.base.options.output_mode == .Lib and !self.isStatic();
}
pub fn isZigSection(self: Elf, shndx: u16) bool {
inline for (&[_]?u16{
self.zig_text_section_index,
@ -5668,8 +5675,9 @@ fn addPhdr(self: *Elf, opts: struct {
filesz: u64 = 0,
memsz: u64 = 0,
}) error{OutOfMemory}!u16 {
const gpa = self.base.comp.gpa;
const index = @as(u16, @intCast(self.phdrs.items.len));
try self.phdrs.append(self.base.allocator, .{
try self.phdrs.append(gpa, .{
.p_type = opts.type,
.p_flags = opts.flags,
.p_offset = opts.offset,
@ -5818,8 +5826,9 @@ pub fn atom(self: *Elf, atom_index: Atom.Index) ?*Atom {
}
pub fn addAtom(self: *Elf) !Atom.Index {
const gpa = self.base.comp.gpa;
const index = @as(Atom.Index, @intCast(self.atoms.items.len));
const atom_ptr = try self.atoms.addOne(self.base.allocator);
const atom_ptr = try self.atoms.addOne(gpa);
atom_ptr.* = .{ .atom_index = index };
return index;
}
@ -5841,7 +5850,8 @@ pub fn symbol(self: *Elf, sym_index: Symbol.Index) *Symbol {
}
pub fn addSymbol(self: *Elf) !Symbol.Index {
try self.symbols.ensureUnusedCapacity(self.base.allocator, 1);
const gpa = self.base.comp.gpa;
try self.symbols.ensureUnusedCapacity(gpa, 1);
const index = blk: {
if (self.symbols_free_list.popOrNull()) |index| {
log.debug(" (reusing symbol index {d})", .{index});
@ -5858,8 +5868,9 @@ pub fn addSymbol(self: *Elf) !Symbol.Index {
}
pub fn addSymbolExtra(self: *Elf, extra: Symbol.Extra) !u32 {
const gpa = self.base.comp.gpa;
const fields = @typeInfo(Symbol.Extra).Struct.fields;
try self.symbols_extra.ensureUnusedCapacity(self.base.allocator, fields.len);
try self.symbols_extra.ensureUnusedCapacity(gpa, fields.len);
return self.addSymbolExtraAssumeCapacity(extra);
}
@ -5959,8 +5970,9 @@ pub fn getOrCreateComdatGroupOwner(self: *Elf, name: [:0]const u8) !GetOrCreateC
}
pub fn addComdatGroup(self: *Elf) !ComdatGroup.Index {
const gpa = self.base.comp.gpa;
const index = @as(ComdatGroup.Index, @intCast(self.comdat_groups.items.len));
_ = try self.comdat_groups.addOne(self.base.allocator);
_ = try self.comdat_groups.addOne(gpa);
return index;
}
@ -6023,14 +6035,16 @@ const ErrorWithNotes = struct {
};
pub fn addErrorWithNotes(self: *Elf, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
try self.misc_errors.ensureUnusedCapacity(self.base.allocator, 1);
const gpa = self.base.comp.gpa;
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
return self.addErrorWithNotesAssumeCapacity(note_count);
}
fn addErrorWithNotesAssumeCapacity(self: *Elf, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
const gpa = self.base.comp.gpa;
const index = self.misc_errors.items.len;
const err = self.misc_errors.addOneAssumeCapacity();
err.* = .{ .msg = undefined, .notes = try self.base.allocator.alloc(link.File.ErrorMsg, note_count) };
err.* = .{ .msg = undefined, .notes = try gpa.alloc(link.File.ErrorMsg, note_count) };
return .{ .index = index };
}
@ -6040,9 +6054,10 @@ pub fn getShString(self: Elf, off: u32) [:0]const u8 {
}
pub fn insertShString(self: *Elf, name: [:0]const u8) error{OutOfMemory}!u32 {
const gpa = self.base.comp.gpa;
const off = @as(u32, @intCast(self.shstrtab.items.len));
try self.shstrtab.ensureUnusedCapacity(self.base.allocator, name.len + 1);
self.shstrtab.writer(self.base.allocator).print("{s}\x00", .{name}) catch unreachable;
try self.shstrtab.ensureUnusedCapacity(gpa, name.len + 1);
self.shstrtab.writer(gpa).print("{s}\x00", .{name}) catch unreachable;
return off;
}
@ -6052,9 +6067,10 @@ pub fn getDynString(self: Elf, off: u32) [:0]const u8 {
}
pub fn insertDynString(self: *Elf, name: []const u8) error{OutOfMemory}!u32 {
const gpa = self.base.comp.gpa;
const off = @as(u32, @intCast(self.dynstrtab.items.len));
try self.dynstrtab.ensureUnusedCapacity(self.base.allocator, name.len + 1);
self.dynstrtab.writer(self.base.allocator).print("{s}\x00", .{name}) catch unreachable;
try self.dynstrtab.ensureUnusedCapacity(gpa, name.len + 1);
self.dynstrtab.writer(gpa).print("{s}\x00", .{name}) catch unreachable;
return off;
}

View File

@ -76,7 +76,7 @@ pub const symbol_mask: u32 = 0x7fffffff;
pub const SHN_ATOM: u16 = 0x100;
pub fn init(self: *ZigObject, elf_file: *Elf) !void {
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
try self.atoms.append(gpa, 0); // null input section
try self.relocs.append(gpa, .{}); // null relocs section
@ -96,7 +96,7 @@ pub fn init(self: *ZigObject, elf_file: *Elf) !void {
esym.st_shndx = elf.SHN_ABS;
symbol_ptr.esym_index = esym_index;
if (!elf_file.base.options.strip) {
if (elf_file.base.debug_format != .strip) {
self.dwarf = Dwarf.init(gpa, &elf_file.base, .dwarf32);
}
}
@ -155,13 +155,13 @@ pub fn deinit(self: *ZigObject, allocator: Allocator) void {
pub fn flushModule(self: *ZigObject, elf_file: *Elf) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
if (self.lazy_syms.getPtr(.none)) |metadata| {
const module = elf_file.base.options.module.?;
const zcu = elf_file.base.comp.module.?;
// Most lazy symbols can be updated on first use, but
// anyerror needs to wait for everything to be flushed.
if (metadata.text_state != .unused) self.updateLazySymbol(
elf_file,
link.File.LazySymbol.initDecl(.code, null, module),
link.File.LazySymbol.initDecl(.code, null, zcu),
metadata.text_symbol_index,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@ -169,7 +169,7 @@ pub fn flushModule(self: *ZigObject, elf_file: *Elf) !void {
};
if (metadata.rodata_state != .unused) self.updateLazySymbol(
elf_file,
link.File.LazySymbol.initDecl(.const_data, null, module),
link.File.LazySymbol.initDecl(.const_data, null, zcu),
metadata.rodata_symbol_index,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@ -182,7 +182,8 @@ pub fn flushModule(self: *ZigObject, elf_file: *Elf) !void {
}
if (self.dwarf) |*dw| {
try dw.flushModule(elf_file.base.options.module.?);
const zcu = elf_file.base.comp.module.?;
try dw.flushModule(zcu);
// TODO I need to re-think how to handle ZigObject's debug sections AND debug sections
// extracted from input object files correctly.
@ -195,7 +196,7 @@ pub fn flushModule(self: *ZigObject, elf_file: *Elf) !void {
const text_shdr = elf_file.shdrs.items[elf_file.zig_text_section_index.?];
const low_pc = text_shdr.sh_addr;
const high_pc = text_shdr.sh_addr + text_shdr.sh_size;
try dw.writeDbgInfoHeader(elf_file.base.options.module.?, low_pc, high_pc);
try dw.writeDbgInfoHeader(zcu, low_pc, high_pc);
self.debug_info_header_dirty = false;
}
@ -268,7 +269,7 @@ pub fn addGlobalEsym(self: *ZigObject, allocator: Allocator) !Symbol.Index {
}
pub fn addAtom(self: *ZigObject, elf_file: *Elf) !Symbol.Index {
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
const atom_index = try elf_file.addAtom();
const symbol_index = try elf_file.addSymbol();
const esym_index = try self.addLocalEsym(gpa);
@ -411,6 +412,7 @@ pub fn allocateTlvAtoms(self: ZigObject, elf_file: *Elf) void {
}
pub fn scanRelocs(self: *ZigObject, elf_file: *Elf, undefs: anytype) !void {
const gpa = elf_file.base.comp.gpa;
for (self.atoms.items) |atom_index| {
const atom = elf_file.atom(atom_index) orelse continue;
if (!atom.flags.alive) continue;
@ -421,7 +423,7 @@ pub fn scanRelocs(self: *ZigObject, elf_file: *Elf, undefs: anytype) !void {
// Perhaps it would make sense to save the code until flushModule where we
// would free all of generated code?
const code = try self.codeAlloc(elf_file, atom_index);
defer elf_file.base.allocator.free(code);
defer gpa.free(code);
try atom.scanRelocs(elf_file, code, undefs);
} else try atom.scanRelocs(elf_file, null, undefs);
}
@ -447,7 +449,7 @@ pub fn markLive(self: *ZigObject, elf_file: *Elf) void {
/// We need this so that we can write to an archive.
/// TODO implement writing ZigObject data directly to a buffer instead.
pub fn readFileContents(self: *ZigObject, elf_file: *Elf) !void {
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
const shsize: u64 = switch (elf_file.ptr_width) {
.p32 => @sizeOf(elf.Elf32_Shdr),
.p64 => @sizeOf(elf.Elf64_Shdr),
@ -465,7 +467,7 @@ pub fn readFileContents(self: *ZigObject, elf_file: *Elf) !void {
}
pub fn updateArSymtab(self: ZigObject, ar_symtab: *Archive.ArSymtab, elf_file: *Elf) error{OutOfMemory}!void {
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
try ar_symtab.symtab.ensureUnusedCapacity(gpa, self.globals().len);
@ -508,7 +510,7 @@ pub fn addAtomsToRelaSections(self: ZigObject, elf_file: *Elf) !void {
const out_shdr = elf_file.shdrs.items[out_shndx];
if (out_shdr.sh_type == elf.SHT_NOBITS) continue;
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
const sec = elf_file.output_rela_sections.getPtr(out_shndx).?;
try sec.atom_list.append(gpa, atom_index);
}
@ -602,7 +604,7 @@ pub fn asFile(self: *ZigObject) File {
/// Returns atom's code.
/// Caller owns the memory.
pub fn codeAlloc(self: ZigObject, elf_file: *Elf, atom_index: Atom.Index) ![]u8 {
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
const atom = elf_file.atom(atom_index).?;
assert(atom.file_index == self.index);
const shdr = &elf_file.shdrs.items[atom.outputShndx().?];
@ -668,8 +670,8 @@ pub fn lowerAnonDecl(
explicit_alignment: InternPool.Alignment,
src_loc: Module.SrcLoc,
) !codegen.Result {
const gpa = elf_file.base.allocator;
const mod = elf_file.base.options.module.?;
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const ty = Type.fromInterned(mod.intern_pool.typeOf(decl_val));
const decl_alignment = switch (explicit_alignment) {
.none => ty.abiAlignment(mod),
@ -716,8 +718,8 @@ pub fn getOrCreateMetadataForLazySymbol(
elf_file: *Elf,
lazy_sym: link.File.LazySymbol,
) !Symbol.Index {
const gpa = elf_file.base.allocator;
const mod = elf_file.base.options.module.?;
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const gop = try self.lazy_syms.getOrPut(gpa, lazy_sym.getDecl(mod));
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
if (!gop.found_existing) gop.value_ptr.* = .{};
@ -752,25 +754,28 @@ pub fn getOrCreateMetadataForLazySymbol(
}
fn freeUnnamedConsts(self: *ZigObject, elf_file: *Elf, decl_index: InternPool.DeclIndex) void {
const gpa = elf_file.base.comp.gpa;
const unnamed_consts = self.unnamed_consts.getPtr(decl_index) orelse return;
for (unnamed_consts.items) |sym_index| {
self.freeDeclMetadata(elf_file, sym_index);
}
unnamed_consts.clearAndFree(elf_file.base.allocator);
unnamed_consts.clearAndFree(gpa);
}
fn freeDeclMetadata(self: *ZigObject, elf_file: *Elf, sym_index: Symbol.Index) void {
_ = self;
const gpa = elf_file.base.comp.gpa;
const sym = elf_file.symbol(sym_index);
sym.atom(elf_file).?.free(elf_file);
log.debug("adding %{d} to local symbols free list", .{sym_index});
elf_file.symbols_free_list.append(elf_file.base.allocator, sym_index) catch {};
elf_file.symbols_free_list.append(gpa, sym_index) catch {};
elf_file.symbols.items[sym_index] = .{};
// TODO free GOT entry here
}
pub fn freeDecl(self: *ZigObject, elf_file: *Elf, decl_index: InternPool.DeclIndex) void {
const mod = elf_file.base.options.module.?;
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
log.debug("freeDecl {*}", .{decl});
@ -780,7 +785,7 @@ pub fn freeDecl(self: *ZigObject, elf_file: *Elf, decl_index: InternPool.DeclInd
const sym_index = kv.value.symbol_index;
self.freeDeclMetadata(elf_file, sym_index);
self.freeUnnamedConsts(elf_file, decl_index);
kv.value.exports.deinit(elf_file.base.allocator);
kv.value.exports.deinit(gpa);
}
if (self.dwarf) |*dw| {
@ -793,15 +798,16 @@ pub fn getOrCreateMetadataForDecl(
elf_file: *Elf,
decl_index: InternPool.DeclIndex,
) !Symbol.Index {
const gop = try self.decls.getOrPut(elf_file.base.allocator, decl_index);
const gpa = elf_file.base.comp.gpa;
const gop = try self.decls.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
const single_threaded = elf_file.base.options.single_threaded;
const any_non_single_threaded = elf_file.base.comp.config.any_non_single_threaded;
const symbol_index = try self.addAtom(elf_file);
const mod = elf_file.base.options.module.?;
const mod = elf_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
const sym = elf_file.symbol(symbol_index);
if (decl.getOwnedVariable(mod)) |variable| {
if (variable.is_threadlocal and !single_threaded) {
if (variable.is_threadlocal and any_non_single_threaded) {
sym.flags.is_tls = true;
}
}
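// TLS handling is keyed off the compilation-wide any_non_single_threaded flag:
// a threadlocal variable only needs a real TLS slot if at least one module in
// the compilation may run multi-threaded.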
@ -820,13 +826,13 @@ fn getDeclShdrIndex(
code: []const u8,
) error{OutOfMemory}!u16 {
_ = self;
const mod = elf_file.base.options.module.?;
const single_threaded = elf_file.base.options.single_threaded;
const mod = elf_file.base.comp.module.?;
const any_non_single_threaded = elf_file.base.comp.config.any_non_single_threaded;
const shdr_index = switch (decl.ty.zigTypeTag(mod)) {
.Fn => elf_file.zig_text_section_index.?,
else => blk: {
if (decl.getOwnedVariable(mod)) |variable| {
if (variable.is_threadlocal and !single_threaded) {
if (variable.is_threadlocal and any_non_single_threaded) {
const is_all_zeroes = for (code) |byte| {
if (byte != 0) break false;
} else true;
@ -846,9 +852,12 @@ fn getDeclShdrIndex(
}
if (variable.is_const) break :blk elf_file.zig_data_rel_ro_section_index.?;
if (Value.fromInterned(variable.init).isUndefDeep(mod)) {
const mode = elf_file.base.options.optimize_mode;
if (mode == .Debug or mode == .ReleaseSafe) break :blk elf_file.zig_data_section_index.?;
break :blk elf_file.zig_bss_section_index.?;
// TODO: get the optimize_mode from the Module that owns the decl instead
// of using the root module here.
break :blk switch (elf_file.base.comp.root_mod.optimize_mode) {
.Debug, .ReleaseSafe => elf_file.zig_data_section_index.?,
.ReleaseFast, .ReleaseSmall => elf_file.zig_bss_section_index.?,
};
}
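// Likely rationale for the split above: safe optimize modes materialize
// undefined values as 0xaa fill, which needs a file-backed data section, while
// ReleaseFast and ReleaseSmall can leave the uninitialized global in .bss.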
// TODO I blatantly copied the logic from the Wasm linker, but is there a less
// intrusive check for all zeroes than this?
@ -873,8 +882,8 @@ fn updateDeclCode(
code: []const u8,
stt_bits: u8,
) !void {
const gpa = elf_file.base.allocator;
const mod = elf_file.base.options.module.?;
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
@ -971,8 +980,8 @@ fn updateTlv(
shndx: u16,
code: []const u8,
) !void {
const gpa = elf_file.base.allocator;
const mod = elf_file.base.options.module.?;
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
const decl_name = mod.intern_pool.stringToSlice(try decl.getFullyQualifiedName(mod));
@ -1026,6 +1035,7 @@ pub fn updateFunc(
const tracy = trace(@src());
defer tracy.end();
const gpa = elf_file.base.comp.gpa;
const func = mod.funcInfo(func_index);
const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index);
@ -1034,7 +1044,7 @@ pub fn updateFunc(
self.freeUnnamedConsts(elf_file, decl_index);
elf_file.symbol(sym_index).atom(elf_file).?.freeRelocs(elf_file);
var code_buffer = std.ArrayList(u8).init(elf_file.base.allocator);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
var decl_state: ?Dwarf.DeclState = if (self.dwarf) |*dw| try dw.initDeclState(mod, decl_index) else null;
@ -1117,7 +1127,8 @@ pub fn updateDecl(
const sym_index = try self.getOrCreateMetadataForDecl(elf_file, decl_index);
elf_file.symbol(sym_index).atom(elf_file).?.freeRelocs(elf_file);
var code_buffer = std.ArrayList(u8).init(elf_file.base.allocator);
const gpa = elf_file.base.comp.gpa;
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
var decl_state: ?Dwarf.DeclState = if (self.dwarf) |*dw| try dw.initDeclState(mod, decl_index) else null;
@ -1179,8 +1190,8 @@ fn updateLazySymbol(
sym: link.File.LazySymbol,
symbol_index: Symbol.Index,
) !void {
const gpa = elf_file.base.allocator;
const mod = elf_file.base.options.module.?;
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
var required_alignment: InternPool.Alignment = .none;
var code_buffer = std.ArrayList(u8).init(gpa);
@ -1261,8 +1272,8 @@ pub fn lowerUnnamedConst(
typed_value: TypedValue,
decl_index: InternPool.DeclIndex,
) !u32 {
const gpa = elf_file.base.allocator;
const mod = elf_file.base.options.module.?;
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const gop = try self.unnamed_consts.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
@ -1308,7 +1319,7 @@ fn lowerConst(
output_section_index: u16,
src_loc: Module.SrcLoc,
) !LowerConstResult {
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
@ -1364,7 +1375,7 @@ pub fn updateExports(
const tracy = trace(@src());
defer tracy.end();
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
const metadata = switch (exported) {
.decl_index => |decl_index| blk: {
_ = try self.getOrCreateMetadataForDecl(elf_file, decl_index);
@ -1467,7 +1478,7 @@ pub fn deleteDeclExport(
name: InternPool.NullTerminatedString,
) void {
const metadata = self.decls.getPtr(decl_index) orelse return;
const mod = elf_file.base.options.module.?;
const mod = elf_file.base.comp.module.?;
const exp_name = mod.intern_pool.stringToSlice(name);
const esym_index = metadata.@"export"(self, exp_name) orelse return;
log.debug("deleting export '{s}'", .{exp_name});
@ -1485,7 +1496,7 @@ pub fn deleteDeclExport(
pub fn getGlobalSymbol(self: *ZigObject, elf_file: *Elf, name: []const u8, lib_name: ?[]const u8) !u32 {
_ = lib_name;
const gpa = elf_file.base.allocator;
const gpa = elf_file.base.comp.gpa;
const off = try self.strtab.insert(gpa, name);
const lookup_gop = try self.globals_lookup.getOrPut(gpa, off);
if (!lookup_gop.found_existing) {

View File

@ -144,6 +144,35 @@ tlv_table: TlvSymbolTable = .{},
hot_state: if (is_hot_update_compatible) HotUpdateState else struct {} = .{},
darwin_sdk_layout: ?SdkLayout,
/// Size of the __PAGEZERO segment.
pagezero_vmsize: u64,
/// Minimum space for future expansion of the load commands.
headerpad_size: u32,
/// Set enough space as if all paths were MAXPATHLEN.
headerpad_max_install_names: bool,
/// Remove dylibs that are unreachable by the entry point or exported symbols.
dead_strip_dylibs: bool,
frameworks: []const Framework,
/// Install name for the dylib.
/// TODO: unify with soname
install_name: ?[]const u8,
/// Path to entitlements file.
entitlements: ?[]const u8,
/// When adding a new field, remember to update `hashAddFrameworks`.
pub const Framework = struct {
needed: bool = false,
weak: bool = false,
path: []const u8,
};
pub fn hashAddFrameworks(man: *Cache.Manifest, hm: []const Framework) !void {
for (hm) |value| {
man.hash.add(value.needed);
man.hash.add(value.weak);
_ = try man.addFile(value.path, null);
}
}
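// Usage sketch, assuming a Cache.Manifest obtained from the Compilation; the
// framework path below is hypothetical:
//
//   var man = comp.cache_parent.obtain();
//   defer man.deinit();
//   const frameworks = [_]Framework{
//       .{ .path = "/System/Library/Frameworks/Cocoa.framework/Cocoa", .needed = true },
//   };
//   try hashAddFrameworks(&man, &frameworks);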
/// The filesystem layout of darwin SDK elements.
pub const SdkLayout = enum {
@ -156,12 +185,14 @@ pub const SdkLayout = enum {
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*MachO {
if (build_options.only_c) unreachable;
const target = options.comp.root_mod.resolved_target.result;
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
const use_llvm = options.comp.config.use_llvm;
assert(target.ofmt == .macho);
const gpa = options.comp.gpa;
const emit = options.emit;
const mode: Mode = mode: {
if (options.use_llvm or options.module == null or options.cache_mode == .whole)
if (use_llvm or options.module == null or options.cache_mode == .whole)
break :mode .zld;
break :mode .incremental;
};
@ -192,7 +223,11 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*MachO {
const file = try emit.directory.handle.createFile(sub_path, .{
.truncate = false,
.read = true,
.mode = link.determineMode(options),
.mode = link.File.determineMode(
use_lld,
options.comp.config.output_mode,
options.comp.config.link_mode,
),
});
self.base.file = file;
@ -242,21 +277,37 @@ pub fn open(arena: Allocator, options: link.File.OpenOptions) !*MachO {
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*MachO {
const self = try arena.create(MachO);
const optimize_mode = options.comp.root_mod.optimize_mode;
const use_llvm = options.comp.config.use_llvm;
self.* = .{
.base = .{
.tag = .macho,
.comp = options.comp,
.emit = options.emit,
.gc_sections = options.gc_sections orelse (optimize_mode != .Debug),
.stack_size = options.stack_size orelse 16777216,
.allow_shlib_undefined = options.allow_shlib_undefined orelse false,
.file = null,
.disable_lld_caching = options.disable_lld_caching,
.build_id = options.build_id,
.rpath_list = options.rpath_list,
.force_undefined_symbols = options.force_undefined_symbols,
.debug_format = options.debug_format orelse .{ .dwarf = .@"32" },
.function_sections = options.function_sections,
.data_sections = options.data_sections,
},
.mode = if (options.use_llvm or options.module == null or options.cache_mode == .whole)
.mode = if (use_llvm or options.module == null or options.cache_mode == .whole)
.zld
else
.incremental,
.pagezero_vmsize = options.pagezero_size orelse default_pagezero_vmsize,
.headerpad_size = options.headerpad_size orelse default_headerpad_size,
.headerpad_max_install_names = options.headerpad_max_install_names,
.dead_strip_dylibs = options.dead_strip_dylibs,
};
if (options.use_llvm and options.module != null) {
if (use_llvm and options.module != null) {
self.llvm_object = try LlvmObject.create(arena, options);
}
@ -267,8 +318,9 @@ pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*MachO {
pub fn flush(self: *MachO, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {
const gpa = self.base.comp.gpa;
const output_mode = self.base.comp.config.output_mode;
if (self.base.options.output_mode == .Lib and self.base.options.link_mode == .Static) {
if (output_mode == .Lib and self.base.options.link_mode == .Static) {
if (build_options.have_llvm) {
return self.base.linkAsArchive(comp, prog_node);
} else {
@ -303,6 +355,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
sub_prog_node.activate();
defer sub_prog_node.end();
const output_mode = self.base.comp.config.output_mode;
const module = self.base.options.module orelse return error.LinkingWithoutZigSourceUnimplemented;
if (self.lazy_syms.getPtr(.none)) |metadata| {
@ -335,7 +388,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
}
var libs = std.StringArrayHashMap(link.SystemLib).init(arena);
try self.resolveLibSystem(arena, comp, &.{}, &libs);
try self.resolveLibSystem(arena, comp, &libs);
const id_symlink_basename = "link.id";
@ -446,7 +499,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
try self.createDyldPrivateAtom();
try self.writeStubHelperPreamble();
if (self.base.options.output_mode == .Exe and self.getEntryPoint() != null) {
if (output_mode == .Exe and self.getEntryPoint() != null) {
const global = self.getEntryPoint().?;
if (self.getSymbol(global).undf()) {
// We do one additional check here in case the entry point was found in one of the dylibs.
@ -517,8 +570,8 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
// The most important thing here is to have the correct vm and file sizes of the __LINKEDIT segment
// where the code signature goes.
var codesig = CodeSignature.init(getPageSize(self.base.options.target.cpu.arch));
codesig.code_directory.ident = self.base.options.emit.?.sub_path;
if (self.base.options.entitlements) |path| {
codesig.code_directory.ident = self.base.emit.sub_path;
if (self.entitlements) |path| {
try codesig.addEntitlements(gpa, path);
}
try self.writeCodeSignaturePadding(&codesig);
@ -536,7 +589,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
try lc_writer.writeStruct(self.dysymtab_cmd);
try load_commands.writeDylinkerLC(lc_writer);
switch (self.base.options.output_mode) {
switch (output_mode) {
.Exe => blk: {
const seg_id = self.header_segment_cmd_index.?;
const seg = self.segments.items[seg_id];
@ -552,7 +605,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
try lc_writer.writeStruct(macho.entry_point_command{
.entryoff = @as(u32, @intCast(addr - seg.vmaddr)),
.stacksize = self.base.options.stack_size_override orelse 0,
.stacksize = self.base.stack_size,
});
},
.Lib => if (self.base.options.link_mode == .Dynamic) {
@ -591,7 +644,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
if (codesig) |*csig| {
try self.writeCodeSignature(comp, csig); // code signing always comes last
const emit = self.base.options.emit.?;
const emit = self.base.emit;
try invalidateKernelCache(emit.directory.handle, emit.sub_path);
}
@ -642,34 +695,20 @@ pub fn resolveLibSystem(
self: *MachO,
arena: Allocator,
comp: *Compilation,
search_dirs: []const []const u8,
out_libs: anytype,
) !void {
const gpa = self.base.comp.gpa;
var tmp_arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer tmp_arena_allocator.deinit();
const tmp_arena = tmp_arena_allocator.allocator();
var test_path = std.ArrayList(u8).init(tmp_arena);
var checked_paths = std.ArrayList([]const u8).init(tmp_arena);
var test_path = std.ArrayList(u8).init(arena);
var checked_paths = std.ArrayList([]const u8).init(arena);
success: {
for (search_dirs) |dir| if (try accessLibPath(
tmp_arena,
&test_path,
&checked_paths,
dir,
"libSystem",
)) break :success;
if (self.base.options.darwin_sdk_layout) |sdk_layout| switch (sdk_layout) {
.sdk => {
const dir = try fs.path.join(tmp_arena, &[_][]const u8{ self.base.options.sysroot.?, "usr", "lib" });
if (try accessLibPath(tmp_arena, &test_path, &checked_paths, dir, "libSystem")) break :success;
const dir = try fs.path.join(arena, &[_][]const u8{ self.base.options.sysroot.?, "usr", "lib" });
if (try accessLibPath(arena, &test_path, &checked_paths, dir, "libSystem")) break :success;
},
.vendored => {
const dir = try comp.zig_lib_directory.join(tmp_arena, &[_][]const u8{ "libc", "darwin" });
if (try accessLibPath(tmp_arena, &test_path, &checked_paths, dir, "libSystem")) break :success;
const dir = try comp.zig_lib_directory.join(arena, &[_][]const u8{ "libc", "darwin" });
if (try accessLibPath(arena, &test_path, &checked_paths, dir, "libSystem")) break :success;
},
};
@ -1082,7 +1121,7 @@ fn addDylib(self: *MachO, dylib: Dylib, dylib_options: DylibOpts, ctx: *ParseErr
try self.dylibs.append(gpa, dylib);
const should_link_dylib_even_if_unreachable = blk: {
if (self.base.options.dead_strip_dylibs and !dylib_options.needed) break :blk false;
if (self.dead_strip_dylibs and !dylib_options.needed) break :blk false;
break :blk !(dylib_options.dependent or self.referenced_dylibs.contains(gop.value_ptr.*));
};
@ -1597,7 +1636,8 @@ fn createThreadLocalDescriptorAtom(self: *MachO, sym_name: []const u8, target: S
}
pub fn createMhExecuteHeaderSymbol(self: *MachO) !void {
if (self.base.options.output_mode != .Exe) return;
const output_mode = self.base.comp.config.output_mode;
if (output_mode != .Exe) return;
const gpa = self.base.comp.gpa;
const sym_index = try self.allocateSymbol();
@ -1647,10 +1687,11 @@ pub fn createDsoHandleSymbol(self: *MachO) !void {
}
pub fn resolveSymbols(self: *MachO) !void {
const output_mode = self.base.comp.config.output_mode;
// We add the specified entrypoint as the first unresolved symbol so that
// we search for it in libraries should there be no object files specified
// on the linker line.
if (self.base.options.output_mode == .Exe) {
if (output_mode == .Exe) {
const entry_name = self.base.options.entry orelse load_commands.default_entry_point;
_ = try self.addUndefined(entry_name, .{});
}
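// load_commands.default_entry_point is the conventional Mach-O entry symbol
// (presumably "_main"), used when no explicit entry name was provided.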
@ -1867,9 +1908,10 @@ fn resolveSymbolsInDylibs(self: *MachO) !void {
}
fn resolveSymbolsAtLoading(self: *MachO) !void {
const is_lib = self.base.options.output_mode == .Lib;
const output_mode = self.base.comp.config.output_mode;
const is_lib = output_mode == .Lib;
const is_dyn_lib = self.base.options.link_mode == .Dynamic and is_lib;
const allow_undef = is_dyn_lib and (self.base.options.allow_shlib_undefined orelse false);
const allow_undef = is_dyn_lib and self.base.allow_shlib_undefined;
var next_sym: usize = 0;
while (next_sym < self.unresolved.count()) {
@ -2674,12 +2716,12 @@ fn getDeclOutputSection(self: *MachO, decl_index: InternPool.DeclIndex) u8 {
const val = decl.val;
const mod = self.base.options.module.?;
const zig_ty = ty.zigTypeTag(mod);
const mode = self.base.options.optimize_mode;
const single_threaded = self.base.options.single_threaded;
const any_non_single_threaded = self.base.comp.config.any_non_single_threaded;
const optimize_mode = self.base.comp.root_mod.optimize_mode;
const sect_id: u8 = blk: {
// TODO finish and audit this function
if (val.isUndefDeep(mod)) {
if (mode == .ReleaseFast or mode == .ReleaseSmall) {
if (optimize_mode == .ReleaseFast or optimize_mode == .ReleaseSmall) {
@panic("TODO __DATA,__bss");
} else {
break :blk self.data_section_index.?;
@ -2687,7 +2729,7 @@ fn getDeclOutputSection(self: *MachO, decl_index: InternPool.DeclIndex) u8 {
}
if (val.getVariable(mod)) |variable| {
if (variable.is_threadlocal and !single_threaded) {
if (variable.is_threadlocal and any_non_single_threaded) {
break :blk self.thread_data_section_index.?;
}
break :blk self.data_section_index.?;
@ -2796,8 +2838,6 @@ pub fn updateExports(
if (self.llvm_object) |llvm_object|
return llvm_object.updateExports(mod, exported, exports);
if (self.base.options.emit == null) return;
const tracy = trace(@src());
defer tracy.end();
@ -3093,7 +3133,7 @@ fn populateMissingMetadata(self: *MachO) !void {
if (self.header_segment_cmd_index == null) {
// The first __TEXT segment is immovable and covers MachO header and load commands.
self.header_segment_cmd_index = @as(u8, @intCast(self.segments.items.len));
const ideal_size = @max(self.base.options.headerpad_size orelse 0, default_headerpad_size);
const ideal_size = self.headerpad_size;
const needed_size = mem.alignForward(u64, padToIdeal(ideal_size), getPageSize(cpu_arch));
log.debug("found __TEXT segment (header-only) free space 0x{x} to 0x{x}", .{ 0, needed_size });
@ -3222,13 +3262,13 @@ fn populateMissingMetadata(self: *MachO) !void {
}
fn calcPagezeroSize(self: *MachO) u64 {
const pagezero_vmsize = self.base.options.pagezero_size orelse default_pagezero_vmsize;
const output_mode = self.base.comp.config.output_mode;
const page_size = getPageSize(self.base.options.target.cpu.arch);
const aligned_pagezero_vmsize = mem.alignBackward(u64, pagezero_vmsize, page_size);
if (self.base.options.output_mode == .Lib) return 0;
const aligned_pagezero_vmsize = mem.alignBackward(u64, self.pagezero_vmsize, page_size);
if (output_mode == .Lib) return 0;
if (aligned_pagezero_vmsize == 0) return 0;
if (aligned_pagezero_vmsize != pagezero_vmsize) {
log.warn("requested __PAGEZERO size (0x{x}) is not page aligned", .{pagezero_vmsize});
if (aligned_pagezero_vmsize != self.pagezero_vmsize) {
log.warn("requested __PAGEZERO size (0x{x}) is not page aligned", .{self.pagezero_vmsize});
log.warn(" rounding down to 0x{x}", .{aligned_pagezero_vmsize});
}
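// Example: with a 16 KiB page size, a requested pagezero_vmsize of 0x5000 is
// rounded down to 0x4000 and the warning above is emitted.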
return aligned_pagezero_vmsize;
@ -4685,6 +4725,7 @@ pub fn writeCodeSignaturePadding(self: *MachO, code_sig: *CodeSignature) !void {
}
pub fn writeCodeSignature(self: *MachO, comp: *const Compilation, code_sig: *CodeSignature) !void {
const output_mode = self.base.comp.config.output_mode;
const seg_id = self.header_segment_cmd_index.?;
const seg = self.segments.items[seg_id];
const offset = self.codesig_cmd.dataoff;
@ -4698,7 +4739,7 @@ pub fn writeCodeSignature(self: *MachO, comp: *const Compilation, code_sig: *Cod
.exec_seg_base = seg.fileoff,
.exec_seg_limit = seg.filesize,
.file_size = offset,
.output_mode = self.base.options.output_mode,
.output_mode = output_mode,
}, buffer.writer());
assert(buffer.items.len == code_sig.size());
@ -4712,6 +4753,8 @@ pub fn writeCodeSignature(self: *MachO, comp: *const Compilation, code_sig: *Cod
/// Writes Mach-O file header.
pub fn writeHeader(self: *MachO, ncmds: u32, sizeofcmds: u32) !void {
const output_mode = self.base.comp.config.output_mode;
var header: macho.mach_header_64 = .{};
header.flags = macho.MH_NOUNDEFS | macho.MH_DYLDLINK | macho.MH_PIE | macho.MH_TWOLEVEL;
@ -4727,7 +4770,7 @@ pub fn writeHeader(self: *MachO, ncmds: u32, sizeofcmds: u32) !void {
else => unreachable,
}
switch (self.base.options.output_mode) {
switch (output_mode) {
.Exe => {
header.filetype = macho.MH_EXECUTE;
},

View File

@ -6,20 +6,21 @@ pub fn linkWithZld(
const tracy = trace(@src());
defer tracy.end();
const gpa = macho_file.base.allocator;
const options = &macho_file.base.options;
const target = options.target;
const gpa = macho_file.base.comp.gpa;
const target = macho_file.base.comp.root_mod.resolved_target.result;
const emit = macho_file.base.emit;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const directory = options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{options.emit.?.sub_path});
const directory = emit.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{emit.sub_path});
const opt_zcu = macho_file.base.comp.module;
// If there is no Zig code to compile, then we should skip flushing the output file because it
// will not be part of the linker line anyway.
const module_obj_path: ?[]const u8 = if (options.module != null) blk: {
const module_obj_path: ?[]const u8 = if (opt_zcu != null) blk: {
try macho_file.flushModule(comp, prog_node);
if (fs.path.dirname(full_out_path)) |dirname| {
@ -34,22 +35,24 @@ pub fn linkWithZld(
sub_prog_node.context.refresh();
defer sub_prog_node.end();
const output_mode = macho_file.base.comp.config.output_mode;
const link_mode = macho_file.base.comp.config.link_mode;
const cpu_arch = target.cpu.arch;
const is_lib = options.output_mode == .Lib;
const is_dyn_lib = options.link_mode == .Dynamic and is_lib;
const is_exe_or_dyn_lib = is_dyn_lib or options.output_mode == .Exe;
const stack_size = options.stack_size_override orelse 0;
const is_debug_build = options.optimize_mode == .Debug;
const gc_sections = options.gc_sections orelse !is_debug_build;
const is_lib = output_mode == .Lib;
const is_dyn_lib = link_mode == .Dynamic and is_lib;
const is_exe_or_dyn_lib = is_dyn_lib or output_mode == .Exe;
const stack_size = macho_file.base.stack_size;
const id_symlink_basename = "zld.id";
var man: Cache.Manifest = undefined;
defer if (!options.disable_lld_caching) man.deinit();
defer if (!macho_file.base.disable_lld_caching) man.deinit();
var digest: [Cache.hex_digest_len]u8 = undefined;
if (!options.disable_lld_caching) {
const objects = macho_file.base.comp.objects;
if (!macho_file.base.disable_lld_caching) {
man = comp.cache_parent.obtain();
// We are about to obtain this lock, so here we give other processes a chance first.
@ -57,7 +60,7 @@ pub fn linkWithZld(
comptime assert(Compilation.link_hash_implementation_version == 10);
for (options.objects) |obj| {
for (objects) |obj| {
_ = try man.addFile(obj.path, null);
man.hash.add(obj.must_link);
}
@ -68,24 +71,22 @@ pub fn linkWithZld(
// We can skip hashing libc and libc++ components that we are in charge of building from Zig
// installation sources because they are always a product of the compiler version + target information.
man.hash.add(stack_size);
man.hash.addOptional(options.pagezero_size);
man.hash.addOptional(options.headerpad_size);
man.hash.add(options.headerpad_max_install_names);
man.hash.add(gc_sections);
man.hash.add(options.dead_strip_dylibs);
man.hash.add(options.strip);
man.hash.addListOfBytes(options.lib_dirs);
man.hash.addListOfBytes(options.framework_dirs);
try link.hashAddFrameworks(&man, options.frameworks);
man.hash.addListOfBytes(options.rpath_list);
man.hash.addOptional(macho_file.pagezero_vmsize);
man.hash.addOptional(macho_file.headerpad_size);
man.hash.add(macho_file.headerpad_max_install_names);
man.hash.add(macho_file.base.gc_sections);
man.hash.add(macho_file.dead_strip_dylibs);
man.hash.add(macho_file.base.comp.root_mod.strip);
try MachO.hashAddFrameworks(&man, macho_file.frameworks);
man.hash.addListOfBytes(macho_file.rpath_list);
if (is_dyn_lib) {
man.hash.addOptionalBytes(options.install_name);
man.hash.addOptional(options.version);
man.hash.addOptionalBytes(macho_file.install_name);
man.hash.addOptional(comp.version);
}
try link.hashAddSystemLibs(&man, options.system_libs);
man.hash.addOptionalBytes(options.sysroot);
man.hash.addListOfBytes(options.force_undefined_symbols.keys());
try man.addOptionalFile(options.entitlements);
try link.hashAddSystemLibs(&man, comp.system_libs);
man.hash.addOptionalBytes(comp.sysroot);
man.hash.addListOfBytes(macho_file.base.force_undefined_symbols.keys());
try man.addOptionalFile(macho_file.entitlements);
// We don't actually care whether it's a cache hit or miss; we just
// need the digest and the lock.
@ -125,13 +126,13 @@ pub fn linkWithZld(
};
}
if (options.output_mode == .Obj) {
if (output_mode == .Obj) {
// LLD's MachO driver does not support the equivalent of `-r` so we do a simple file copy
// here. TODO: think carefully about how we can avoid this redundant operation when doing
// build-obj. See also the corresponding TODO in linkAsArchive.
const the_object_path = blk: {
if (options.objects.len != 0) {
break :blk options.objects[0].path;
if (objects.len != 0) {
break :blk objects[0].path;
}
if (comp.c_object_table.count() != 0)
@ -150,7 +151,7 @@ pub fn linkWithZld(
try fs.cwd().copyFile(the_object_path, fs.cwd(), full_out_path, .{});
}
} else {
const sub_path = options.emit.?.sub_path;
const sub_path = emit.?.sub_path;
const old_file = macho_file.base.file; // TODO is this needed at all?
defer macho_file.base.file = old_file;
@ -158,7 +159,7 @@ pub fn linkWithZld(
const file = try directory.handle.createFile(sub_path, .{
.truncate = true,
.read = true,
.mode = link.determineMode(options.*),
.mode = link.File.determineMode(false, output_mode, link_mode),
});
defer file.close();
macho_file.base.file = file;
@ -175,8 +176,8 @@ pub fn linkWithZld(
// Positional arguments to the linker such as object files and static archives.
var positionals = std.ArrayList(Compilation.LinkObject).init(arena);
try positionals.ensureUnusedCapacity(options.objects.len);
positionals.appendSliceAssumeCapacity(options.objects);
try positionals.ensureUnusedCapacity(objects.len);
positionals.appendSliceAssumeCapacity(objects);
for (comp.c_object_table.keys()) |key| {
try positionals.append(.{ .path = key.status.success.object_path });
@ -190,7 +191,7 @@ pub fn linkWithZld(
if (comp.compiler_rt_obj) |obj| try positionals.append(.{ .path = obj.full_object_path });
// libc++ dep
if (options.link_libcpp) {
if (comp.config.link_libcpp) {
try positionals.ensureUnusedCapacity(2);
positionals.appendAssumeCapacity(.{ .path = comp.libcxxabi_static_lib.?.full_object_path });
positionals.appendAssumeCapacity(.{ .path = comp.libcxx_static_lib.?.full_object_path });
@ -199,23 +200,23 @@ pub fn linkWithZld(
var libs = std.StringArrayHashMap(link.SystemLib).init(arena);
{
const vals = options.system_libs.values();
const vals = comp.system_libs.values();
try libs.ensureUnusedCapacity(vals.len);
for (vals) |v| libs.putAssumeCapacity(v.path.?, v);
}
{
try libs.ensureUnusedCapacity(options.frameworks.len);
for (options.frameworks) |v| libs.putAssumeCapacity(v.path, .{
try libs.ensureUnusedCapacity(macho_file.frameworks.len);
for (macho_file.frameworks) |v| libs.putAssumeCapacity(v.path, .{
.needed = v.needed,
.weak = v.weak,
.path = v.path,
});
}
try macho_file.resolveLibSystem(arena, comp, options.lib_dirs, &libs);
try macho_file.resolveLibSystem(arena, comp, &libs);
if (options.verbose_link) {
if (comp.verbose_link) {
var argv = std.ArrayList([]const u8).init(arena);
try argv.append("zig");
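
The framework merge earlier in this hunk reserves map capacity once and then relies on the assume-capacity insert, so adding the frameworks on top of the system libraries cannot fail halfway through. A minimal standalone sketch of that pattern, with illustrative keys and a bool payload standing in for link.SystemLib:

const std = @import("std");

test "reserve once, then insert with putAssumeCapacity" {
    var libs = std.StringArrayHashMap(bool).init(std.testing.allocator);
    defer libs.deinit();

    // Stand-ins for the framework paths the linker would record.
    const frameworks = [_][]const u8{ "Cocoa", "Metal" };
    try libs.ensureUnusedCapacity(frameworks.len);
    for (frameworks) |name| libs.putAssumeCapacity(name, true);

    try std.testing.expectEqual(@as(usize, 2), libs.count());
}
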
@ -228,14 +229,14 @@ pub fn linkWithZld(
if (is_dyn_lib) {
try argv.append("-dylib");
if (options.install_name) |install_name| {
if (macho_file.install_name) |install_name| {
try argv.append("-install_name");
try argv.append(install_name);
}
}
{
const platform = Platform.fromTarget(options.target);
const platform = Platform.fromTarget(target);
try argv.append("-platform_version");
try argv.append(@tagName(platform.os_tag));
try argv.append(try std.fmt.allocPrint(arena, "{}", .{platform.version}));
@ -248,44 +249,39 @@ pub fn linkWithZld(
}
}
if (options.sysroot) |syslibroot| {
if (macho_file.sysroot) |syslibroot| {
try argv.append("-syslibroot");
try argv.append(syslibroot);
}
for (options.rpath_list) |rpath| {
for (macho_file.rpath_list) |rpath| {
try argv.append("-rpath");
try argv.append(rpath);
}
if (options.pagezero_size) |pagezero_size| {
try argv.append("-pagezero_size");
try argv.append(try std.fmt.allocPrint(arena, "0x{x}", .{pagezero_size}));
}
try argv.appendSlice(&.{
"-pagezero_size", try std.fmt.allocPrint(arena, "0x{x}", .{macho_file.pagezero_size}),
"-headerpad_size", try std.fmt.allocPrint(arena, "0x{x}", .{macho_file.headerpad_size}),
});
if (options.headerpad_size) |headerpad_size| {
try argv.append("-headerpad_size");
try argv.append(try std.fmt.allocPrint(arena, "0x{x}", .{headerpad_size}));
}
if (options.headerpad_max_install_names) {
if (macho_file.headerpad_max_install_names) {
try argv.append("-headerpad_max_install_names");
}
if (gc_sections) {
if (macho_file.base.gc_sections) {
try argv.append("-dead_strip");
}
if (options.dead_strip_dylibs) {
if (macho_file.dead_strip_dylibs) {
try argv.append("-dead_strip_dylibs");
}
if (options.entry) |entry| {
if (comp.config.entry) |entry| {
try argv.append("-e");
try argv.append(entry);
}
for (options.objects) |obj| {
for (objects) |obj| {
if (obj.must_link) {
try argv.append("-force_load");
}
@ -303,7 +299,7 @@ pub fn linkWithZld(
if (comp.compiler_rt_lib) |lib| try argv.append(lib.full_object_path);
if (comp.compiler_rt_obj) |obj| try argv.append(obj.full_object_path);
if (options.link_libcpp) {
if (comp.config.link_libcpp) {
try argv.append(comp.libcxxabi_static_lib.?.full_object_path);
try argv.append(comp.libcxx_static_lib.?.full_object_path);
}
@ -313,8 +309,8 @@ pub fn linkWithZld(
try argv.append("-lSystem");
for (options.system_libs.keys()) |l_name| {
const info = options.system_libs.get(l_name).?;
for (comp.system_libs.keys()) |l_name| {
const info = comp.system_libs.get(l_name).?;
const arg = if (info.needed)
try std.fmt.allocPrint(arena, "-needed-l{s}", .{l_name})
else if (info.weak)
@ -324,11 +320,7 @@ pub fn linkWithZld(
try argv.append(arg);
}
for (options.lib_dirs) |lib_dir| {
try argv.append(try std.fmt.allocPrint(arena, "-L{s}", .{lib_dir}));
}
for (options.frameworks) |framework| {
for (macho_file.frameworks) |framework| {
const name = std.fs.path.stem(framework.path);
const arg = if (framework.needed)
try std.fmt.allocPrint(arena, "-needed_framework {s}", .{name})
@ -339,11 +331,7 @@ pub fn linkWithZld(
try argv.append(arg);
}
for (options.framework_dirs) |framework_dir| {
try argv.append(try std.fmt.allocPrint(arena, "-F{s}", .{framework_dir}));
}
if (is_dyn_lib and (options.allow_shlib_undefined orelse false)) {
if (is_dyn_lib and macho_file.base.allow_shlib_undefined) {
try argv.append("-undefined");
try argv.append("dynamic_lookup");
}
@ -412,7 +400,7 @@ pub fn linkWithZld(
};
}
if (gc_sections) {
if (macho_file.base.gc_sections) {
try dead_strip.gcAtoms(macho_file);
}
@ -519,7 +507,7 @@ pub fn linkWithZld(
// where the code signature goes into.
var codesig = CodeSignature.init(MachO.getPageSize(cpu_arch));
codesig.code_directory.ident = fs.path.basename(full_out_path);
if (options.entitlements) |path| {
if (macho_file.entitlements) |path| {
try codesig.addEntitlements(gpa, path);
}
try macho_file.writeCodeSignaturePadding(&codesig);
@ -539,7 +527,7 @@ pub fn linkWithZld(
try lc_writer.writeStruct(macho_file.dysymtab_cmd);
try load_commands.writeDylinkerLC(lc_writer);
switch (macho_file.base.options.output_mode) {
switch (output_mode) {
.Exe => blk: {
const seg_id = macho_file.header_segment_cmd_index.?;
const seg = macho_file.segments.items[seg_id];
@ -555,10 +543,10 @@ pub fn linkWithZld(
try lc_writer.writeStruct(macho.entry_point_command{
.entryoff = @as(u32, @intCast(addr - seg.vmaddr)),
.stacksize = macho_file.base.options.stack_size_override orelse 0,
.stacksize = macho_file.base.stack_size,
});
},
.Lib => if (macho_file.base.options.link_mode == .Dynamic) {
.Lib => if (link_mode == .Dynamic) {
try load_commands.writeDylibIdLC(gpa, &macho_file.base.options, lc_writer);
},
else => {},
@ -598,11 +586,11 @@ pub fn linkWithZld(
if (codesig) |*csig| {
try macho_file.writeCodeSignature(comp, csig); // code signing always comes last
try MachO.invalidateKernelCache(directory.handle, macho_file.base.options.emit.?.sub_path);
try MachO.invalidateKernelCache(directory.handle, macho_file.base.emit.sub_path);
}
}
if (!options.disable_lld_caching) {
if (!macho_file.base.disable_lld_caching) {
// Update the file with the digest. If it fails we can continue; it only
// means that the next invocation will have an unnecessary cache miss.
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
@ -622,12 +610,11 @@ pub fn linkWithZld(
fn createSegments(macho_file: *MachO) !void {
const gpa = macho_file.base.allocator;
const pagezero_vmsize = macho_file.base.options.pagezero_size orelse MachO.default_pagezero_vmsize;
const page_size = MachO.getPageSize(macho_file.base.options.target.cpu.arch);
const aligned_pagezero_vmsize = mem.alignBackward(u64, pagezero_vmsize, page_size);
const aligned_pagezero_vmsize = mem.alignBackward(u64, macho_file.pagezero_vmsize, page_size);
if (macho_file.base.options.output_mode != .Lib and aligned_pagezero_vmsize > 0) {
if (aligned_pagezero_vmsize != pagezero_vmsize) {
log.warn("requested __PAGEZERO size (0x{x}) is not page aligned", .{pagezero_vmsize});
if (aligned_pagezero_vmsize != macho_file.pagezero_vmsize) {
log.warn("requested __PAGEZERO size (0x{x}) is not page aligned", .{macho_file.pagezero_vmsize});
log.warn(" rounding down to 0x{x}", .{aligned_pagezero_vmsize});
}
macho_file.pagezero_segment_cmd_index = @intCast(macho_file.segments.items.len);
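
The rounding here is plain std.mem.alignBackward; when the requested __PAGEZERO size is not a multiple of the page size, the mismatch is exactly what fires the two warnings above. A small self-contained sketch of that arithmetic, with an illustrative page size and request:

const std = @import("std");

test "__PAGEZERO vmsize rounds down to a page boundary" {
    const page_size: u64 = 0x4000; // 16 KiB pages, e.g. arm64 macOS
    const requested: u64 = 0x100000123; // deliberately unaligned
    const aligned = std.mem.alignBackward(u64, requested, page_size);
    try std.testing.expect(aligned != requested); // this mismatch triggers the warning path
    try std.testing.expectEqual(@as(u64, 0x100000000), aligned);
}
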

View File

@ -24,46 +24,56 @@ const LlvmObject = @import("../codegen/llvm.zig").Object;
base: link.File,
llvm_object: *LlvmObject,
ptx_file_name: []const u8,
pub fn createEmpty(gpa: Allocator, options: link.Options) !*NvPtx {
if (!options.use_llvm) return error.PtxArchNotSupported;
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*NvPtx {
if (build_options.only_c) unreachable;
if (!options.target.cpu.arch.isNvptx()) return error.PtxArchNotSupported;
const target = options.comp.root_mod.resolved_target.result;
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
const use_llvm = options.comp.config.use_llvm;
switch (options.target.os.tag) {
assert(use_llvm); // Caught by Compilation.Config.resolve.
assert(!use_lld); // Caught by Compilation.Config.resolve.
assert(target.cpu.arch.isNvptx()); // Caught by Compilation.Config.resolve.
switch (target.os.tag) {
// TODO: does it also work with nvcl ?
.cuda => {},
else => return error.PtxArchNotSupported,
}
const llvm_object = try LlvmObject.create(gpa, options);
const nvptx = try gpa.create(NvPtx);
const llvm_object = try LlvmObject.create(arena, options);
const nvptx = try arena.create(NvPtx);
nvptx.* = .{
.base = .{
.tag = .nvptx,
.options = options,
.comp = options.comp,
.emit = options.emit,
.gc_sections = options.gc_sections orelse false,
.stack_size = options.stack_size orelse 0,
.allow_shlib_undefined = options.allow_shlib_undefined orelse false,
.file = null,
.allocator = gpa,
.disable_lld_caching = options.disable_lld_caching,
.build_id = options.build_id,
.rpath_list = options.rpath_list,
.force_undefined_symbols = options.force_undefined_symbols,
.function_sections = options.function_sections,
.data_sections = options.data_sections,
},
.llvm_object = llvm_object,
.ptx_file_name = try std.mem.join(gpa, "", &[_][]const u8{ options.root_name, ".ptx" }),
};
return nvptx;
}
pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Options) !*NvPtx {
if (!options.use_llvm) return error.PtxArchNotSupported;
assert(options.target.ofmt == .nvptx);
log.debug("Opening .ptx target file {s}", .{sub_path});
return createEmpty(allocator, options);
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*NvPtx {
const target = options.comp.root_mod.resolved_target.result;
assert(target.ofmt == .nvptx);
return createEmpty(arena, options);
}
pub fn deinit(self: *NvPtx) void {
self.llvm_object.destroy(self.base.allocator);
self.base.allocator.free(self.ptx_file_name);
self.llvm_object.deinit();
}
pub fn updateFunc(self: *NvPtx, module: *Module, func_index: InternPool.Index, air: Air, liveness: Liveness) !void {
@ -110,7 +120,7 @@ pub fn flushModule(self: *NvPtx, comp: *Compilation, prog_node: *std.Progress.No
comp.emit_asm = .{
// 'null' means using the default cache dir: zig-cache/o/...
.directory = null,
.basename = self.ptx_file_name,
.basename = self.base.emit.sub_path,
};
defer {
comp.bin_file.options.emit = outfile;

View File

@ -318,12 +318,12 @@ pub fn createEmpty(gpa: Allocator, options: link.Options) !*Plan9 {
.magic = try aout.magicFromArch(self.base.options.target.cpu.arch),
};
// a / will always be in a file path
try self.file_segments.put(self.base.allocator, "/", 1);
try self.file_segments.put(gpa, "/", 1);
return self;
}
fn putFn(self: *Plan9, decl_index: InternPool.DeclIndex, out: FnDeclOutput) !void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);
const fn_map_res = try self.fn_decl_table.getOrPut(gpa, decl.getFileScope(mod));
@ -379,6 +379,7 @@ fn putFn(self: *Plan9, decl_index: InternPool.DeclIndex, out: FnDeclOutput) !voi
}
fn addPathComponents(self: *Plan9, path: []const u8, a: *std.ArrayList(u8)) !void {
const gpa = self.base.comp.gpa;
const sep = std.fs.path.sep;
var it = std.mem.tokenizeScalar(u8, path, sep);
while (it.next()) |component| {
@ -386,7 +387,7 @@ fn addPathComponents(self: *Plan9, path: []const u8, a: *std.ArrayList(u8)) !voi
try a.writer().writeInt(u16, num, .big);
} else {
self.file_segments_i += 1;
try self.file_segments.put(self.base.allocator, component, self.file_segments_i);
try self.file_segments.put(gpa, component, self.file_segments_i);
try a.writer().writeInt(u16, self.file_segments_i, .big);
}
}
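
addPathComponents maps every unique path component to a stable u16 segment id, with "/" pre-seeded as id 1 when the linker is created. A standalone sketch of that numbering scheme, assuming a plain '/' separator and a std.StringArrayHashMap in place of the linker's own table:

const std = @import("std");

test "each unique path component gets an increasing segment id" {
    var segments = std.StringArrayHashMap(u16).init(std.testing.allocator);
    defer segments.deinit();

    try segments.put("/", 1); // a '/' will always be in a file path
    var next_id: u16 = 1;

    var it = std.mem.tokenizeScalar(u8, "/usr/lib/zig/std.zig", '/');
    while (it.next()) |component| {
        const gop = try segments.getOrPut(component);
        if (!gop.found_existing) {
            next_id += 1;
            gop.value_ptr.* = next_id;
        }
    }

    try std.testing.expectEqual(@as(u16, 2), segments.get("usr").?);
    try std.testing.expectEqual(@as(u16, 5), segments.get("std.zig").?);
}
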
@ -397,6 +398,7 @@ pub fn updateFunc(self: *Plan9, mod: *Module, func_index: InternPool.Index, air:
@panic("Attempted to compile for object format that was disabled by build configuration");
}
const gpa = self.base.comp.gpa;
const func = mod.funcInfo(func_index);
const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index);
@ -404,10 +406,10 @@ pub fn updateFunc(self: *Plan9, mod: *Module, func_index: InternPool.Index, air:
const atom_idx = try self.seeDecl(decl_index);
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
var dbg_info_output: DebugInfoOutput = .{
.dbg_line = std.ArrayList(u8).init(self.base.allocator),
.dbg_line = std.ArrayList(u8).init(gpa),
.start_line = null,
.end_line = undefined,
.pcop_change_index = null,
@ -448,14 +450,15 @@ pub fn updateFunc(self: *Plan9, mod: *Module, func_index: InternPool.Index, air:
}
pub fn lowerUnnamedConst(self: *Plan9, tv: TypedValue, decl_index: InternPool.DeclIndex) !u32 {
const gpa = self.base.comp.gpa;
_ = try self.seeDecl(decl_index);
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);
const gop = try self.unnamed_const_atoms.getOrPut(self.base.allocator, decl_index);
const gop = try self.unnamed_const_atoms.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
}
@ -465,7 +468,7 @@ pub fn lowerUnnamedConst(self: *Plan9, tv: TypedValue, decl_index: InternPool.De
const index = unnamed_consts.items.len;
// name is freed when the unnamed const is freed
const name = try std.fmt.allocPrint(self.base.allocator, "__unnamed_{s}_{d}", .{ decl_name, index });
const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
const sym_index = try self.allocateSymbolIndex();
const new_atom_idx = try self.createAtom();
@ -498,17 +501,18 @@ pub fn lowerUnnamedConst(self: *Plan9, tv: TypedValue, decl_index: InternPool.De
},
};
// duped_code is freed when the unnamed const is freed
const duped_code = try self.base.allocator.dupe(u8, code);
errdefer self.base.allocator.free(duped_code);
const duped_code = try gpa.dupe(u8, code);
errdefer gpa.free(duped_code);
const new_atom = self.getAtomPtr(new_atom_idx);
new_atom.* = info;
new_atom.code = .{ .code_ptr = duped_code.ptr, .other = .{ .code_len = duped_code.len } };
try unnamed_consts.append(self.base.allocator, new_atom_idx);
try unnamed_consts.append(gpa, new_atom_idx);
// we return the new_atom_idx to codegen
return new_atom_idx;
}
pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: InternPool.DeclIndex) !void {
const gpa = self.base.comp.gpa;
const decl = mod.declPtr(decl_index);
if (decl.isExtern(mod)) {
@ -517,7 +521,7 @@ pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: InternPool.DeclIndex)
}
const atom_idx = try self.seeDecl(decl_index);
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
// TODO we need the symbol index for symbol in the table of locals for the containing atom
@ -535,16 +539,17 @@ pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: InternPool.DeclIndex)
return;
},
};
try self.data_decl_table.ensureUnusedCapacity(self.base.allocator, 1);
const duped_code = try self.base.allocator.dupe(u8, code);
try self.data_decl_table.ensureUnusedCapacity(gpa, 1);
const duped_code = try gpa.dupe(u8, code);
self.getAtomPtr(self.decls.get(decl_index).?.index).code = .{ .code_ptr = null, .other = .{ .decl_index = decl_index } };
if (self.data_decl_table.fetchPutAssumeCapacity(decl_index, duped_code)) |old_entry| {
self.base.allocator.free(old_entry.value);
gpa.free(old_entry.value);
}
return self.updateFinish(decl_index);
}
/// called at the end of update{Decl,Func}
fn updateFinish(self: *Plan9, decl_index: InternPool.DeclIndex) !void {
const gpa = self.base.comp.gpa;
const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);
const is_fn = (decl.ty.zigTypeTag(mod) == .Fn);
@ -558,7 +563,7 @@ fn updateFinish(self: *Plan9, decl_index: InternPool.DeclIndex) !void {
const sym: aout.Sym = .{
.value = undefined, // the value of stuff gets filled in in flushModule
.type = atom.type,
.name = try self.base.allocator.dupe(u8, mod.intern_pool.stringToSlice(decl.name)),
.name = try gpa.dupe(u8, mod.intern_pool.stringToSlice(decl.name)),
};
if (atom.sym_index) |s| {
@ -571,10 +576,11 @@ fn updateFinish(self: *Plan9, decl_index: InternPool.DeclIndex) !void {
}
fn allocateSymbolIndex(self: *Plan9) !usize {
const gpa = self.base.comp.gpa;
if (self.syms_index_free_list.popOrNull()) |i| {
return i;
} else {
_ = try self.syms.addOne(self.base.allocator);
_ = try self.syms.addOne(gpa);
return self.syms.items.len - 1;
}
}
@ -589,7 +595,8 @@ fn allocateGotIndex(self: *Plan9) usize {
}
pub fn flush(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {
assert(!self.base.options.use_lld);
const use_lld = build_options.have_llvm and self.base.comp.config.use_lld;
assert(!use_lld);
switch (self.base.options.effectiveOutputMode()) {
.Exe => {},
@ -650,7 +657,8 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
@panic("Attempted to compile for object format that was disabled by build configuration");
}
_ = comp;
const gpa = comp.gpa;
const tracy = trace(@src());
defer tracy.end();
@ -691,12 +699,12 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
const atom_count = self.atomCount();
assert(self.got_len == atom_count + self.got_index_free_list.items.len);
const got_size = self.got_len * if (!self.sixtyfour_bit) @as(u32, 4) else 8;
var got_table = try self.base.allocator.alloc(u8, got_size);
defer self.base.allocator.free(got_table);
var got_table = try gpa.alloc(u8, got_size);
defer gpa.free(got_table);
// + 4 for header, got, symbols, linecountinfo
var iovecs = try self.base.allocator.alloc(std.os.iovec_const, self.atomCount() + 4 - self.externCount());
defer self.base.allocator.free(iovecs);
var iovecs = try gpa.alloc(std.os.iovec_const, self.atomCount() + 4 - self.externCount());
defer gpa.free(iovecs);
const file = self.base.file.?;
@ -709,7 +717,7 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
var iovecs_i: usize = 1;
var text_i: u64 = 0;
var linecountinfo = std.ArrayList(u8).init(self.base.allocator);
var linecountinfo = std.ArrayList(u8).init(gpa);
defer linecountinfo.deinit();
// text
{
@ -901,10 +909,10 @@ pub fn flushModule(self: *Plan9, comp: *Compilation, prog_node: *std.Progress.No
}
}
}
var sym_buf = std.ArrayList(u8).init(self.base.allocator);
var sym_buf = std.ArrayList(u8).init(gpa);
try self.writeSyms(&sym_buf);
const syms = try sym_buf.toOwnedSlice();
defer self.base.allocator.free(syms);
defer gpa.free(syms);
assert(2 + self.atomCount() - self.externCount() == iovecs_i); // we didn't write all the decls
iovecs[iovecs_i] = .{ .iov_base = syms.ptr, .iov_len = syms.len };
iovecs_i += 1;
@ -985,6 +993,7 @@ fn addDeclExports(
decl_index: InternPool.DeclIndex,
exports: []const *Module.Export,
) !void {
const gpa = self.base.comp.gpa;
const metadata = self.decls.getPtr(decl_index).?;
const atom = self.getAtom(metadata.index);
@ -994,7 +1003,7 @@ fn addDeclExports(
if (exp.opts.section.unwrap()) |section_name| {
if (!mod.intern_pool.stringEqlSlice(section_name, ".text") and !mod.intern_pool.stringEqlSlice(section_name, ".data")) {
try mod.failed_exports.put(mod.gpa, exp, try Module.ErrorMsg.create(
self.base.allocator,
gpa,
mod.declPtr(decl_index).srcLoc(mod),
"plan9 does not support extra sections",
.{},
@ -1005,19 +1014,20 @@ fn addDeclExports(
const sym = .{
.value = atom.offset.?,
.type = atom.type.toGlobal(),
.name = try self.base.allocator.dupe(u8, exp_name),
.name = try gpa.dupe(u8, exp_name),
};
if (metadata.getExport(self, exp_name)) |i| {
self.syms.items[i] = sym;
} else {
try self.syms.append(self.base.allocator, sym);
try metadata.exports.append(self.base.allocator, self.syms.items.len - 1);
try self.syms.append(gpa, sym);
try metadata.exports.append(gpa, self.syms.items.len - 1);
}
}
}
pub fn freeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) void {
const gpa = self.base.comp.gpa;
// TODO audit the lifetimes of decls table entries. It's possible to get
// freeDecl without any updateDecl in between.
// However that is planned to change, see the TODO comment in Module.zig
@ -1029,17 +1039,17 @@ pub fn freeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) void {
const symidx_and_submap = self.fn_decl_table.get(decl.getFileScope(mod)).?;
var submap = symidx_and_submap.functions;
if (submap.fetchSwapRemove(decl_index)) |removed_entry| {
self.base.allocator.free(removed_entry.value.code);
self.base.allocator.free(removed_entry.value.lineinfo);
gpa.free(removed_entry.value.code);
gpa.free(removed_entry.value.lineinfo);
}
if (submap.count() == 0) {
self.syms.items[symidx_and_submap.sym_index] = aout.Sym.undefined_symbol;
self.syms_index_free_list.append(self.base.allocator, symidx_and_submap.sym_index) catch {};
submap.deinit(self.base.allocator);
self.syms_index_free_list.append(gpa, symidx_and_submap.sym_index) catch {};
submap.deinit(gpa);
}
} else {
if (self.data_decl_table.fetchSwapRemove(decl_index)) |removed_entry| {
self.base.allocator.free(removed_entry.value);
gpa.free(removed_entry.value);
}
}
if (self.decls.fetchRemove(decl_index)) |const_kv| {
@ -1047,35 +1057,36 @@ pub fn freeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) void {
const atom = self.getAtom(kv.value.index);
if (atom.got_index) |i| {
// TODO: if this catch {} is triggered, an assertion in flushModule will be triggered, because got_index_free_list will have the wrong length
self.got_index_free_list.append(self.base.allocator, i) catch {};
self.got_index_free_list.append(gpa, i) catch {};
}
if (atom.sym_index) |i| {
self.syms_index_free_list.append(self.base.allocator, i) catch {};
self.syms_index_free_list.append(gpa, i) catch {};
self.syms.items[i] = aout.Sym.undefined_symbol;
}
kv.value.exports.deinit(self.base.allocator);
kv.value.exports.deinit(gpa);
}
self.freeUnnamedConsts(decl_index);
{
const atom_index = self.decls.get(decl_index).?.index;
const relocs = self.relocs.getPtr(atom_index) orelse return;
relocs.clearAndFree(self.base.allocator);
relocs.clearAndFree(gpa);
assert(self.relocs.remove(atom_index));
}
}
fn freeUnnamedConsts(self: *Plan9, decl_index: InternPool.DeclIndex) void {
const gpa = self.base.comp.gpa;
const unnamed_consts = self.unnamed_const_atoms.getPtr(decl_index) orelse return;
for (unnamed_consts.items) |atom_idx| {
const atom = self.getAtom(atom_idx);
self.base.allocator.free(self.syms.items[atom.sym_index.?].name);
gpa.free(self.syms.items[atom.sym_index.?].name);
self.syms.items[atom.sym_index.?] = aout.Sym.undefined_symbol;
self.syms_index_free_list.append(self.base.allocator, atom.sym_index.?) catch {};
self.syms_index_free_list.append(gpa, atom.sym_index.?) catch {};
}
unnamed_consts.clearAndFree(self.base.allocator);
unnamed_consts.clearAndFree(gpa);
}
fn createAtom(self: *Plan9) !Atom.Index {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const index = @as(Atom.Index, @intCast(self.atoms.items.len));
const atom = try self.atoms.addOne(gpa);
atom.* = .{
@ -1089,7 +1100,8 @@ fn createAtom(self: *Plan9) !Atom.Index {
}
pub fn seeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) !Atom.Index {
const gop = try self.decls.getOrPut(self.base.allocator, decl_index);
const gpa = self.base.comp.gpa;
const gop = try self.decls.getOrPut(gpa, decl_index);
if (!gop.found_existing) {
const index = try self.createAtom();
self.getAtomPtr(index).got_index = self.allocateGotIndex();
@ -1134,7 +1146,8 @@ pub fn updateExports(
}
pub fn getOrCreateAtomForLazySymbol(self: *Plan9, sym: File.LazySymbol) !Atom.Index {
const gop = try self.lazy_syms.getOrPut(self.base.allocator, sym.getDecl(self.base.options.module.?));
const gpa = self.base.comp.gpa;
const gop = try self.lazy_syms.getOrPut(gpa, sym.getDecl(self.base.options.module.?));
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
if (!gop.found_existing) gop.value_ptr.* = .{};
@ -1160,7 +1173,7 @@ pub fn getOrCreateAtomForLazySymbol(self: *Plan9, sym: File.LazySymbol) !Atom.In
}
fn updateLazySymbolAtom(self: *Plan9, sym: File.LazySymbol, atom_index: Atom.Index) !void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const mod = self.base.options.module.?;
var required_alignment: InternPool.Alignment = .none;
@ -1206,8 +1219,8 @@ fn updateLazySymbolAtom(self: *Plan9, sym: File.LazySymbol, atom_index: Atom.Ind
},
};
// duped_code is freed when the atom is freed
const duped_code = try self.base.allocator.dupe(u8, code);
errdefer self.base.allocator.free(duped_code);
const duped_code = try gpa.dupe(u8, code);
errdefer gpa.free(duped_code);
self.getAtomPtr(atom_index).code = .{
.code_ptr = duped_code.ptr,
.other = .{ .code_len = duped_code.len },
@ -1215,13 +1228,13 @@ fn updateLazySymbolAtom(self: *Plan9, sym: File.LazySymbol, atom_index: Atom.Ind
}
pub fn deinit(self: *Plan9) void {
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
{
var it = self.relocs.valueIterator();
while (it.next()) |relocs| {
relocs.deinit(self.base.allocator);
relocs.deinit(gpa);
}
self.relocs.deinit(self.base.allocator);
self.relocs.deinit(gpa);
}
// free the unnamed consts
var it_unc = self.unnamed_const_atoms.iterator();
@ -1280,24 +1293,36 @@ pub fn deinit(self: *Plan9) void {
}
}
pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Options) !*Plan9 {
if (options.use_llvm)
return error.LLVMBackendDoesNotSupportPlan9;
assert(options.target.ofmt == .plan9);
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Plan9 {
if (build_options.only_c) unreachable;
const self = try createEmpty(allocator, options);
const target = options.comp.root_mod.resolved_target.result;
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
const use_llvm = options.comp.config.use_llvm;
assert(!use_llvm); // Caught by Compilation.Config.resolve.
assert(!use_lld); // Caught by Compilation.Config.resolve.
assert(target.ofmt == .plan9);
const self = try createEmpty(arena, options);
errdefer self.base.destroy();
const file = try options.emit.?.directory.handle.createFile(sub_path, .{
const file = try options.emit.directory.handle.createFile(options.emit.sub_path, .{
.read = true,
.mode = link.determineMode(options),
.mode = link.File.determineMode(
use_lld,
options.comp.config.output_mode,
options.comp.config.link_mode,
),
});
errdefer file.close();
self.base.file = file;
self.bases = defaultBaseAddrs(options.target.cpu.arch);
self.bases = defaultBaseAddrs(target.cpu.arch);
try self.syms.appendSlice(self.base.allocator, &.{
const gpa = options.comp.gpa;
try self.syms.appendSlice(gpa, &.{
// we include the global offset table to make it easier for debugging
.{
.value = self.getAddr(0, .d), // the global offset table starts at 0
@ -1490,7 +1515,7 @@ pub fn lowerAnonDecl(self: *Plan9, decl_val: InternPool.Index, src_loc: Module.S
// be used by more than one function, however, its address is being used so we need
// to put it in some location.
// ...
const gpa = self.base.allocator;
const gpa = self.base.comp.gpa;
const gop = try self.anon_decls.getOrPut(gpa, decl_val);
const mod = self.base.options.module.?;
if (!gop.found_existing) {
@ -1538,11 +1563,12 @@ pub fn getAnonDeclVAddr(self: *Plan9, decl_val: InternPool.Index, reloc_info: li
}
pub fn addReloc(self: *Plan9, parent_index: Atom.Index, reloc: Reloc) !void {
const gop = try self.relocs.getOrPut(self.base.allocator, parent_index);
const gpa = self.base.comp.gpa;
const gop = try self.relocs.getOrPut(gpa, parent_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
}
try gop.value_ptr.append(self.base.allocator, reloc);
try gop.value_ptr.append(gpa, reloc);
}
pub fn getAtom(self: *const Plan9, index: Atom.Index) Atom {

View File

@ -47,48 +47,65 @@ base: link.File,
object: codegen.Object,
pub fn createEmpty(gpa: Allocator, options: link.Options) !*SpirV {
const self = try gpa.create(SpirV);
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*SpirV {
const gpa = options.comp.gpa;
const target = options.comp.root_mod.resolved_target.result;
const self = try arena.create(SpirV);
self.* = .{
.base = .{
.tag = .spirv,
.options = options,
.comp = options.comp,
.emit = options.emit,
.gc_sections = options.gc_sections orelse false,
.stack_size = options.stack_size orelse 0,
.allow_shlib_undefined = options.allow_shlib_undefined orelse false,
.file = null,
.allocator = gpa,
.disable_lld_caching = options.disable_lld_caching,
.build_id = options.build_id,
.rpath_list = options.rpath_list,
.force_undefined_symbols = options.force_undefined_symbols,
.function_sections = options.function_sections,
.data_sections = options.data_sections,
},
.object = codegen.Object.init(gpa),
};
errdefer self.deinit();
// TODO: Figure out where to put all of these
switch (options.target.cpu.arch) {
switch (target.cpu.arch) {
.spirv32, .spirv64 => {},
else => return error.TODOArchNotSupported,
else => unreachable, // Caught by Compilation.Config.resolve.
}
switch (options.target.os.tag) {
switch (target.os.tag) {
.opencl, .glsl450, .vulkan => {},
else => return error.TODOOsNotSupported,
else => unreachable, // Caught by Compilation.Config.resolve.
}
if (options.target.abi != .none) {
return error.TODOAbiNotSupported;
}
assert(target.abi != .none); // Caught by Compilation.Config.resolve.
return self;
}
pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Options) !*SpirV {
assert(options.target.ofmt == .spirv);
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*SpirV {
if (build_options.only_c) unreachable;
if (options.use_llvm) return error.LLVM_BackendIsTODO_ForSpirV; // TODO: LLVM Doesn't support SpirV at all.
if (options.use_lld) return error.LLD_LinkingIsTODO_ForSpirV; // TODO: LLD Doesn't support SpirV at all.
const target = options.comp.root_mod.resolved_target.result;
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
const use_llvm = options.comp.config.use_llvm;
const spirv = try createEmpty(allocator, options);
assert(!use_llvm); // Caught by Compilation.Config.resolve.
assert(!use_lld); // Caught by Compilation.Config.resolve.
assert(target.ofmt == .spirv); // Caught by Compilation.Config.resolve.
const spirv = try createEmpty(arena, options);
errdefer spirv.base.destroy();
// TODO: read the file and keep valid parts instead of truncating
const file = try options.emit.?.directory.handle.createFile(sub_path, .{ .truncate = true, .read = true });
const file = try options.emit.?.directory.handle.createFile(options.emit.sub_path, .{
.truncate = true,
.read = true,
});
spirv.base.file = file;
return spirv;
}
@ -150,11 +167,7 @@ pub fn freeDecl(self: *SpirV, decl_index: InternPool.DeclIndex) void {
}
pub fn flush(self: *SpirV, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {
if (build_options.have_llvm and self.base.options.use_lld) {
return error.LLD_LinkingIsTODO_ForSpirV; // TODO: LLD Doesn't support SpirV at all.
} else {
return self.flushModule(comp, prog_node);
}
return self.flushModule(comp, prog_node);
}
pub fn flushModule(self: *SpirV, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {

File diff suppressed because it is too large

View File

@ -842,7 +842,7 @@ fn buildOutputType(
var linker_print_gc_sections: bool = false;
var linker_print_icf_sections: bool = false;
var linker_print_map: bool = false;
var linker_opt_bisect_limit: i32 = -1;
var llvm_opt_bisect_limit: c_int = -1;
var linker_z_nocopyreloc = false;
var linker_z_nodelete = false;
var linker_z_notext = false;
@ -859,7 +859,7 @@ fn buildOutputType(
var linker_module_definition_file: ?[]const u8 = null;
var test_no_exec = false;
var force_undefined_symbols: std.StringArrayHashMapUnmanaged(void) = .{};
var stack_size_override: ?u64 = null;
var stack_size: ?u64 = null;
var image_base_override: ?u64 = null;
var link_eh_frame_hdr = false;
var link_emit_relocs = false;
@ -892,7 +892,7 @@ fn buildOutputType(
var contains_res_file: bool = false;
var reference_trace: ?u32 = null;
var pdb_out_path: ?[]const u8 = null;
var dwarf_format: ?std.dwarf.Format = null;
var debug_format: ?link.File.DebugFormat = null;
var error_limit: ?Module.ErrorInt = null;
var want_structured_cfg: ?bool = null;
// These are before resolving sysroot.
@ -1129,10 +1129,7 @@ fn buildOutputType(
} else if (mem.eql(u8, arg, "--force_undefined")) {
try force_undefined_symbols.put(arena, args_iter.nextOrFatal(), {});
} else if (mem.eql(u8, arg, "--stack")) {
const next_arg = args_iter.nextOrFatal();
stack_size_override = std.fmt.parseUnsigned(u64, next_arg, 0) catch |err| {
fatal("unable to parse stack size '{s}': {s}", .{ next_arg, @errorName(err) });
};
stack_size = parseStackSize(args_iter.nextOrFatal());
} else if (mem.eql(u8, arg, "--image-base")) {
const next_arg = args_iter.nextOrFatal();
image_base_override = std.fmt.parseUnsigned(u64, next_arg, 0) catch |err| {
@ -1487,9 +1484,9 @@ fn buildOutputType(
} else if (mem.eql(u8, arg, "-fno-strip")) {
mod_opts.strip = false;
} else if (mem.eql(u8, arg, "-gdwarf32")) {
dwarf_format = .@"32";
debug_format = .{ .dwarf = .@"32" };
} else if (mem.eql(u8, arg, "-gdwarf64")) {
dwarf_format = .@"64";
debug_format = .{ .dwarf = .@"64" };
} else if (mem.eql(u8, arg, "-fformatted-panics")) {
formatted_panics = true;
} else if (mem.eql(u8, arg, "-fno-formatted-panics")) {
@ -1511,7 +1508,9 @@ fn buildOutputType(
} else if (mem.eql(u8, arg, "-fno-builtin")) {
no_builtin = true;
} else if (mem.startsWith(u8, arg, "-fopt-bisect-limit=")) {
linker_opt_bisect_limit = std.math.lossyCast(i32, parseIntSuffix(arg, "-fopt-bisect-limit=".len));
const next_arg = arg["-fopt-bisect-limit=".len..];
llvm_opt_bisect_limit = std.fmt.parseInt(c_int, next_arg, 0) catch |err|
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
} else if (mem.eql(u8, arg, "--eh-frame-hdr")) {
link_eh_frame_hdr = true;
} else if (mem.eql(u8, arg, "--dynamicbase")) {
@ -1994,11 +1993,11 @@ fn buildOutputType(
},
.gdwarf32 => {
mod_opts.strip = false;
dwarf_format = .@"32";
debug_format = .{ .dwarf = .@"32" };
},
.gdwarf64 => {
mod_opts.strip = false;
dwarf_format = .@"64";
debug_format = .{ .dwarf = .@"64" };
},
.sanitize => {
if (mem.eql(u8, it.only_arg, "undefined")) {
@ -2257,10 +2256,7 @@ fn buildOutputType(
} else if (mem.eql(u8, z_arg, "norelro")) {
linker_z_relro = false;
} else if (mem.startsWith(u8, z_arg, "stack-size=")) {
const next_arg = z_arg["stack-size=".len..];
stack_size_override = std.fmt.parseUnsigned(u64, next_arg, 0) catch |err| {
fatal("unable to parse stack size '{s}': {s}", .{ next_arg, @errorName(err) });
};
stack_size = parseStackSize(z_arg["stack-size=".len..]);
} else if (mem.startsWith(u8, z_arg, "common-page-size=")) {
linker_z_common_page_size = parseIntSuffix(z_arg, "common-page-size=".len);
} else if (mem.startsWith(u8, z_arg, "max-page-size=")) {
@ -2285,10 +2281,7 @@ fn buildOutputType(
} else if (mem.eql(u8, arg, "-u")) {
try force_undefined_symbols.put(arena, linker_args_it.nextOrFatal(), {});
} else if (mem.eql(u8, arg, "--stack") or mem.eql(u8, arg, "-stack_size")) {
const stack_size = linker_args_it.nextOrFatal();
stack_size_override = std.fmt.parseUnsigned(u64, stack_size, 0) catch |err| {
fatal("unable to parse stack size override '{s}': {s}", .{ stack_size, @errorName(err) });
};
stack_size = parseStackSize(linker_args_it.nextOrFatal());
} else if (mem.eql(u8, arg, "--image-base")) {
const image_base = linker_args_it.nextOrFatal();
image_base_override = std.fmt.parseUnsigned(u64, image_base, 0) catch |err| {
@ -3407,7 +3400,7 @@ fn buildOutputType(
.linker_print_gc_sections = linker_print_gc_sections,
.linker_print_icf_sections = linker_print_icf_sections,
.linker_print_map = linker_print_map,
.linker_opt_bisect_limit = linker_opt_bisect_limit,
.llvm_opt_bisect_limit = llvm_opt_bisect_limit,
.linker_global_base = linker_global_base,
.linker_export_symbol_names = linker_export_symbol_names.items,
.linker_z_nocopyreloc = linker_z_nocopyreloc,
@ -3430,7 +3423,7 @@ fn buildOutputType(
.link_eh_frame_hdr = link_eh_frame_hdr,
.link_emit_relocs = link_emit_relocs,
.force_undefined_symbols = force_undefined_symbols,
.stack_size_override = stack_size_override,
.stack_size = stack_size,
.image_base_override = image_base_override,
.formatted_panics = formatted_panics,
.function_sections = function_sections,
@ -3459,7 +3452,7 @@ fn buildOutputType(
.test_runner_path = test_runner_path,
.disable_lld_caching = !output_to_cache,
.subsystem = subsystem,
.dwarf_format = dwarf_format,
.debug_format = debug_format,
.debug_compile_errors = debug_compile_errors,
.enable_link_snapshots = enable_link_snapshots,
.install_name = install_name,
@ -7688,3 +7681,8 @@ fn resolveTargetQueryOrFatal(target_query: std.Target.Query) std.Target {
return std.zig.system.resolveTargetQuery(target_query) catch |err|
fatal("unable to resolve target: {s}", .{@errorName(err)});
}
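
Earlier in buildOutputType the -gdwarf32/-gdwarf64 handling now fills a ?link.File.DebugFormat instead of a bare ?std.dwarf.Format. A minimal sketch of why a tagged union fits: stripping, CodeView, and DWARF-with-a-width become mutually exclusive states of one value. The strip and code_view variants are assumed for illustration; only the dwarf payload is taken from the flag handling shown above.

const std = @import("std");

// Assumed shape for illustration; the real definition lives in link.zig.
const DebugFormat = union(enum) {
    strip,
    code_view,
    dwarf: std.dwarf.Format,
};

test "-gdwarf64 selects a 64-bit DWARF debug format" {
    var debug_format: ?DebugFormat = null;
    const arg = "-gdwarf64";
    if (std.mem.eql(u8, arg, "-gdwarf32")) {
        debug_format = .{ .dwarf = .@"32" };
    } else if (std.mem.eql(u8, arg, "-gdwarf64")) {
        debug_format = .{ .dwarf = .@"64" };
    }
    try std.testing.expect(debug_format.?.dwarf == .@"64");
}
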
fn parseStackSize(s: []const u8) u64 {
return std.fmt.parseUnsigned(u64, s, 0) catch |err|
fatal("unable to parse stack size '{s}': {s}", .{ s, @errorName(err) });
}
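
parseStackSize consolidates the three copies of this parse that --stack, -z stack-size=, and -stack_size previously carried. std.fmt.parseUnsigned with base 0 auto-detects the radix, which is what lets both of the forms below through unchanged; the byte counts are illustrative:

const std = @import("std");

test "stack sizes parse in any radix with base 0" {
    try std.testing.expectEqual(@as(u64, 8388608), try std.fmt.parseUnsigned(u64, "8388608", 0));
    try std.testing.expectEqual(@as(u64, 0x800000), try std.fmt.parseUnsigned(u64, "0x800000", 0));
    // Suffixed sizes such as "8M" are rejected, which is the path that reaches fatal().
    try std.testing.expectError(error.InvalidCharacter, std.fmt.parseUnsigned(u64, "8M", 0));
}
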

View File

@ -226,7 +226,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progr
.is_native_abi = false,
.self_exe_path = comp.self_exe_path,
.verbose_cc = comp.verbose_cc,
.verbose_link = comp.bin_file.options.verbose_link,
.verbose_link = comp.verbose_link,
.verbose_air = comp.verbose_air,
.verbose_llvm_ir = comp.verbose_llvm_ir,
.verbose_cimport = comp.verbose_cimport,

View File

@ -323,6 +323,14 @@ pub fn hasLlvmSupport(target: std.Target, ofmt: std.Target.ObjectFormat) bool {
};
}
/// The set of targets that Zig supports using LLD to link for.
pub fn hasLldSupport(ofmt: std.Target.ObjectFormat) bool {
return switch (ofmt) {
.elf, .coff, .wasm => true,
else => false,
};
}
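
A usage sketch for the new predicate; the function body is copied from above so the test runs standalone under zig test, and the formats checked are just examples of what the switch distinguishes:

const std = @import("std");

fn hasLldSupport(ofmt: std.Target.ObjectFormat) bool {
    return switch (ofmt) {
        .elf, .coff, .wasm => true,
        else => false,
    };
}

test "only elf, coff and wasm report LLD support" {
    try std.testing.expect(hasLldSupport(.elf));
    try std.testing.expect(!hasLldSupport(.macho));
    try std.testing.expect(!hasLldSupport(.spirv));
}
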
/// The set of targets that our own self-hosted backends have robust support for.
/// Used to select between LLVM backend and self-hosted backend when compiling in
/// debug mode. A given target should only return true here if it is passing greater