mirror of https://github.com/ziglang/zig.git (synced 2025-12-06 06:13:07 +00:00)

stage2: Use {s} instead of {} when formatting strings

parent dd973fb365
commit 1c13ca5a05
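
Note: in Zig's std.fmt, {s} explicitly formats a []const u8 value as text, while {} falls back to the value's default formatting; this commit mechanically switches string arguments in log and format calls from {} to {s}. A minimal sketch of the difference (illustrative example only, not part of the diff below):

    const std = @import("std");

    pub fn main() void {
        const name: []const u8 = "stage2";
        // {s} explicitly requests string formatting for a byte slice.
        std.debug.print("building {s}\n", .{name});
        // {d} formats integers as decimal; {} uses the value's default formatting.
        std.debug.print("name length = {d}\n", .{name.len});
    }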
@@ -298,6 +298,20 @@ pub fn isNumber(comptime T: type) bool {
};
}

pub fn isIntegerNumber(comptime T: type) bool {
return switch (@typeInfo(T)) {
.Int, .ComptimeInt => true,
else => false,
};
}

pub fn isFloatingNumber(comptime T: type) bool {
return switch (@typeInfo(T)) {
.Float, .ComptimeFloat => true,
else => false,
};
}

test "std.meta.trait.isNumber" {
const NotANumber = struct {
number: u8,

@@ -549,7 +549,7 @@ pub const Manifest = struct {
.target, .target_must_resolve, .prereq => {},
else => |err| {
try err.printError(error_buf.writer());
std.log.err("failed parsing {}: {}", .{ dep_file_basename, error_buf.items });
std.log.err("failed parsing {s}: {s}", .{ dep_file_basename, error_buf.items });
return error.InvalidDepFile;
},
}
@@ -561,7 +561,7 @@ pub const Manifest = struct {
.prereq => |bytes| try self.addFilePost(bytes),
else => |err| {
try err.printError(error_buf.writer());
std.log.err("failed parsing {}: {}", .{ dep_file_basename, error_buf.items });
std.log.err("failed parsing {s}: {s}", .{ dep_file_basename, error_buf.items });
return error.InvalidDepFile;
},
}

@@ -1475,7 +1475,7 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
// lifetime annotations in the ZIR.
var decl_arena = decl.typed_value.most_recent.arena.?.promote(module.gpa);
defer decl.typed_value.most_recent.arena.?.* = decl_arena.state;
log.debug("analyze liveness of {}\n", .{decl.name});
log.debug("analyze liveness of {s}\n", .{decl.name});
try liveness.analyze(module.gpa, &decl_arena.allocator, func.analysis.success);
}

@@ -1492,7 +1492,7 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
module.failed_decls.putAssumeCapacityNoClobber(decl, try ErrorMsg.create(
module.gpa,
decl.src(),
"unable to codegen: {}",
"unable to codegen: {s}",
.{@errorName(err)},
));
decl.analysis = .codegen_failure_retryable;
@@ -1535,7 +1535,7 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
module.failed_decls.putAssumeCapacityNoClobber(decl, try ErrorMsg.create(
module.gpa,
decl.src(),
"unable to update line number: {}",
"unable to update line number: {s}",
.{@errorName(err)},
));
decl.analysis = .codegen_failure_retryable;
@@ -1544,50 +1544,50 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
.glibc_crt_file => |crt_file| {
glibc.buildCRTFile(self, crt_file) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build glibc CRT file: {}", .{@errorName(err)});
fatal("unable to build glibc CRT file: {s}", .{@errorName(err)});
};
},
.glibc_shared_objects => {
glibc.buildSharedObjects(self) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build glibc shared objects: {}", .{@errorName(err)});
fatal("unable to build glibc shared objects: {s}", .{@errorName(err)});
};
},
.musl_crt_file => |crt_file| {
musl.buildCRTFile(self, crt_file) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build musl CRT file: {}", .{@errorName(err)});
fatal("unable to build musl CRT file: {s}", .{@errorName(err)});
};
},
.mingw_crt_file => |crt_file| {
mingw.buildCRTFile(self, crt_file) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build mingw-w64 CRT file: {}", .{@errorName(err)});
fatal("unable to build mingw-w64 CRT file: {s}", .{@errorName(err)});
};
},
.windows_import_lib => |index| {
const link_lib = self.bin_file.options.system_libs.items()[index].key;
mingw.buildImportLib(self, link_lib) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to generate DLL import .lib file: {}", .{@errorName(err)});
fatal("unable to generate DLL import .lib file: {s}", .{@errorName(err)});
};
},
.libunwind => {
libunwind.buildStaticLib(self) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build libunwind: {}", .{@errorName(err)});
fatal("unable to build libunwind: {s}", .{@errorName(err)});
};
},
.libcxx => {
libcxx.buildLibCXX(self) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build libcxx: {}", .{@errorName(err)});
fatal("unable to build libcxx: {s}", .{@errorName(err)});
};
},
.libcxxabi => {
libcxx.buildLibCXXABI(self) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build libcxxabi: {}", .{@errorName(err)});
fatal("unable to build libcxxabi: {s}", .{@errorName(err)});
};
},
.libtsan => {
@@ -1611,20 +1611,20 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
.libssp => {
self.buildOutputFromZig("ssp.zig", .Lib, &self.libssp_static_lib) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build libssp: {}", .{@errorName(err)});
fatal("unable to build libssp: {s}", .{@errorName(err)});
};
},
.zig_libc => {
self.buildOutputFromZig("c.zig", .Lib, &self.libc_static_lib) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to build zig's multitarget libc: {}", .{@errorName(err)});
fatal("unable to build zig's multitarget libc: {s}", .{@errorName(err)});
};
},
.generate_builtin_zig => {
// This Job is only queued up if there is a zig module.
self.updateBuiltinZigFile(self.bin_file.options.module.?) catch |err| {
// TODO Expose this as a normal compile error rather than crashing here.
fatal("unable to update builtin.zig file: {}", .{@errorName(err)});
fatal("unable to update builtin.zig file: {s}", .{@errorName(err)});
};
},
.stage1_module => {
@@ -1704,11 +1704,11 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
const out_h_path = try comp.local_cache_directory.join(arena, &[_][]const u8{
tmp_dir_sub_path, cimport_basename,
});
const out_dep_path = try std.fmt.allocPrint(arena, "{}.d", .{out_h_path});
const out_dep_path = try std.fmt.allocPrint(arena, "{s}.d", .{out_h_path});

try zig_cache_tmp_dir.writeFile(cimport_basename, c_src);
if (comp.verbose_cimport) {
log.info("C import source: {}", .{out_h_path});
log.info("C import source: {s}", .{out_h_path});
}

var argv = std.ArrayList([]const u8).init(comp.gpa);
@@ -1755,7 +1755,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
defer tree.deinit();

if (comp.verbose_cimport) {
log.info("C import .d file: {}", .{out_dep_path});
log.info("C import .d file: {s}", .{out_dep_path});
}

const dep_basename = std.fs.path.basename(out_dep_path);
@@ -1775,7 +1775,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
try bos.flush();

man.writeManifest() catch |err| {
log.warn("failed to write cache manifest for C import: {}", .{@errorName(err)});
log.warn("failed to write cache manifest for C import: {s}", .{@errorName(err)});
};

break :digest digest;
@@ -1785,7 +1785,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
"o", &digest, cimport_zig_basename,
});
if (comp.verbose_cimport) {
log.info("C import output: {}\n", .{out_zig_path});
log.info("C import output: {s}\n", .{out_zig_path});
}
return CImportResult{
.out_zig_path = out_zig_path,
@@ -1946,7 +1946,7 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_comp_progress_node: *
child.stderr_behavior = .Inherit;

const term = child.spawnAndWait() catch |err| {
return comp.failCObj(c_object, "unable to spawn {}: {}", .{ argv.items[0], @errorName(err) });
return comp.failCObj(c_object, "unable to spawn {s}: {s}", .{ argv.items[0], @errorName(err) });
};
switch (term) {
.Exited => |code| {
@@ -1974,7 +1974,7 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_comp_progress_node: *
const stderr = try stderr_reader.readAllAlloc(arena, 10 * 1024 * 1024);

const term = child.wait() catch |err| {
return comp.failCObj(c_object, "unable to spawn {}: {}", .{ argv.items[0], @errorName(err) });
return comp.failCObj(c_object, "unable to spawn {s}: {s}", .{ argv.items[0], @errorName(err) });
};

switch (term) {
@@ -1982,12 +1982,12 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_comp_progress_node: *
if (code != 0) {
// TODO parse clang stderr and turn it into an error message
// and then call failCObjWithOwnedErrorMsg
log.err("clang failed with stderr: {}", .{stderr});
log.err("clang failed with stderr: {s}", .{stderr});
return comp.failCObj(c_object, "clang exited with code {}", .{code});
}
},
else => {
log.err("clang terminated with stderr: {}", .{stderr});
log.err("clang terminated with stderr: {s}", .{stderr});
return comp.failCObj(c_object, "clang terminated unexpectedly", .{});
},
}
@@ -1999,7 +1999,7 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_comp_progress_node: *
try man.addDepFilePost(zig_cache_tmp_dir, dep_basename);
// Just to save disk space, we delete the file because it is never needed again.
zig_cache_tmp_dir.deleteFile(dep_basename) catch |err| {
log.warn("failed to delete '{}': {}", .{ dep_file_path, @errorName(err) });
log.warn("failed to delete '{s}': {s}", .{ dep_file_path, @errorName(err) });
};
}

@@ -2015,7 +2015,7 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_comp_progress_node: *
try std.fs.rename(zig_cache_tmp_dir, tmp_basename, o_dir, o_basename);

man.writeManifest() catch |err| {
log.warn("failed to write cache manifest when compiling '{}': {}", .{ c_object.src.src_path, @errorName(err) });
log.warn("failed to write cache manifest when compiling '{s}': {s}", .{ c_object.src.src_path, @errorName(err) });
};
break :blk digest;
};
@@ -2034,7 +2034,7 @@ pub fn tmpFilePath(comp: *Compilation, arena: *Allocator, suffix: []const u8) er
const s = std.fs.path.sep_str;
const rand_int = std.crypto.random.int(u64);
if (comp.local_cache_directory.path) |p| {
return std.fmt.allocPrint(arena, "{}" ++ s ++ "tmp" ++ s ++ "{x}-{s}", .{ p, rand_int, suffix });
return std.fmt.allocPrint(arena, "{s}" ++ s ++ "tmp" ++ s ++ "{x}-{s}", .{ p, rand_int, suffix });
} else {
return std.fmt.allocPrint(arena, "tmp" ++ s ++ "{x}-{s}", .{ rand_int, suffix });
}
@@ -2144,7 +2144,7 @@ pub fn addCCArgs(
}
const mcmodel = comp.bin_file.options.machine_code_model;
if (mcmodel != .default) {
try argv.append(try std.fmt.allocPrint(arena, "-mcmodel={}", .{@tagName(mcmodel)}));
try argv.append(try std.fmt.allocPrint(arena, "-mcmodel={s}", .{@tagName(mcmodel)}));
}

switch (target.os.tag) {
@@ -2497,22 +2497,22 @@ fn detectLibCIncludeDirs(
const s = std.fs.path.sep_str;
const arch_include_dir = try std.fmt.allocPrint(
arena,
"{}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{}-{}-{}",
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{s}-{s}-{s}",
.{ zig_lib_dir, arch_name, os_name, abi_name },
);
const generic_include_dir = try std.fmt.allocPrint(
arena,
"{}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "generic-{}",
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "generic-{s}",
.{ zig_lib_dir, generic_name },
);
const arch_os_include_dir = try std.fmt.allocPrint(
arena,
"{}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{}-{}-any",
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{s}-{s}-any",
.{ zig_lib_dir, @tagName(target.cpu.arch), os_name },
);
const generic_os_include_dir = try std.fmt.allocPrint(
arena,
"{}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "any-{}-any",
"{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "any-{s}-any",
.{ zig_lib_dir, os_name },
);

@@ -2631,9 +2631,9 @@ fn updateBuiltinZigFile(comp: *Compilation, mod: *Module) !void {

pub fn dump_argv(argv: []const []const u8) void {
for (argv[0 .. argv.len - 1]) |arg| {
std.debug.print("{} ", .{arg});
std.debug.print("{s} ", .{arg});
}
std.debug.print("{}\n", .{argv[argv.len - 1]});
std.debug.print("{s}\n", .{argv[argv.len - 1]});
}

pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8 {
@@ -2653,15 +2653,15 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
\\pub const arch = Target.current.cpu.arch;
\\/// Deprecated
\\pub const endian = Target.current.cpu.arch.endian();
\\pub const output_mode = OutputMode.{};
\\pub const link_mode = LinkMode.{};
\\pub const output_mode = OutputMode.{s};
\\pub const link_mode = LinkMode.{s};
\\pub const is_test = {};
\\pub const single_threaded = {};
\\pub const abi = Abi.{};
\\pub const abi = Abi.{s};
\\pub const cpu: Cpu = Cpu{{
\\ .arch = .{},
\\ .model = &Target.{}.cpu.{},
\\ .features = Target.{}.featureSet(&[_]Target.{}.Feature{{
\\ .arch = .{s},
\\ .model = &Target.{s}.cpu.{s},
\\ .features = Target.{s}.featureSet(&[_]Target.{s}.Feature{{
\\
, .{
@tagName(comp.bin_file.options.output_mode),
@@ -2692,7 +2692,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
\\ }}),
\\}};
\\pub const os = Os{{
\\ .tag = .{},
\\ .tag = .{s},
\\ .version_range = .{{
,
.{@tagName(target.os.tag)},
@@ -2778,8 +2778,8 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
(comp.bin_file.options.skip_linker_dependencies and comp.bin_file.options.parent_compilation_link_libc);

try buffer.writer().print(
\\pub const object_format = ObjectFormat.{};
\\pub const mode = Mode.{};
\\pub const object_format = ObjectFormat.{s};
\\pub const mode = Mode.{s};
\\pub const link_libc = {};
\\pub const link_libcpp = {};
\\pub const have_error_return_tracing = {};
@@ -2787,7 +2787,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
\\pub const position_independent_code = {};
\\pub const position_independent_executable = {};
\\pub const strip_debug_info = {};
\\pub const code_model = CodeModel.{};
\\pub const code_model = CodeModel.{s};
\\
, .{
@tagName(comp.bin_file.options.object_format),
@@ -3013,7 +3013,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
id_symlink_basename,
&prev_digest_buf,
) catch |err| blk: {
log.debug("stage1 {} new_digest={} error: {}", .{ mod.root_pkg.root_src_path, digest, @errorName(err) });
log.debug("stage1 {} new_digest={} error: {s}", .{ mod.root_pkg.root_src_path, digest, @errorName(err) });
// Handle this as a cache miss.
break :blk prev_digest_buf[0..0];
};
@@ -3189,7 +3189,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
// Update the small file with the digest. If it fails we can continue; it only
// means that the next invocation will have an unnecessary cache miss.
const stage1_flags_byte = @bitCast(u8, mod.stage1_flags);
log.debug("stage1 {} final digest={} flags={x}", .{
log.debug("stage1 {s} final digest={} flags={x}", .{
mod.root_pkg.root_src_path, digest, stage1_flags_byte,
});
var digest_plus_flags: [digest.len + 2]u8 = undefined;
@@ -3202,11 +3202,11 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
digest_plus_flags, stage1_flags_byte, mod.stage1_flags.have_winmain_crt_startup,
});
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest_plus_flags) catch |err| {
log.warn("failed to save stage1 hash digest file: {}", .{@errorName(err)});
log.warn("failed to save stage1 hash digest file: {s}", .{@errorName(err)});
};
// Failure here only means an unnecessary cache miss.
man.writeManifest() catch |err| {
log.warn("failed to write cache manifest when linking: {}", .{@errorName(err)});
log.warn("failed to write cache manifest when linking: {s}", .{@errorName(err)});
};
// We hang on to this lock so that the output file path can be used without
// other processes clobbering it.

@@ -366,7 +366,7 @@ pub const Token = union(enum) {
.incomplete_quoted_prerequisite,
.incomplete_target,
=> |index_and_bytes| {
try writer.print("{} '", .{self.errStr()});
try writer.print("{s} '", .{self.errStr()});
if (self == .incomplete_target) {
const tmp = Token{ .target_must_resolve = index_and_bytes.bytes };
try tmp.resolve(writer);
@@ -383,7 +383,7 @@ pub const Token = union(enum) {
=> |index_and_char| {
try writer.writeAll("illegal char ");
try printUnderstandableChar(writer, index_and_char.char);
try writer.print(" at position {}: {}", .{ index_and_char.index, self.errStr() });
try writer.print(" at position {}: {s}", .{ index_and_char.index, self.errStr() });
},
}
}
@@ -943,7 +943,7 @@ fn printSection(out: anytype, label: []const u8, bytes: []const u8) !void {

fn printLabel(out: anytype, label: []const u8, bytes: []const u8) !void {
var buf: [80]u8 = undefined;
var text = try std.fmt.bufPrint(buf[0..], "{} {} bytes ", .{ label, bytes.len });
var text = try std.fmt.bufPrint(buf[0..], "{s} {} bytes ", .{ label, bytes.len });
try out.writeAll(text);
var i: usize = text.len;
const end = 79;

@@ -953,7 +953,7 @@ pub fn ensureDeclAnalyzed(self: *Module, decl: *Decl) InnerError!void {
self.failed_decls.putAssumeCapacityNoClobber(decl, try Compilation.ErrorMsg.create(
self.gpa,
decl.src(),
"unable to analyze: {}",
"unable to analyze: {s}",
.{@errorName(err)},
));
decl.analysis = .sema_failure_retryable;
@@ -1475,7 +1475,7 @@ fn getSrcModule(self: *Module, root_scope: *Scope.ZIRModule) !*zir.Module {
if (zir_module.error_msg) |src_err_msg| {
self.failed_files.putAssumeCapacityNoClobber(
&root_scope.base,
try Compilation.ErrorMsg.create(self.gpa, src_err_msg.byte_offset, "{}", .{src_err_msg.msg}),
try Compilation.ErrorMsg.create(self.gpa, src_err_msg.byte_offset, "{s}", .{src_err_msg.msg}),
);
root_scope.status = .unloaded_parse_failure;
return error.AnalysisFail;
@@ -1581,7 +1581,7 @@ pub fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void
decl.src_index = decl_i;
if (deleted_decls.remove(decl) == null) {
decl.analysis = .sema_failure;
const err_msg = try Compilation.ErrorMsg.create(self.gpa, tree.token_locs[name_tok].start, "redefinition of '{}'", .{decl.name});
const err_msg = try Compilation.ErrorMsg.create(self.gpa, tree.token_locs[name_tok].start, "redefinition of '{s}'", .{decl.name});
errdefer err_msg.destroy(self.gpa);
try self.failed_decls.putNoClobber(self.gpa, decl, err_msg);
} else {
@@ -1623,7 +1623,7 @@ pub fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void
decl.src_index = decl_i;
if (deleted_decls.remove(decl) == null) {
decl.analysis = .sema_failure;
const err_msg = try Compilation.ErrorMsg.create(self.gpa, name_loc.start, "redefinition of '{}'", .{decl.name});
const err_msg = try Compilation.ErrorMsg.create(self.gpa, name_loc.start, "redefinition of '{s}'", .{decl.name});
errdefer err_msg.destroy(self.gpa);
try self.failed_decls.putNoClobber(self.gpa, decl, err_msg);
} else if (!srcHashEql(decl.contents_hash, contents_hash)) {
@@ -1991,7 +1991,7 @@ pub fn analyzeExport(
self.failed_exports.putAssumeCapacityNoClobber(new_export, try Compilation.ErrorMsg.create(
self.gpa,
src,
"exported symbol collision: {}",
"exported symbol collision: {s}",
.{symbol_name},
));
// TODO: add a note
@@ -2007,7 +2007,7 @@ pub fn analyzeExport(
self.failed_exports.putAssumeCapacityNoClobber(new_export, try Compilation.ErrorMsg.create(
self.gpa,
src,
"unable to export: {}",
"unable to export: {s}",
.{@errorName(err)},
));
new_export.status = .failed_retryable;
@@ -2277,7 +2277,7 @@ pub fn createAnonymousDecl(
) !*Decl {
const name_index = self.getNextAnonNameIndex();
const scope_decl = scope.decl().?;
const name = try std.fmt.allocPrint(self.gpa, "{}__anon_{}", .{ scope_decl.name, name_index });
const name = try std.fmt.allocPrint(self.gpa, "{s}__anon_{}", .{ scope_decl.name, name_index });
defer self.gpa.free(name);
const name_hash = scope.namespace().fullyQualifiedNameHash(name);
const src_hash: std.zig.SrcHash = undefined;
@@ -2384,7 +2384,7 @@ pub fn analyzeDeref(self: *Module, scope: *Scope, src: usize, ptr: *Inst, ptr_sr

pub fn analyzeDeclRefByName(self: *Module, scope: *Scope, src: usize, decl_name: []const u8) InnerError!*Inst {
const decl = self.lookupDeclName(scope, decl_name) orelse
return self.fail(scope, src, "decl '{}' not found", .{decl_name});
return self.fail(scope, src, "decl '{s}' not found", .{decl_name});
return self.analyzeDeclRef(scope, src, decl);
}

@@ -1955,7 +1955,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
error.Overflow => return mod.failNode(
scope,
&ident.base,
"primitive integer type '{}' exceeds maximum bit width of 65535",
"primitive integer type '{s}' exceeds maximum bit width of 65535",
.{ident_name},
),
error.InvalidCharacter => break :integer,
@@ -2010,7 +2010,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
return rlWrapPtr(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclValInModule, .{ .decl = decl }, .{}));
}

return mod.failNode(scope, &ident.base, "use of undeclared identifier '{}'", .{ident_name});
return mod.failNode(scope, &ident.base, "use of undeclared identifier '{s}'", .{ident_name});
}

fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.OneToken) InnerError!*zir.Inst {
@@ -2204,7 +2204,7 @@ fn ensureBuiltinParamCount(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinC
return;

const s = if (count == 1) "" else "s";
return mod.failTok(scope, call.builtin_token, "expected {} parameter{}, found {}", .{ count, s, call.params_len });
return mod.failTok(scope, call.builtin_token, "expected {} parameter{s}, found {}", .{ count, s, call.params_len });
}

fn simpleCast(
@@ -2383,7 +2383,7 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built
} else if (mem.eql(u8, builtin_name, "@compileError")) {
return compileError(mod, scope, call);
} else {
return mod.failTok(scope, call.builtin_token, "invalid builtin function: '{}'", .{builtin_name});
return mod.failTok(scope, call.builtin_token, "invalid builtin function: '{s}'", .{builtin_name});
}
}

@@ -228,7 +228,7 @@ pub fn generateSymbol(
.fail = try ErrorMsg.create(
bin_file.allocator,
src,
"TODO implement generateSymbol for type '{}'",
"TODO implement generateSymbol for type '{s}'",
.{@tagName(t)},
),
};
@@ -2029,7 +2029,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
});
break :blk 0x84;
},
else => return self.fail(inst.base.src, "TODO implement condbr {} when condition is {}", .{ self.target.cpu.arch, @tagName(cond) }),
else => return self.fail(inst.base.src, "TODO implement condbr {s} when condition is {s}", .{ self.target.cpu.arch, @tagName(cond) }),
};
self.code.appendSliceAssumeCapacity(&[_]u8{ 0x0f, opcode });
const reloc = Reloc{ .rel32 = self.code.items.len };
@@ -2376,11 +2376,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.arm, .armeb => {
for (inst.inputs) |input, i| {
if (input.len < 3 or input[0] != '{' or input[input.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm input constraint: '{}'", .{input});
return self.fail(inst.base.src, "unrecognized asm input constraint: '{s}'", .{input});
}
const reg_name = input[1 .. input.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
const arg = try self.resolveInst(inst.args[i]);
try self.genSetReg(inst.base.src, reg, arg);
}
@@ -2393,11 +2393,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {

if (inst.output) |output| {
if (output.len < 4 or output[0] != '=' or output[1] != '{' or output[output.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm output constraint: '{}'", .{output});
return self.fail(inst.base.src, "unrecognized asm output constraint: '{s}'", .{output});
}
const reg_name = output[2 .. output.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
return MCValue{ .register = reg };
} else {
return MCValue.none;
@@ -2406,11 +2406,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.aarch64 => {
for (inst.inputs) |input, i| {
if (input.len < 3 or input[0] != '{' or input[input.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm input constraint: '{}'", .{input});
return self.fail(inst.base.src, "unrecognized asm input constraint: '{s}'", .{input});
}
const reg_name = input[1 .. input.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
const arg = try self.resolveInst(inst.args[i]);
try self.genSetReg(inst.base.src, reg, arg);
}
@@ -2425,11 +2425,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {

if (inst.output) |output| {
if (output.len < 4 or output[0] != '=' or output[1] != '{' or output[output.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm output constraint: '{}'", .{output});
return self.fail(inst.base.src, "unrecognized asm output constraint: '{s}'", .{output});
}
const reg_name = output[2 .. output.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
return MCValue{ .register = reg };
} else {
return MCValue.none;
@@ -2438,11 +2438,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.riscv64 => {
for (inst.inputs) |input, i| {
if (input.len < 3 or input[0] != '{' or input[input.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm input constraint: '{}'", .{input});
return self.fail(inst.base.src, "unrecognized asm input constraint: '{s}'", .{input});
}
const reg_name = input[1 .. input.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
const arg = try self.resolveInst(inst.args[i]);
try self.genSetReg(inst.base.src, reg, arg);
}
@@ -2455,11 +2455,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {

if (inst.output) |output| {
if (output.len < 4 or output[0] != '=' or output[1] != '{' or output[output.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm output constraint: '{}'", .{output});
return self.fail(inst.base.src, "unrecognized asm output constraint: '{s}'", .{output});
}
const reg_name = output[2 .. output.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
return MCValue{ .register = reg };
} else {
return MCValue.none;
@@ -2468,11 +2468,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.x86_64, .i386 => {
for (inst.inputs) |input, i| {
if (input.len < 3 or input[0] != '{' or input[input.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm input constraint: '{}'", .{input});
return self.fail(inst.base.src, "unrecognized asm input constraint: '{s}'", .{input});
}
const reg_name = input[1 .. input.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
const arg = try self.resolveInst(inst.args[i]);
try self.genSetReg(inst.base.src, reg, arg);
}
@@ -2485,11 +2485,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {

if (inst.output) |output| {
if (output.len < 4 or output[0] != '=' or output[1] != '{' or output[output.len - 1] != '}') {
return self.fail(inst.base.src, "unrecognized asm output constraint: '{}'", .{output});
return self.fail(inst.base.src, "unrecognized asm output constraint: '{s}'", .{output});
}
const reg_name = output[2 .. output.len - 1];
const reg = parseRegName(reg_name) orelse
return self.fail(inst.base.src, "unrecognized register: '{}'", .{reg_name});
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
return MCValue{ .register = reg };
} else {
return MCValue.none;
@@ -3417,7 +3417,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
next_int_reg += 1;
}
},
else => return self.fail(src, "TODO implement function parameters of type {}", .{@tagName(ty.zigTypeTag())}),
else => return self.fail(src, "TODO implement function parameters of type {s}", .{@tagName(ty.zigTypeTag())}),
}
}
result.stack_byte_count = next_stack_offset;

@@ -235,7 +235,7 @@ fn renderFunctionSignature(
try writer.writeAll(", ");
}
try renderType(ctx, writer, tv.ty.fnParamType(index));
try writer.print(" arg{}", .{index});
try writer.print(" arg{d}", .{index});
}
}
try writer.writeByte(')');
@@ -481,8 +481,9 @@ fn genBinOp(ctx: *Context, file: *C, inst: *Inst.BinOp, operator: []const u8) !?
const rhs = try ctx.resolveInst(inst.rhs);
const writer = file.main.writer();
const name = try ctx.name();
try renderTypeAndName(ctx, writer, inst.base.ty, name, .Const);
try writer.print(" = {s} {s} {s};\n", .{ lhs, operator, rhs });
try writer.writeAll(indentation ++ "const ");
try renderType(ctx, writer, inst.base.ty);
try writer.print(" {s} = {s} " ++ operator ++ " {s};\n", .{ name, lhs, rhs });
return name;
}

@@ -587,7 +588,7 @@ fn genAsm(ctx: *Context, file: *C, as: *Inst.Assembly) !?[]u8 {
const arg = as.args[index];
try writer.writeAll("register ");
try renderType(ctx, writer, arg.ty);
try writer.print(" {}_constant __asm__(\"{}\") = ", .{ reg, reg });
try writer.print(" {s}_constant __asm__(\"{s}\") = ", .{ reg, reg });
// TODO merge constant handling into inst_map as well
if (arg.castTag(.constant)) |c| {
try renderValue(ctx, writer, arg.ty, c.val);
@@ -597,13 +598,13 @@ fn genAsm(ctx: *Context, file: *C, as: *Inst.Assembly) !?[]u8 {
if (!gop.found_existing) {
return ctx.fail(ctx.decl.src(), "Internal error in C backend: asm argument not found in inst_map", .{});
}
try writer.print("{};\n ", .{gop.entry.value});
try writer.print("{s};\n ", .{gop.entry.value});
}
} else {
return ctx.fail(ctx.decl.src(), "TODO non-explicit inline asm regs", .{});
}
}
try writer.print("__asm {} (\"{}\"", .{ if (as.is_volatile) @as([]const u8, "volatile") else "", as.asm_source });
try writer.print("__asm {s} (\"{s}\"", .{ if (as.is_volatile) @as([]const u8, "volatile") else "", as.asm_source });
if (as.output) |o| {
return ctx.fail(ctx.decl.src(), "TODO inline asm output", .{});
}
@@ -619,7 +620,7 @@ fn genAsm(ctx: *Context, file: *C, as: *Inst.Assembly) !?[]u8 {
if (index > 0) {
try writer.writeAll(", ");
}
try writer.print("\"\"({}_constant)", .{reg});
try writer.print("\"\"({s}_constant)", .{reg});
} else {
// This is blocked by the earlier test
unreachable;

src/codegen/llvm.zig (new file, 125 lines)
@@ -0,0 +1,125 @@
const std = @import("std");
const Allocator = std.mem.Allocator;

pub fn targetTriple(allocator: *Allocator, target: std.Target) ![]u8 {
const llvm_arch = switch (target.cpu.arch) {
.arm => "arm",
.armeb => "armeb",
.aarch64 => "aarch64",
.aarch64_be => "aarch64_be",
.aarch64_32 => "aarch64_32",
.arc => "arc",
.avr => "avr",
.bpfel => "bpfel",
.bpfeb => "bpfeb",
.hexagon => "hexagon",
.mips => "mips",
.mipsel => "mipsel",
.mips64 => "mips64",
.mips64el => "mips64el",
.msp430 => "msp430",
.powerpc => "powerpc",
.powerpc64 => "powerpc64",
.powerpc64le => "powerpc64le",
.r600 => "r600",
.amdgcn => "amdgcn",
.riscv32 => "riscv32",
.riscv64 => "riscv64",
.sparc => "sparc",
.sparcv9 => "sparcv9",
.sparcel => "sparcel",
.s390x => "s390x",
.tce => "tce",
.tcele => "tcele",
.thumb => "thumb",
.thumbeb => "thumbeb",
.i386 => "i386",
.x86_64 => "x86_64",
.xcore => "xcore",
.nvptx => "nvptx",
.nvptx64 => "nvptx64",
.le32 => "le32",
.le64 => "le64",
.amdil => "amdil",
.amdil64 => "amdil64",
.hsail => "hsail",
.hsail64 => "hsail64",
.spir => "spir",
.spir64 => "spir64",
.kalimba => "kalimba",
.shave => "shave",
.lanai => "lanai",
.wasm32 => "wasm32",
.wasm64 => "wasm64",
.renderscript32 => "renderscript32",
.renderscript64 => "renderscript64",
.ve => "ve",
.spu_2 => return error.LLVMBackendDoesNotSupportSPUMarkII,
};
// TODO Add a sub-arch for some architectures depending on CPU features.

const llvm_os = switch (target.os.tag) {
.freestanding => "unknown",
.ananas => "ananas",
.cloudabi => "cloudabi",
.dragonfly => "dragonfly",
.freebsd => "freebsd",
.fuchsia => "fuchsia",
.ios => "ios",
.kfreebsd => "kfreebsd",
.linux => "linux",
.lv2 => "lv2",
.macos => "macosx",
.netbsd => "netbsd",
.openbsd => "openbsd",
.solaris => "solaris",
.windows => "windows",
.haiku => "haiku",
.minix => "minix",
.rtems => "rtems",
.nacl => "nacl",
.cnk => "cnk",
.aix => "aix",
.cuda => "cuda",
.nvcl => "nvcl",
.amdhsa => "amdhsa",
.ps4 => "ps4",
.elfiamcu => "elfiamcu",
.tvos => "tvos",
.watchos => "watchos",
.mesa3d => "mesa3d",
.contiki => "contiki",
.amdpal => "amdpal",
.hermit => "hermit",
.hurd => "hurd",
.wasi => "wasi",
.emscripten => "emscripten",
.uefi => "windows",
.other => "unknown",
};

const llvm_abi = switch (target.abi) {
.none => "unknown",
.gnu => "gnu",
.gnuabin32 => "gnuabin32",
.gnuabi64 => "gnuabi64",
.gnueabi => "gnueabi",
.gnueabihf => "gnueabihf",
.gnux32 => "gnux32",
.code16 => "code16",
.eabi => "eabi",
.eabihf => "eabihf",
.android => "android",
.musl => "musl",
.musleabi => "musleabi",
.musleabihf => "musleabihf",
.msvc => "msvc",
.itanium => "itanium",
.cygnus => "cygnus",
.coreclr => "coreclr",
.simulator => "simulator",
.macabi => "macabi",
};

return std.fmt.allocPrint(allocator, "{s}-unknown-{s}-{s}", .{ llvm_arch, llvm_os, llvm_abi });
}
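
For context, the new targetTriple helper above returns an allocator-owned triple string such as "x86_64-unknown-linux-gnu". A minimal usage sketch (hypothetical caller; the import path and the allocator are assumptions, not taken from this commit):

    const std = @import("std");
    const llvm = @import("codegen/llvm.zig");

    // Hypothetical: build the LLVM triple for the host target, then free it.
    fn printHostTriple(allocator: *std.mem.Allocator) !void {
        const triple = try llvm.targetTriple(allocator, std.Target.current);
        defer allocator.free(triple);
        std.debug.print("LLVM triple: {s}\n", .{triple});
    }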
@@ -72,7 +72,7 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
errdefer version_table.deinit(gpa);

var glibc_dir = zig_lib_dir.openDir("libc" ++ path.sep_str ++ "glibc", .{}) catch |err| {
std.log.err("unable to open glibc dir: {}", .{@errorName(err)});
std.log.err("unable to open glibc dir: {s}", .{@errorName(err)});
return error.ZigInstallationCorrupt;
};
defer glibc_dir.close();
@@ -81,7 +81,7 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
const vers_txt_contents = glibc_dir.readFileAlloc(gpa, "vers.txt", max_txt_size) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => {
std.log.err("unable to read vers.txt: {}", .{@errorName(err)});
std.log.err("unable to read vers.txt: {s}", .{@errorName(err)});
return error.ZigInstallationCorrupt;
},
};
@@ -91,7 +91,7 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
const fns_txt_contents = glibc_dir.readFileAlloc(arena, "fns.txt", max_txt_size) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => {
std.log.err("unable to read fns.txt: {}", .{@errorName(err)});
std.log.err("unable to read fns.txt: {s}", .{@errorName(err)});
return error.ZigInstallationCorrupt;
},
};
@@ -99,7 +99,7 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
const abi_txt_contents = glibc_dir.readFileAlloc(gpa, "abi.txt", max_txt_size) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => {
std.log.err("unable to read abi.txt: {}", .{@errorName(err)});
std.log.err("unable to read abi.txt: {s}", .{@errorName(err)});
return error.ZigInstallationCorrupt;
},
};
@@ -116,7 +116,7 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
}
const adjusted_line = line[prefix.len..];
const ver = std.builtin.Version.parse(adjusted_line) catch |err| {
std.log.err("vers.txt:{}: unable to parse glibc version '{}': {}", .{ line_i, line, @errorName(err) });
std.log.err("vers.txt:{}: unable to parse glibc version '{s}': {s}", .{ line_i, line, @errorName(err) });
return error.ZigInstallationCorrupt;
};
try all_versions.append(arena, ver);
@@ -136,7 +136,7 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
return error.ZigInstallationCorrupt;
};
const lib = findLib(lib_name) orelse {
std.log.err("fns.txt:{}: unknown library name: {}", .{ line_i, lib_name });
std.log.err("fns.txt:{}: unknown library name: {s}", .{ line_i, lib_name });
return error.ZigInstallationCorrupt;
};
try all_functions.append(arena, .{
@@ -170,15 +170,15 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
return error.ZigInstallationCorrupt;
};
const arch_tag = std.meta.stringToEnum(std.Target.Cpu.Arch, arch_name) orelse {
std.log.err("abi.txt:{}: unrecognized arch: '{}'", .{ line_i, arch_name });
std.log.err("abi.txt:{}: unrecognized arch: '{s}'", .{ line_i, arch_name });
return error.ZigInstallationCorrupt;
};
if (!mem.eql(u8, os_name, "linux")) {
std.log.err("abi.txt:{}: expected OS 'linux', found '{}'", .{ line_i, os_name });
std.log.err("abi.txt:{}: expected OS 'linux', found '{s}'", .{ line_i, os_name });
return error.ZigInstallationCorrupt;
}
const abi_tag = std.meta.stringToEnum(std.Target.Abi, abi_name) orelse {
std.log.err("abi.txt:{}: unrecognized ABI: '{}'", .{ line_i, abi_name });
std.log.err("abi.txt:{}: unrecognized ABI: '{s}'", .{ line_i, abi_name });
return error.ZigInstallationCorrupt;
};

@@ -211,7 +211,7 @@ pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!
}
const version_index = std.fmt.parseInt(u8, version_index_string, 10) catch |err| {
// If this happens with legit data, increase the size of the integer type in the struct.
std.log.err("abi.txt:{}: unable to parse version: {}", .{ line_i, @errorName(err) });
std.log.err("abi.txt:{}: unable to parse version: {s}", .{ line_i, @errorName(err) });
return error.ZigInstallationCorrupt;
};

@@ -531,7 +531,7 @@ fn add_include_dirs(comp: *Compilation, arena: *Allocator, args: *std.ArrayList(
try args.append(try path.join(arena, &[_][]const u8{ comp.zig_lib_directory.path.?, lib_libc ++ "glibc" }));

try args.append("-I");
try args.append(try std.fmt.allocPrint(arena, "{}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{}-{}-{}", .{
try args.append(try std.fmt.allocPrint(arena, "{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{s}-{s}-{s}", .{
comp.zig_lib_directory.path.?, @tagName(arch), @tagName(target.os.tag), @tagName(target.abi),
}));

@@ -539,7 +539,7 @@ fn add_include_dirs(comp: *Compilation, arena: *Allocator, args: *std.ArrayList(
try args.append(try lib_path(comp, arena, lib_libc ++ "include" ++ s ++ "generic-glibc"));

try args.append("-I");
try args.append(try std.fmt.allocPrint(arena, "{}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{}-linux-any", .{
try args.append(try std.fmt.allocPrint(arena, "{s}" ++ s ++ "libc" ++ s ++ "include" ++ s ++ "{s}-linux-any", .{
comp.zig_lib_directory.path.?, @tagName(arch),
}));

@@ -881,7 +881,7 @@ pub fn buildSharedObjects(comp: *Compilation) !void {
if (o_directory.handle.createFile(ok_basename, .{})) |file| {
file.close();
} else |err| {
std.log.warn("glibc shared objects: failed to mark completion: {}", .{@errorName(err)});
std.log.warn("glibc shared objects: failed to mark completion: {s}", .{@errorName(err)});
}
}

@@ -83,7 +83,7 @@ pub const LibCInstallation = struct {
}
inline for (fields) |field, i| {
if (!found_keys[i].found) {
log.err("missing field: {}\n", .{field.name});
log.err("missing field: {s}\n", .{field.name});
return error.ParseError;
}
}
@@ -96,18 +96,18 @@ pub const LibCInstallation = struct {
return error.ParseError;
}
if (self.crt_dir == null and !is_darwin) {
log.err("crt_dir may not be empty for {}\n", .{@tagName(Target.current.os.tag)});
log.err("crt_dir may not be empty for {s}\n", .{@tagName(Target.current.os.tag)});
return error.ParseError;
}
if (self.msvc_lib_dir == null and is_windows and !is_gnu) {
log.err("msvc_lib_dir may not be empty for {}-{}\n", .{
log.err("msvc_lib_dir may not be empty for {s}-{s}\n", .{
@tagName(Target.current.os.tag),
@tagName(Target.current.abi),
});
return error.ParseError;
}
if (self.kernel32_lib_dir == null and is_windows and !is_gnu) {
log.err("kernel32_lib_dir may not be empty for {}-{}\n", .{
log.err("kernel32_lib_dir may not be empty for {s}-{s}\n", .{
@tagName(Target.current.os.tag),
@tagName(Target.current.abi),
});
@@ -128,25 +128,25 @@ pub const LibCInstallation = struct {
try out.print(
\\# The directory that contains `stdlib.h`.
\\# On POSIX-like systems, include directories be found with: `cc -E -Wp,-v -xc /dev/null`
\\include_dir={}
\\include_dir={s}
\\
\\# The system-specific include directory. May be the same as `include_dir`.
\\# On Windows it's the directory that includes `vcruntime.h`.
\\# On POSIX it's the directory that includes `sys/errno.h`.
\\sys_include_dir={}
\\sys_include_dir={s}
\\
\\# The directory that contains `crt1.o` or `crt2.o`.
\\# On POSIX, can be found with `cc -print-file-name=crt1.o`.
\\# Not needed when targeting MacOS.
\\crt_dir={}
\\crt_dir={s}
\\
\\# The directory that contains `vcruntime.lib`.
\\# Only needed when targeting MSVC on Windows.
\\msvc_lib_dir={}
\\msvc_lib_dir={s}
\\
\\# The directory that contains `kernel32.lib`.
\\# Only needed when targeting MSVC on Windows.
\\kernel32_lib_dir={}
\\kernel32_lib_dir={s}
\\
, .{
include_dir,
@@ -338,7 +338,7 @@ pub const LibCInstallation = struct {

for (searches) |search| {
result_buf.shrink(0);
try result_buf.outStream().print("{}\\Include\\{}\\ucrt", .{ search.path, search.version });
try result_buf.outStream().print("{s}\\Include\\{s}\\ucrt", .{ search.path, search.version });

var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
error.FileNotFound,
@@ -384,7 +384,7 @@ pub const LibCInstallation = struct {

for (searches) |search| {
result_buf.shrink(0);
try result_buf.outStream().print("{}\\Lib\\{}\\ucrt\\{}", .{ search.path, search.version, arch_sub_dir });
try result_buf.outStream().print("{s}\\Lib\\{s}\\ucrt\\{s}", .{ search.path, search.version, arch_sub_dir });

var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
error.FileNotFound,
@@ -439,7 +439,7 @@ pub const LibCInstallation = struct {
for (searches) |search| {
result_buf.shrink(0);
const stream = result_buf.outStream();
try stream.print("{}\\Lib\\{}\\um\\{}", .{ search.path, search.version, arch_sub_dir });
try stream.print("{s}\\Lib\\{s}\\um\\{s}", .{ search.path, search.version, arch_sub_dir });

var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
error.FileNotFound,
@@ -520,7 +520,7 @@ fn ccPrintFileName(args: CCPrintFileNameOptions) ![:0]u8 {
const allocator = args.allocator;

const cc_exe = std.os.getenvZ("CC") orelse default_cc_exe;
const arg1 = try std.fmt.allocPrint(allocator, "-print-file-name={}", .{args.search_basename});
const arg1 = try std.fmt.allocPrint(allocator, "-print-file-name={s}", .{args.search_basename});
defer allocator.free(arg1);
const argv = [_][]const u8{ cc_exe, arg1 };

@@ -584,17 +584,17 @@ fn printVerboseInvocation(
if (!verbose) return;

if (search_basename) |s| {
std.debug.warn("Zig attempted to find the file '{}' by executing this command:\n", .{s});
std.debug.warn("Zig attempted to find the file '{s}' by executing this command:\n", .{s});
} else {
std.debug.warn("Zig attempted to find the path to native system libc headers by executing this command:\n", .{});
}
for (argv) |arg, i| {
if (i != 0) std.debug.warn(" ", .{});
std.debug.warn("{}", .{arg});
std.debug.warn("{s}", .{arg});
}
std.debug.warn("\n", .{});
if (stderr) |s| {
std.debug.warn("Output:\n==========\n{}\n==========\n", .{s});
std.debug.warn("Output:\n==========\n{s}\n==========\n", .{s});
}
}

@@ -560,9 +560,9 @@ pub const File = struct {
const full_out_path_z = try arena.dupeZ(u8, full_out_path);

if (base.options.verbose_link) {
std.debug.print("ar rcs {}", .{full_out_path_z});
std.debug.print("ar rcs {s}", .{full_out_path_z});
for (object_files.items) |arg| {
std.debug.print(" {}", .{arg});
std.debug.print(" {s}", .{arg});
}
std.debug.print("\n", .{});
}
@@ -574,11 +574,11 @@ pub const File = struct {

if (!base.options.disable_lld_caching) {
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
log.warn("failed to save archive hash digest file: {}", .{@errorName(err)});
log.warn("failed to save archive hash digest file: {s}", .{@errorName(err)});
};

man.writeManifest() catch |err| {
log.warn("failed to write cache manifest when archiving: {}", .{@errorName(err)});
log.warn("failed to write cache manifest when archiving: {s}", .{@errorName(err)});
};

base.lock = man.toOwnedLock();

@@ -111,8 +111,11 @@ pub fn flushModule(self: *C, comp: *Compilation) !void {
if (self.header.buf.items.len > 0) {
try writer.writeByte('\n');
}
if (self.header.items.len > 0) {
try writer.print("{s}\n", .{self.header.items});
}
if (self.constants.items.len > 0) {
try writer.print("{}\n", .{self.constants.items});
try writer.print("{s}\n", .{self.constants.items});
}
if (self.main.items.len > 1) {
const last_two = self.main.items[self.main.items.len - 2 ..];

@ -686,7 +686,7 @@ pub fn updateDecl(self: *Coff, module: *Module, decl: *Module.Decl) !void {
|
||||
if (need_realloc) {
|
||||
const curr_vaddr = self.getDeclVAddr(decl);
|
||||
const vaddr = try self.growTextBlock(&decl.link.coff, code.len, required_alignment);
|
||||
log.debug("growing {} from 0x{x} to 0x{x}\n", .{ decl.name, curr_vaddr, vaddr });
|
||||
log.debug("growing {s} from 0x{x} to 0x{x}\n", .{ decl.name, curr_vaddr, vaddr });
|
||||
if (vaddr != curr_vaddr) {
|
||||
log.debug(" (writing new offset table entry)\n", .{});
|
||||
self.offset_table.items[decl.link.coff.offset_table_index] = vaddr;
|
||||
@ -697,7 +697,7 @@ pub fn updateDecl(self: *Coff, module: *Module, decl: *Module.Decl) !void {
|
||||
}
|
||||
} else {
|
||||
const vaddr = try self.allocateTextBlock(&decl.link.coff, code.len, required_alignment);
|
||||
log.debug("allocated text block for {} at 0x{x} (size: {Bi})\n", .{ mem.spanZ(decl.name), vaddr, code.len });
|
||||
log.debug("allocated text block for {s} at 0x{x} (size: {Bi})\n", .{ mem.spanZ(decl.name), vaddr, code.len });
|
||||
errdefer self.freeTextBlock(&decl.link.coff);
|
||||
self.offset_table.items[decl.link.coff.offset_table_index] = vaddr;
|
||||
try self.writeOffsetTableEntry(decl.link.coff.offset_table_index);
|
||||
@ -880,7 +880,7 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
|
||||
id_symlink_basename,
|
||||
&prev_digest_buf,
|
||||
) catch |err| blk: {
|
||||
log.debug("COFF LLD new_digest={} error: {}", .{ digest, @errorName(err) });
|
||||
log.debug("COFF LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
|
||||
// Handle this as a cache miss.
|
||||
break :blk prev_digest_buf[0..0];
|
||||
};
|
||||
@ -1236,11 +1236,11 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
|
||||
// Update the file with the digest. If it fails we can continue; it only
|
||||
// means that the next invocation will have an unnecessary cache miss.
|
||||
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
|
||||
log.warn("failed to save linking hash digest file: {}", .{@errorName(err)});
|
||||
log.warn("failed to save linking hash digest file: {s}", .{@errorName(err)});
|
||||
};
|
||||
// Again failure here only means an unnecessary cache miss.
|
||||
man.writeManifest() catch |err| {
|
||||
log.warn("failed to write cache manifest when linking: {}", .{@errorName(err)});
|
||||
log.warn("failed to write cache manifest when linking: {s}", .{@errorName(err)});
|
||||
};
|
||||
// We hang on to this lock so that the output file path can be used without
|
||||
// other processes clobbering it.
|
||||
|
||||
@ -1362,7 +1362,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
|
||||
id_symlink_basename,
|
||||
&prev_digest_buf,
|
||||
) catch |err| blk: {
|
||||
log.debug("ELF LLD new_digest={} error: {}", .{ digest, @errorName(err) });
|
||||
log.debug("ELF LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
|
||||
// Handle this as a cache miss.
|
||||
break :blk prev_digest_buf[0..0];
|
||||
};
|
||||
@ -1396,7 +1396,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
|
||||
|
||||
if (self.base.options.output_mode == .Exe) {
|
||||
try argv.append("-z");
|
||||
try argv.append(try std.fmt.allocPrint(arena, "stack-size={}", .{stack_size}));
|
||||
try argv.append(try std.fmt.allocPrint(arena, "stack-size={d}", .{stack_size}));
|
||||
}
|
||||
|
||||
if (self.base.options.image_base_override) |image_base| {
|
||||
@ -1438,7 +1438,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
|
||||
if (getLDMOption(target)) |ldm| {
|
||||
// Any target ELF will use the freebsd osabi if suffixed with "_fbsd".
|
||||
const arg = if (target.os.tag == .freebsd)
|
||||
try std.fmt.allocPrint(arena, "{}_fbsd", .{ldm})
|
||||
try std.fmt.allocPrint(arena, "{s}_fbsd", .{ldm})
|
||||
else
|
||||
ldm;
|
||||
try argv.append("-m");
|
||||
@ -1599,7 +1599,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
// (the check for that needs to be earlier), but they could be full paths to .so files, in which
// case we want to avoid prepending "-l".
const ext = Compilation.classifyFileExt(link_lib);
const arg = if (ext == .shared_library) link_lib else try std.fmt.allocPrint(arena, "-l{}", .{link_lib});
const arg = if (ext == .shared_library) link_lib else try std.fmt.allocPrint(arena, "-l{s}", .{link_lib});
argv.appendAssumeCapacity(arg);
}

@ -1733,11 +1733,11 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
// Update the file with the digest. If it fails we can continue; it only
// means that the next invocation will have an unnecessary cache miss.
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
log.warn("failed to save linking hash digest file: {}", .{@errorName(err)});
log.warn("failed to save linking hash digest file: {s}", .{@errorName(err)});
};
// Again failure here only means an unnecessary cache miss.
man.writeManifest() catch |err| {
log.warn("failed to write cache manifest when linking: {}", .{@errorName(err)});
log.warn("failed to write cache manifest when linking: {s}", .{@errorName(err)});
};
// We hang on to this lock so that the output file path can be used without
// other processes clobbering it.
@ -2082,10 +2082,10 @@ pub fn allocateDeclIndexes(self: *Elf, decl: *Module.Decl) !void {
try self.offset_table.ensureCapacity(self.base.allocator, self.offset_table.items.len + 1);

if (self.local_symbol_free_list.popOrNull()) |i| {
log.debug("reusing symbol index {} for {}\n", .{ i, decl.name });
log.debug("reusing symbol index {} for {s}\n", .{ i, decl.name });
decl.link.elf.local_sym_index = i;
} else {
log.debug("allocating symbol index {} for {}\n", .{ self.local_symbols.items.len, decl.name });
log.debug("allocating symbol index {} for {s}\n", .{ self.local_symbols.items.len, decl.name });
decl.link.elf.local_sym_index = @intCast(u32, self.local_symbols.items.len);
_ = self.local_symbols.addOneAssumeCapacity();
}
@ -2182,7 +2182,7 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
if (zir_dumps.len != 0) {
for (zir_dumps) |fn_name| {
if (mem.eql(u8, mem.spanZ(decl.name), fn_name)) {
std.debug.print("\n{}\n", .{decl.name});
std.debug.print("\n{s}\n", .{decl.name});
typed_value.val.castTag(.function).?.data.dump(module.*);
}
}

@ -520,7 +520,7 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void {
id_symlink_basename,
&prev_digest_buf,
) catch |err| blk: {
log.debug("MachO LLD new_digest={} error: {}", .{ digest, @errorName(err) });
log.debug("MachO LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
// Handle this as a cache miss.
break :blk prev_digest_buf[0..0];
};
@ -706,7 +706,7 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void {
// (the check for that needs to be earlier), but they could be full paths to .dylib files, in which
// case we want to avoid prepending "-l".
const ext = Compilation.classifyFileExt(link_lib);
const arg = if (ext == .shared_library) link_lib else try std.fmt.allocPrint(arena, "-l{}", .{link_lib});
const arg = if (ext == .shared_library) link_lib else try std.fmt.allocPrint(arena, "-l{s}", .{link_lib});
argv.appendAssumeCapacity(arg);
}

@ -759,15 +759,15 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void {
self.base.allocator.free(result.stderr);
}
if (result.stdout.len != 0) {
log.warn("unexpected LD stdout: {}", .{result.stdout});
log.warn("unexpected LD stdout: {s}", .{result.stdout});
}
if (result.stderr.len != 0) {
log.warn("unexpected LD stderr: {}", .{result.stderr});
log.warn("unexpected LD stderr: {s}", .{result.stderr});
}
if (result.term != .Exited or result.term.Exited != 0) {
// TODO parse this output and surface with the Compilation API rather than
// directly outputting to stderr here.
log.err("{}", .{result.stderr});
log.err("{s}", .{result.stderr});
return error.LDReportedFailure;
}
} else {
@ -980,11 +980,11 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void {
// Update the file with the digest. If it fails we can continue; it only
// means that the next invocation will have an unnecessary cache miss.
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
log.warn("failed to save linking hash digest file: {}", .{@errorName(err)});
log.warn("failed to save linking hash digest file: {s}", .{@errorName(err)});
};
// Again failure here only means an unnecessary cache miss.
man.writeManifest() catch |err| {
log.warn("failed to write cache manifest when linking: {}", .{@errorName(err)});
log.warn("failed to write cache manifest when linking: {s}", .{@errorName(err)});
};
// We hang on to this lock so that the output file path can be used without
// other processes clobbering it.
@ -1088,10 +1088,10 @@ pub fn allocateDeclIndexes(self: *MachO, decl: *Module.Decl) !void {
try self.offset_table.ensureCapacity(self.base.allocator, self.offset_table.items.len + 1);

if (self.local_symbol_free_list.popOrNull()) |i| {
log.debug("reusing symbol index {} for {}", .{ i, decl.name });
log.debug("reusing symbol index {d} for {s}", .{ i, decl.name });
decl.link.macho.local_sym_index = i;
} else {
log.debug("allocating symbol index {} for {}", .{ self.local_symbols.items.len, decl.name });
log.debug("allocating symbol index {d} for {s}", .{ self.local_symbols.items.len, decl.name });
decl.link.macho.local_sym_index = @intCast(u32, self.local_symbols.items.len);
_ = self.local_symbols.addOneAssumeCapacity();
}
@ -1165,7 +1165,7 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
const need_realloc = code.len > capacity or !mem.isAlignedGeneric(u64, symbol.n_value, required_alignment);
if (need_realloc) {
const vaddr = try self.growTextBlock(&decl.link.macho, code.len, required_alignment);
log.debug("growing {} from 0x{x} to 0x{x}", .{ decl.name, symbol.n_value, vaddr });
log.debug("growing {s} from 0x{x} to 0x{x}", .{ decl.name, symbol.n_value, vaddr });
if (vaddr != symbol.n_value) {
symbol.n_value = vaddr;
log.debug(" (writing new offset table entry)", .{});
@ -1188,7 +1188,7 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
const decl_name = mem.spanZ(decl.name);
const name_str_index = try self.makeString(decl_name);
const addr = try self.allocateTextBlock(&decl.link.macho, code.len, required_alignment);
log.debug("allocated text block for {} at 0x{x}", .{ decl_name, addr });
log.debug("allocated text block for {s} at 0x{x}", .{ decl_name, addr });
errdefer self.freeTextBlock(&decl.link.macho);

symbol.* = .{

@ -321,7 +321,7 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
id_symlink_basename,
&prev_digest_buf,
) catch |err| blk: {
log.debug("WASM LLD new_digest={} error: {}", .{ digest, @errorName(err) });
log.debug("WASM LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
// Handle this as a cache miss.
break :blk prev_digest_buf[0..0];
};
@ -463,11 +463,11 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
// Update the file with the digest. If it fails we can continue; it only
// means that the next invocation will have an unnecessary cache miss.
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
log.warn("failed to save linking hash digest symlink: {}", .{@errorName(err)});
log.warn("failed to save linking hash digest symlink: {s}", .{@errorName(err)});
};
// Again failure here only means an unnecessary cache miss.
man.writeManifest() catch |err| {
log.warn("failed to write cache manifest when linking: {}", .{@errorName(err)});
log.warn("failed to write cache manifest when linking: {s}", .{@errorName(err)});
};
// We hang on to this lock so that the output file path can be used without
// other processes clobbering it.

src/main.zig (236 changed lines)
@ -118,7 +118,7 @@ pub fn main() anyerror!void {

pub fn mainArgs(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !void {
if (args.len <= 1) {
std.log.info("{}", .{usage});
std.log.info("{s}", .{usage});
fatal("expected command argument", .{});
}

@ -204,8 +204,8 @@ pub fn mainArgs(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
} else if (mem.eql(u8, cmd, "help") or mem.eql(u8, cmd, "-h") or mem.eql(u8, cmd, "--help")) {
try io.getStdOut().writeAll(usage);
} else {
std.log.info("{}", .{usage});
fatal("unknown command: {}", .{args[1]});
std.log.info("{s}", .{usage});
fatal("unknown command: {s}", .{args[1]});
}
}

@ -615,7 +615,7 @@ fn buildOutputType(
|
||||
fatal("unexpected end-of-parameter mark: --", .{});
|
||||
}
|
||||
} else if (mem.eql(u8, arg, "--pkg-begin")) {
|
||||
if (i + 2 >= args.len) fatal("Expected 2 arguments after {}", .{arg});
|
||||
if (i + 2 >= args.len) fatal("Expected 2 arguments after {s}", .{arg});
|
||||
i += 1;
|
||||
const pkg_name = args[i];
|
||||
i += 1;
|
||||
@ -635,7 +635,7 @@ fn buildOutputType(
|
||||
cur_pkg = cur_pkg.parent orelse
|
||||
fatal("encountered --pkg-end with no matching --pkg-begin", .{});
|
||||
} else if (mem.eql(u8, arg, "--main-pkg-path")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
main_pkg_path = args[i];
|
||||
} else if (mem.eql(u8, arg, "-cflags")) {
|
||||
@ -653,10 +653,10 @@ fn buildOutputType(
|
||||
i += 1;
|
||||
const next_arg = args[i];
|
||||
color = std.meta.stringToEnum(Color, next_arg) orelse {
|
||||
fatal("expected [auto|on|off] after --color, found '{}'", .{next_arg});
|
||||
fatal("expected [auto|on|off] after --color, found '{s}'", .{next_arg});
|
||||
};
|
||||
} else if (mem.eql(u8, arg, "--subsystem")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
if (mem.eql(u8, args[i], "console")) {
|
||||
subsystem = .Console;
|
||||
@ -689,51 +689,51 @@ fn buildOutputType(
|
||||
});
|
||||
}
|
||||
} else if (mem.eql(u8, arg, "-O")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
optimize_mode_string = args[i];
|
||||
} else if (mem.eql(u8, arg, "--stack")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
stack_size_override = std.fmt.parseUnsigned(u64, args[i], 0) catch |err| {
|
||||
fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
|
||||
};
|
||||
} else if (mem.eql(u8, arg, "--image-base")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
image_base_override = std.fmt.parseUnsigned(u64, args[i], 0) catch |err| {
|
||||
fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
|
||||
};
|
||||
} else if (mem.eql(u8, arg, "--name")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
provided_name = args[i];
|
||||
} else if (mem.eql(u8, arg, "-rpath")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
try rpath_list.append(args[i]);
|
||||
} else if (mem.eql(u8, arg, "--library-directory") or mem.eql(u8, arg, "-L")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
try lib_dirs.append(args[i]);
|
||||
} else if (mem.eql(u8, arg, "-F")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
try framework_dirs.append(args[i]);
|
||||
} else if (mem.eql(u8, arg, "-framework")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
try frameworks.append(args[i]);
|
||||
} else if (mem.eql(u8, arg, "-T") or mem.eql(u8, arg, "--script")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
linker_script = args[i];
|
||||
} else if (mem.eql(u8, arg, "--version-script")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
version_script = args[i];
|
||||
} else if (mem.eql(u8, arg, "--library") or mem.eql(u8, arg, "-l")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
// We don't know whether this library is part of libc or libc++ until we resolve the target.
|
||||
// So we simply append to the list for now.
|
||||
i += 1;
|
||||
@ -743,7 +743,7 @@ fn buildOutputType(
|
||||
mem.eql(u8, arg, "-I") or
|
||||
mem.eql(u8, arg, "-dirafter"))
|
||||
{
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
try clang_argv.append(arg);
|
||||
try clang_argv.append(args[i]);
|
||||
@ -753,19 +753,19 @@ fn buildOutputType(
|
||||
}
|
||||
i += 1;
|
||||
version = std.builtin.Version.parse(args[i]) catch |err| {
|
||||
fatal("unable to parse --version '{}': {}", .{ args[i], @errorName(err) });
|
||||
fatal("unable to parse --version '{s}': {s}", .{ args[i], @errorName(err) });
|
||||
};
|
||||
have_version = true;
|
||||
} else if (mem.eql(u8, arg, "-target")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
target_arch_os_abi = args[i];
|
||||
} else if (mem.eql(u8, arg, "-mcpu")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
target_mcpu = args[i];
|
||||
} else if (mem.eql(u8, arg, "-mcmodel")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
machine_code_model = parseCodeModel(args[i]);
|
||||
} else if (mem.startsWith(u8, arg, "-ofmt=")) {
|
||||
@ -777,35 +777,35 @@ fn buildOutputType(
|
||||
} else if (mem.startsWith(u8, arg, "-O")) {
|
||||
optimize_mode_string = arg["-O".len..];
|
||||
} else if (mem.eql(u8, arg, "--dynamic-linker")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
target_dynamic_linker = args[i];
|
||||
} else if (mem.eql(u8, arg, "--libc")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
libc_paths_file = args[i];
|
||||
} else if (mem.eql(u8, arg, "--test-filter")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
test_filter = args[i];
|
||||
} else if (mem.eql(u8, arg, "--test-name-prefix")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
test_name_prefix = args[i];
|
||||
} else if (mem.eql(u8, arg, "--test-cmd")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
try test_exec_args.append(args[i]);
|
||||
} else if (mem.eql(u8, arg, "--cache-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
override_local_cache_dir = args[i];
|
||||
} else if (mem.eql(u8, arg, "--global-cache-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
override_global_cache_dir = args[i];
|
||||
} else if (mem.eql(u8, arg, "--override-lib-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
|
||||
i += 1;
|
||||
override_lib_dir = args[i];
|
||||
} else if (mem.eql(u8, arg, "-fcompiler-rt")) {
|
||||
@ -968,7 +968,7 @@ fn buildOutputType(
|
||||
{
|
||||
try clang_argv.append(arg);
|
||||
} else {
|
||||
fatal("unrecognized parameter: '{}'", .{arg});
|
||||
fatal("unrecognized parameter: '{s}'", .{arg});
|
||||
}
|
||||
} else switch (Compilation.classifyFileExt(arg)) {
|
||||
.object, .static_library, .shared_library => {
|
||||
@ -982,19 +982,19 @@ fn buildOutputType(
|
||||
},
|
||||
.zig, .zir => {
|
||||
if (root_src_file) |other| {
|
||||
fatal("found another zig file '{}' after root source file '{}'", .{ arg, other });
|
||||
fatal("found another zig file '{s}' after root source file '{s}'", .{ arg, other });
|
||||
} else {
|
||||
root_src_file = arg;
|
||||
}
|
||||
},
|
||||
.unknown => {
|
||||
fatal("unrecognized file extension of parameter '{}'", .{arg});
|
||||
fatal("unrecognized file extension of parameter '{s}'", .{arg});
|
||||
},
|
||||
}
|
||||
}
|
||||
if (optimize_mode_string) |s| {
|
||||
optimize_mode = std.meta.stringToEnum(std.builtin.Mode, s) orelse
|
||||
fatal("unrecognized optimization mode: '{}'", .{s});
|
||||
fatal("unrecognized optimization mode: '{s}'", .{s});
|
||||
}
|
||||
},
|
||||
.cc, .cpp => {
|
||||
@ -1018,7 +1018,7 @@ fn buildOutputType(
|
||||
var it = ClangArgIterator.init(arena, all_args);
|
||||
while (it.has_next) {
|
||||
it.next() catch |err| {
|
||||
fatal("unable to parse command line parameters: {}", .{@errorName(err)});
|
||||
fatal("unable to parse command line parameters: {s}", .{@errorName(err)});
|
||||
};
|
||||
switch (it.zig_equivalent) {
|
||||
.target => target_arch_os_abi = it.only_arg, // example: -target riscv64-linux-unknown
|
||||
@ -1038,7 +1038,7 @@ fn buildOutputType(
|
||||
},
|
||||
.zig, .zir => {
|
||||
if (root_src_file) |other| {
|
||||
fatal("found another zig file '{}' after root source file '{}'", .{ it.only_arg, other });
|
||||
fatal("found another zig file '{s}' after root source file '{s}'", .{ it.only_arg, other });
|
||||
} else {
|
||||
root_src_file = it.only_arg;
|
||||
}
|
||||
@ -1153,7 +1153,7 @@ fn buildOutputType(
|
||||
if (mem.eql(u8, arg, "-soname")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
const name = linker_args.items[i];
|
||||
soname = .{ .yes = name };
|
||||
@ -1185,7 +1185,7 @@ fn buildOutputType(
|
||||
} else if (mem.eql(u8, arg, "-rpath")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
try rpath_list.append(linker_args.items[i]);
|
||||
} else if (mem.eql(u8, arg, "-I") or
|
||||
@ -1194,7 +1194,7 @@ fn buildOutputType(
|
||||
{
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
target_dynamic_linker = linker_args.items[i];
|
||||
} else if (mem.eql(u8, arg, "-E") or
|
||||
@ -1205,7 +1205,7 @@ fn buildOutputType(
|
||||
} else if (mem.eql(u8, arg, "--version-script")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
version_script = linker_args.items[i];
|
||||
} else if (mem.startsWith(u8, arg, "-O")) {
|
||||
@ -1227,7 +1227,7 @@ fn buildOutputType(
|
||||
} else if (mem.eql(u8, arg, "-z")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
const z_arg = linker_args.items[i];
|
||||
if (mem.eql(u8, z_arg, "nodelete")) {
|
||||
@ -1235,44 +1235,44 @@ fn buildOutputType(
|
||||
} else if (mem.eql(u8, z_arg, "defs")) {
|
||||
linker_z_defs = true;
|
||||
} else {
|
||||
warn("unsupported linker arg: -z {}", .{z_arg});
|
||||
warn("unsupported linker arg: -z {s}", .{z_arg});
|
||||
}
|
||||
} else if (mem.eql(u8, arg, "--major-image-version")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
version.major = std.fmt.parseUnsigned(u32, linker_args.items[i], 10) catch |err| {
|
||||
fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
|
||||
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
|
||||
};
|
||||
have_version = true;
|
||||
} else if (mem.eql(u8, arg, "--minor-image-version")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
version.minor = std.fmt.parseUnsigned(u32, linker_args.items[i], 10) catch |err| {
|
||||
fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
|
||||
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
|
||||
};
|
||||
have_version = true;
|
||||
} else if (mem.eql(u8, arg, "--stack")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
stack_size_override = std.fmt.parseUnsigned(u64, linker_args.items[i], 0) catch |err| {
|
||||
fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
|
||||
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
|
||||
};
|
||||
} else if (mem.eql(u8, arg, "--image-base")) {
|
||||
i += 1;
|
||||
if (i >= linker_args.items.len) {
|
||||
fatal("expected linker arg after '{}'", .{arg});
|
||||
fatal("expected linker arg after '{s}'", .{arg});
|
||||
}
|
||||
image_base_override = std.fmt.parseUnsigned(u64, linker_args.items[i], 0) catch |err| {
|
||||
fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
|
||||
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
|
||||
};
|
||||
} else {
|
||||
warn("unsupported linker arg: {}", .{arg});
|
||||
warn("unsupported linker arg: {s}", .{arg});
|
||||
}
|
||||
}
|
||||
|
||||
@ -1328,7 +1328,7 @@ fn buildOutputType(
|
||||
}
|
||||
|
||||
if (arg_mode == .translate_c and c_source_files.items.len != 1) {
|
||||
fatal("translate-c expects exactly 1 source file (found {})", .{c_source_files.items.len});
|
||||
fatal("translate-c expects exactly 1 source file (found {d})", .{c_source_files.items.len});
|
||||
}
|
||||
|
||||
if (root_src_file == null and arg_mode == .zig_test) {
|
||||
@ -1373,25 +1373,25 @@ fn buildOutputType(
|
||||
help: {
|
||||
var help_text = std.ArrayList(u8).init(arena);
|
||||
for (diags.arch.?.allCpuModels()) |cpu| {
|
||||
help_text.writer().print(" {}\n", .{cpu.name}) catch break :help;
|
||||
help_text.writer().print(" {s}\n", .{cpu.name}) catch break :help;
|
||||
}
|
||||
std.log.info("Available CPUs for architecture '{}': {}", .{
|
||||
std.log.info("Available CPUs for architecture '{s}': {s}", .{
|
||||
@tagName(diags.arch.?), help_text.items,
|
||||
});
|
||||
}
|
||||
fatal("Unknown CPU: '{}'", .{diags.cpu_name.?});
|
||||
fatal("Unknown CPU: '{s}'", .{diags.cpu_name.?});
|
||||
},
|
||||
error.UnknownCpuFeature => {
|
||||
help: {
|
||||
var help_text = std.ArrayList(u8).init(arena);
|
||||
for (diags.arch.?.allFeaturesList()) |feature| {
|
||||
help_text.writer().print(" {}: {}\n", .{ feature.name, feature.description }) catch break :help;
|
||||
help_text.writer().print(" {s}: {s}\n", .{ feature.name, feature.description }) catch break :help;
|
||||
}
|
||||
std.log.info("Available CPU features for architecture '{}': {}", .{
|
||||
std.log.info("Available CPU features for architecture '{s}': {s}", .{
|
||||
@tagName(diags.arch.?), help_text.items,
|
||||
});
|
||||
}
|
||||
fatal("Unknown CPU feature: '{}'", .{diags.unknown_feature_name});
|
||||
fatal("Unknown CPU feature: '{s}'", .{diags.unknown_feature_name});
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
@ -1431,10 +1431,10 @@ fn buildOutputType(
|
||||
|
||||
if (cross_target.isNativeOs() and (system_libs.items.len != 0 or want_native_include_dirs)) {
|
||||
const paths = std.zig.system.NativePaths.detect(arena) catch |err| {
|
||||
fatal("unable to detect native system paths: {}", .{@errorName(err)});
|
||||
fatal("unable to detect native system paths: {s}", .{@errorName(err)});
|
||||
};
|
||||
for (paths.warnings.items) |warning| {
|
||||
warn("{}", .{warning});
|
||||
warn("{s}", .{warning});
|
||||
}
|
||||
|
||||
const has_sysroot = if (comptime std.Target.current.isDarwin()) outer: {
|
||||
@ -1492,7 +1492,7 @@ fn buildOutputType(
} else if (mem.eql(u8, ofmt, "raw")) {
break :blk .raw;
} else {
fatal("unsupported object format: {}", .{ofmt});
fatal("unsupported object format: {s}", .{ofmt});
}
};

@ -1562,7 +1562,7 @@ fn buildOutputType(
|
||||
}
|
||||
if (fs.path.dirname(full_path)) |dirname| {
|
||||
const handle = fs.cwd().openDir(dirname, .{}) catch |err| {
|
||||
fatal("unable to open output directory '{}': {}", .{ dirname, @errorName(err) });
|
||||
fatal("unable to open output directory '{s}': {s}", .{ dirname, @errorName(err) });
|
||||
};
|
||||
cleanup_emit_bin_dir = handle;
|
||||
break :b Compilation.EmitLoc{
|
||||
@ -1585,19 +1585,19 @@ fn buildOutputType(
|
||||
},
|
||||
};
|
||||
|
||||
const default_h_basename = try std.fmt.allocPrint(arena, "{}.h", .{root_name});
|
||||
const default_h_basename = try std.fmt.allocPrint(arena, "{s}.h", .{root_name});
|
||||
var emit_h_resolved = try emit_h.resolve(default_h_basename);
|
||||
defer emit_h_resolved.deinit();
|
||||
|
||||
const default_asm_basename = try std.fmt.allocPrint(arena, "{}.s", .{root_name});
|
||||
const default_asm_basename = try std.fmt.allocPrint(arena, "{s}.s", .{root_name});
|
||||
var emit_asm_resolved = try emit_asm.resolve(default_asm_basename);
|
||||
defer emit_asm_resolved.deinit();
|
||||
|
||||
const default_llvm_ir_basename = try std.fmt.allocPrint(arena, "{}.ll", .{root_name});
|
||||
const default_llvm_ir_basename = try std.fmt.allocPrint(arena, "{s}.ll", .{root_name});
|
||||
var emit_llvm_ir_resolved = try emit_llvm_ir.resolve(default_llvm_ir_basename);
|
||||
defer emit_llvm_ir_resolved.deinit();
|
||||
|
||||
const default_analysis_basename = try std.fmt.allocPrint(arena, "{}-analysis.json", .{root_name});
|
||||
const default_analysis_basename = try std.fmt.allocPrint(arena, "{s}-analysis.json", .{root_name});
|
||||
var emit_analysis_resolved = try emit_analysis.resolve(default_analysis_basename);
|
||||
defer emit_analysis_resolved.deinit();
|
||||
|
||||
@ -1609,10 +1609,10 @@ fn buildOutputType(
|
||||
.yes_default_path => blk: {
|
||||
if (root_src_file) |rsf| {
|
||||
if (mem.endsWith(u8, rsf, ".zir")) {
|
||||
break :blk try std.fmt.allocPrint(arena, "{}.out.zir", .{root_name});
|
||||
break :blk try std.fmt.allocPrint(arena, "{s}.out.zir", .{root_name});
|
||||
}
|
||||
}
|
||||
break :blk try std.fmt.allocPrint(arena, "{}.zir", .{root_name});
|
||||
break :blk try std.fmt.allocPrint(arena, "{s}.zir", .{root_name});
|
||||
},
|
||||
.yes => |p| p,
|
||||
};
|
||||
@ -1642,7 +1642,7 @@ fn buildOutputType(
|
||||
}
|
||||
else
|
||||
introspect.findZigLibDirFromSelfExe(arena, self_exe_path) catch |err| {
|
||||
fatal("unable to find zig installation directory: {}", .{@errorName(err)});
|
||||
fatal("unable to find zig installation directory: {s}", .{@errorName(err)});
|
||||
};
|
||||
defer zig_lib_directory.handle.close();
|
||||
|
||||
@ -1655,7 +1655,7 @@ fn buildOutputType(
|
||||
|
||||
if (libc_paths_file) |paths_file| {
|
||||
libc_installation = LibCInstallation.parse(gpa, paths_file) catch |err| {
|
||||
fatal("unable to parse libc paths file: {}", .{@errorName(err)});
|
||||
fatal("unable to parse libc paths file: {s}", .{@errorName(err)});
|
||||
};
|
||||
}
|
||||
|
||||
@ -1791,7 +1791,7 @@ fn buildOutputType(
|
||||
.disable_lld_caching = !have_enable_cache,
|
||||
.subsystem = subsystem,
|
||||
}) catch |err| {
|
||||
fatal("unable to create compilation: {}", .{@errorName(err)});
|
||||
fatal("unable to create compilation: {s}", .{@errorName(err)});
|
||||
};
|
||||
var comp_destroyed = false;
|
||||
defer if (!comp_destroyed) comp.destroy();
|
||||
@ -1914,12 +1914,12 @@ fn buildOutputType(
|
||||
if (!watch) return cleanExit();
|
||||
} else {
|
||||
const cmd = try argvCmd(arena, argv.items);
|
||||
fatal("the following test command failed with exit code {}:\n{}", .{ code, cmd });
|
||||
fatal("the following test command failed with exit code {}:\n{s}", .{ code, cmd });
|
||||
}
|
||||
},
|
||||
else => {
|
||||
const cmd = try argvCmd(arena, argv.items);
|
||||
fatal("the following test command crashed:\n{}", .{cmd});
|
||||
fatal("the following test command crashed:\n{s}", .{cmd});
|
||||
},
|
||||
}
|
||||
},
|
||||
@ -1936,7 +1936,7 @@ fn buildOutputType(
|
||||
try stderr.print("(zig) ", .{});
|
||||
try comp.makeBinFileExecutable();
|
||||
if (stdin.readUntilDelimiterOrEof(&repl_buf, '\n') catch |err| {
|
||||
try stderr.print("\nUnable to parse command: {}\n", .{@errorName(err)});
|
||||
try stderr.print("\nUnable to parse command: {s}\n", .{@errorName(err)});
|
||||
continue;
|
||||
}) |line| {
|
||||
const actual_line = mem.trimRight(u8, line, "\r\n ");
|
||||
@ -1954,7 +1954,7 @@ fn buildOutputType(
|
||||
} else if (mem.eql(u8, actual_line, "help")) {
|
||||
try stderr.writeAll(repl_help);
|
||||
} else {
|
||||
try stderr.print("unknown command: {}\n", .{actual_line});
|
||||
try stderr.print("unknown command: {s}\n", .{actual_line});
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
@ -2012,14 +2012,14 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
|
||||
assert(comp.c_source_files.len == 1);
|
||||
const c_source_file = comp.c_source_files[0];
|
||||
|
||||
const translated_zig_basename = try std.fmt.allocPrint(arena, "{}.zig", .{comp.bin_file.options.root_name});
|
||||
const translated_zig_basename = try std.fmt.allocPrint(arena, "{s}.zig", .{comp.bin_file.options.root_name});
|
||||
|
||||
var man: Cache.Manifest = comp.obtainCObjectCacheManifest();
|
||||
defer if (enable_cache) man.deinit();
|
||||
|
||||
man.hash.add(@as(u16, 0xb945)); // Random number to distinguish translate-c from compiling C objects
|
||||
_ = man.addFile(c_source_file.src_path, null) catch |err| {
|
||||
fatal("unable to process '{}': {}", .{ c_source_file.src_path, @errorName(err) });
|
||||
fatal("unable to process '{s}': {s}", .{ c_source_file.src_path, @errorName(err) });
|
||||
};
|
||||
|
||||
const digest = if (try man.hit()) man.final() else digest: {
|
||||
@ -2034,7 +2034,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
|
||||
break :blk null;
|
||||
|
||||
const c_src_basename = fs.path.basename(c_source_file.src_path);
|
||||
const dep_basename = try std.fmt.allocPrint(arena, "{}.d", .{c_src_basename});
|
||||
const dep_basename = try std.fmt.allocPrint(arena, "{s}.d", .{c_src_basename});
|
||||
const out_dep_path = try comp.tmpFilePath(arena, dep_basename);
|
||||
break :blk out_dep_path;
|
||||
};
|
||||
@ -2069,7 +2069,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
|
||||
error.ASTUnitFailure => fatal("clang API returned errors but due to a clang bug, it is not exposing the errors for zig to see. For more details: https://github.com/ziglang/zig/issues/4455", .{}),
|
||||
error.SemanticAnalyzeFail => {
|
||||
for (clang_errors) |clang_err| {
|
||||
std.debug.print("{}:{}:{}: {}\n", .{
|
||||
std.debug.print("{s}:{}:{}: {s}\n", .{
|
||||
if (clang_err.filename_ptr) |p| p[0..clang_err.filename_len] else "(no file)",
|
||||
clang_err.line + 1,
|
||||
clang_err.column + 1,
|
||||
@ -2087,7 +2087,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
|
||||
try man.addDepFilePost(zig_cache_tmp_dir, dep_basename);
|
||||
// Just to save disk space, we delete the file because it is never needed again.
|
||||
zig_cache_tmp_dir.deleteFile(dep_basename) catch |err| {
|
||||
warn("failed to delete '{}': {}", .{ dep_file_path, @errorName(err) });
|
||||
warn("failed to delete '{s}': {s}", .{ dep_file_path, @errorName(err) });
|
||||
};
|
||||
}
|
||||
|
||||
@ -2102,7 +2102,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
|
||||
_ = try std.zig.render(comp.gpa, bos.writer(), tree);
|
||||
try bos.flush();
|
||||
|
||||
man.writeManifest() catch |err| warn("failed to write cache manifest: {}", .{@errorName(err)});
|
||||
man.writeManifest() catch |err| warn("failed to write cache manifest: {s}", .{@errorName(err)});
|
||||
|
||||
break :digest digest;
|
||||
};
|
||||
@ -2111,7 +2111,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
|
||||
const full_zig_path = try comp.local_cache_directory.join(arena, &[_][]const u8{
|
||||
"o", &digest, translated_zig_basename,
|
||||
});
|
||||
try io.getStdOut().writer().print("{}\n", .{full_zig_path});
|
||||
try io.getStdOut().writer().print("{s}\n", .{full_zig_path});
|
||||
return cleanExit();
|
||||
} else {
|
||||
const out_zig_path = try fs.path.join(arena, &[_][]const u8{ "o", &digest, translated_zig_basename });
|
||||
@ -2148,10 +2148,10 @@ pub fn cmdLibC(gpa: *Allocator, args: []const []const u8) !void {
|
||||
try stdout.writeAll(usage_libc);
|
||||
return cleanExit();
|
||||
} else {
|
||||
fatal("unrecognized parameter: '{}'", .{arg});
|
||||
fatal("unrecognized parameter: '{s}'", .{arg});
|
||||
}
|
||||
} else if (input_file != null) {
|
||||
fatal("unexpected extra parameter: '{}'", .{arg});
|
||||
fatal("unexpected extra parameter: '{s}'", .{arg});
|
||||
} else {
|
||||
input_file = arg;
|
||||
}
|
||||
@ -2159,7 +2159,7 @@ pub fn cmdLibC(gpa: *Allocator, args: []const []const u8) !void {
|
||||
}
|
||||
if (input_file) |libc_file| {
|
||||
var libc = LibCInstallation.parse(gpa, libc_file) catch |err| {
|
||||
fatal("unable to parse libc file: {}", .{@errorName(err)});
|
||||
fatal("unable to parse libc file: {s}", .{@errorName(err)});
|
||||
};
|
||||
defer libc.deinit(gpa);
|
||||
} else {
|
||||
@ -2167,7 +2167,7 @@ pub fn cmdLibC(gpa: *Allocator, args: []const []const u8) !void {
|
||||
.allocator = gpa,
|
||||
.verbose = true,
|
||||
}) catch |err| {
|
||||
fatal("unable to detect native libc: {}", .{@errorName(err)});
|
||||
fatal("unable to detect native libc: {s}", .{@errorName(err)});
|
||||
};
|
||||
defer libc.deinit(gpa);
|
||||
|
||||
@ -2205,16 +2205,16 @@ pub fn cmdInit(
|
||||
try io.getStdOut().writeAll(usage_init);
|
||||
return cleanExit();
|
||||
} else {
|
||||
fatal("unrecognized parameter: '{}'", .{arg});
|
||||
fatal("unrecognized parameter: '{s}'", .{arg});
|
||||
}
|
||||
} else {
|
||||
fatal("unexpected extra parameter: '{}'", .{arg});
|
||||
fatal("unexpected extra parameter: '{s}'", .{arg});
|
||||
}
|
||||
}
|
||||
}
|
||||
const self_exe_path = try fs.selfExePathAlloc(arena);
|
||||
var zig_lib_directory = introspect.findZigLibDirFromSelfExe(arena, self_exe_path) catch |err| {
|
||||
fatal("unable to find zig installation directory: {}\n", .{@errorName(err)});
|
||||
fatal("unable to find zig installation directory: {s}\n", .{@errorName(err)});
|
||||
};
|
||||
defer zig_lib_directory.handle.close();
|
||||
|
||||
@ -2232,7 +2232,7 @@ pub fn cmdInit(
|
||||
|
||||
const max_bytes = 10 * 1024 * 1024;
|
||||
const build_zig_contents = template_dir.readFileAlloc(arena, "build.zig", max_bytes) catch |err| {
|
||||
fatal("unable to read template file 'build.zig': {}", .{@errorName(err)});
|
||||
fatal("unable to read template file 'build.zig': {s}", .{@errorName(err)});
|
||||
};
|
||||
var modified_build_zig_contents = std.ArrayList(u8).init(arena);
|
||||
try modified_build_zig_contents.ensureCapacity(build_zig_contents.len);
|
||||
@ -2244,13 +2244,13 @@ pub fn cmdInit(
|
||||
}
|
||||
}
|
||||
const main_zig_contents = template_dir.readFileAlloc(arena, "src" ++ s ++ "main.zig", max_bytes) catch |err| {
|
||||
fatal("unable to read template file 'main.zig': {}", .{@errorName(err)});
|
||||
fatal("unable to read template file 'main.zig': {s}", .{@errorName(err)});
|
||||
};
|
||||
if (fs.cwd().access("build.zig", .{})) |_| {
|
||||
fatal("existing build.zig file would be overwritten", .{});
|
||||
} else |err| switch (err) {
|
||||
error.FileNotFound => {},
|
||||
else => fatal("unable to test existence of build.zig: {}\n", .{@errorName(err)}),
|
||||
else => fatal("unable to test existence of build.zig: {s}\n", .{@errorName(err)}),
|
||||
}
|
||||
var src_dir = try fs.cwd().makeOpenPath("src", .{});
|
||||
defer src_dir.close();
|
||||
@ -2311,23 +2311,23 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
|
||||
const arg = args[i];
|
||||
if (mem.startsWith(u8, arg, "-")) {
|
||||
if (mem.eql(u8, arg, "--build-file")) {
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{}'", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg});
|
||||
i += 1;
|
||||
build_file = args[i];
|
||||
continue;
|
||||
} else if (mem.eql(u8, arg, "--override-lib-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{}'", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg});
|
||||
i += 1;
|
||||
override_lib_dir = args[i];
|
||||
try child_argv.appendSlice(&[_][]const u8{ arg, args[i] });
|
||||
continue;
|
||||
} else if (mem.eql(u8, arg, "--cache-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{}'", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg});
|
||||
i += 1;
|
||||
override_local_cache_dir = args[i];
|
||||
continue;
|
||||
} else if (mem.eql(u8, arg, "--global-cache-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{}'", .{arg});
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg});
|
||||
i += 1;
|
||||
override_global_cache_dir = args[i];
|
||||
continue;
|
||||
@ -2344,7 +2344,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
|
||||
}
|
||||
else
|
||||
introspect.findZigLibDirFromSelfExe(arena, self_exe_path) catch |err| {
|
||||
fatal("unable to find zig installation directory: {}", .{@errorName(err)});
|
||||
fatal("unable to find zig installation directory: {s}", .{@errorName(err)});
|
||||
};
|
||||
defer zig_lib_directory.handle.close();
|
||||
|
||||
@ -2385,7 +2385,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
|
||||
} else |err| switch (err) {
|
||||
error.FileNotFound => {
|
||||
dirname = fs.path.dirname(dirname) orelse {
|
||||
std.log.info("{}", .{
|
||||
std.log.info("{s}", .{
|
||||
\\Initialize a 'build.zig' template file with `zig init-lib` or `zig init-exe`,
|
||||
\\or see `zig --help` for more options.
|
||||
});
|
||||
@ -2467,7 +2467,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
|
||||
.self_exe_path = self_exe_path,
|
||||
.thread_pool = &thread_pool,
|
||||
}) catch |err| {
|
||||
fatal("unable to create compilation: {}", .{@errorName(err)});
|
||||
fatal("unable to create compilation: {s}", .{@errorName(err)});
|
||||
};
|
||||
defer comp.destroy();
|
||||
|
||||
@ -2493,11 +2493,11 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
|
||||
.Exited => |code| {
|
||||
if (code == 0) return cleanExit();
|
||||
const cmd = try argvCmd(arena, child_argv);
|
||||
fatal("the following build command failed with exit code {}:\n{}", .{ code, cmd });
|
||||
fatal("the following build command failed with exit code {}:\n{s}", .{ code, cmd });
|
||||
},
|
||||
else => {
|
||||
const cmd = try argvCmd(arena, child_argv);
|
||||
fatal("the following build command crashed:\n{}", .{cmd});
|
||||
fatal("the following build command crashed:\n{s}", .{cmd});
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -2564,14 +2564,14 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
|
||||
i += 1;
|
||||
const next_arg = args[i];
|
||||
color = std.meta.stringToEnum(Color, next_arg) orelse {
|
||||
fatal("expected [auto|on|off] after --color, found '{}'", .{next_arg});
|
||||
fatal("expected [auto|on|off] after --color, found '{s}'", .{next_arg});
|
||||
};
|
||||
} else if (mem.eql(u8, arg, "--stdin")) {
|
||||
stdin_flag = true;
|
||||
} else if (mem.eql(u8, arg, "--check")) {
|
||||
check_flag = true;
|
||||
} else {
|
||||
fatal("unrecognized parameter: '{}'", .{arg});
|
||||
fatal("unrecognized parameter: '{s}'", .{arg});
|
||||
}
|
||||
} else {
|
||||
try input_files.append(arg);
|
||||
@ -2590,7 +2590,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
|
||||
defer gpa.free(source_code);
|
||||
|
||||
const tree = std.zig.parse(gpa, source_code) catch |err| {
|
||||
fatal("error parsing stdin: {}", .{err});
|
||||
fatal("error parsing stdin: {s}", .{err});
|
||||
};
|
||||
defer tree.deinit();
|
||||
|
||||
@ -2629,7 +2629,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
for (input_files.items) |file_path| {
// Get the real path here to avoid Windows failing on relative file paths with . or .. in them.
const real_path = fs.realpathAlloc(gpa, file_path) catch |err| {
fatal("unable to open '{}': {}", .{ file_path, err });
fatal("unable to open '{s}': {s}", .{ file_path, @errorName(err) });
};
defer gpa.free(real_path);

@ -2668,7 +2668,7 @@ fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_
|
||||
fmtPathFile(fmt, file_path, check_mode, dir, sub_path) catch |err| switch (err) {
|
||||
error.IsDir, error.AccessDenied => return fmtPathDir(fmt, file_path, check_mode, dir, sub_path),
|
||||
else => {
|
||||
warn("unable to format '{}': {}", .{ file_path, err });
|
||||
warn("unable to format '{s}': {s}", .{ file_path, @errorName(err) });
|
||||
fmt.any_error = true;
|
||||
return;
|
||||
},
|
||||
@ -2702,7 +2702,7 @@ fn fmtPathDir(
|
||||
try fmtPathDir(fmt, full_path, check_mode, dir, entry.name);
|
||||
} else {
|
||||
fmtPathFile(fmt, full_path, check_mode, dir, entry.name) catch |err| {
|
||||
warn("unable to format '{}': {}", .{ full_path, err });
|
||||
warn("unable to format '{s}': {s}", .{ full_path, @errorName(err) });
|
||||
fmt.any_error = true;
|
||||
return;
|
||||
};
|
||||
@ -2761,7 +2761,7 @@ fn fmtPathFile(
|
||||
const anything_changed = try std.zig.render(fmt.gpa, io.null_out_stream, tree);
|
||||
if (anything_changed) {
|
||||
const stdout = io.getStdOut().writer();
|
||||
try stdout.print("{}\n", .{file_path});
|
||||
try stdout.print("{s}\n", .{file_path});
|
||||
fmt.any_error = true;
|
||||
}
|
||||
} else {
|
||||
@ -2779,7 +2779,7 @@ fn fmtPathFile(
|
||||
try af.file.writeAll(fmt.out_buffer.items);
|
||||
try af.finish();
|
||||
const stdout = io.getStdOut().writer();
|
||||
try stdout.print("{}\n", .{file_path});
|
||||
try stdout.print("{s}\n", .{file_path});
|
||||
}
|
||||
}
|
||||
|
||||
@ -2812,7 +2812,7 @@ fn printErrMsgToFile(
|
||||
const text = text_buf.items;
|
||||
|
||||
const stream = file.outStream();
|
||||
try stream.print("{}:{}:{}: error: {}\n", .{ path, start_loc.line + 1, start_loc.column + 1, text });
|
||||
try stream.print("{s}:{}:{}: error: {s}\n", .{ path, start_loc.line + 1, start_loc.column + 1, text });
|
||||
|
||||
if (!color_on) return;
|
||||
|
||||
@ -2984,7 +2984,7 @@ pub const ClangArgIterator = struct {
|
||||
const max_bytes = 10 * 1024 * 1024; // 10 MiB of command line arguments is a reasonable limit
|
||||
const resp_file_path = arg[1..];
|
||||
const resp_contents = fs.cwd().readFileAlloc(allocator, resp_file_path, max_bytes) catch |err| {
|
||||
fatal("unable to read response file '{}': {}", .{ resp_file_path, @errorName(err) });
|
||||
fatal("unable to read response file '{s}': {s}", .{ resp_file_path, @errorName(err) });
|
||||
};
|
||||
defer allocator.free(resp_contents);
|
||||
// TODO is there a specification for this file format? Let's find it and make this parsing more robust
|
||||
@ -3057,7 +3057,7 @@ pub const ClangArgIterator = struct {
|
||||
const prefix_len = clang_arg.matchStartsWith(arg);
|
||||
if (prefix_len == arg.len) {
|
||||
if (self.next_index >= self.argv.len) {
|
||||
fatal("Expected parameter after '{}'", .{arg});
|
||||
fatal("Expected parameter after '{s}'", .{arg});
|
||||
}
|
||||
self.only_arg = self.argv[self.next_index];
|
||||
self.incrementArgIndex();
|
||||
@ -3078,7 +3078,7 @@ pub const ClangArgIterator = struct {
|
||||
if (prefix_len != 0) {
|
||||
self.only_arg = arg[prefix_len..];
|
||||
if (self.next_index >= self.argv.len) {
|
||||
fatal("Expected parameter after '{}'", .{arg});
|
||||
fatal("Expected parameter after '{s}'", .{arg});
|
||||
}
|
||||
self.second_arg = self.argv[self.next_index];
|
||||
self.incrementArgIndex();
|
||||
@ -3089,7 +3089,7 @@ pub const ClangArgIterator = struct {
|
||||
},
|
||||
.separate => if (clang_arg.matchEql(arg) > 0) {
|
||||
if (self.next_index >= self.argv.len) {
|
||||
fatal("Expected parameter after '{}'", .{arg});
|
||||
fatal("Expected parameter after '{s}'", .{arg});
|
||||
}
|
||||
self.only_arg = self.argv[self.next_index];
|
||||
self.incrementArgIndex();
|
||||
@ -3115,7 +3115,7 @@ pub const ClangArgIterator = struct {
|
||||
},
|
||||
}
|
||||
else {
|
||||
fatal("Unknown Clang option: '{}'", .{arg});
|
||||
fatal("Unknown Clang option: '{s}'", .{arg});
|
||||
}
|
||||
}
|
||||
|
||||
@ -3143,7 +3143,7 @@ pub const ClangArgIterator = struct {
|
||||
|
||||
fn parseCodeModel(arg: []const u8) std.builtin.CodeModel {
|
||||
return std.meta.stringToEnum(std.builtin.CodeModel, arg) orelse
|
||||
fatal("unsupported machine code model: '{}'", .{arg});
|
||||
fatal("unsupported machine code model: '{s}'", .{arg});
|
||||
}
|
||||
|
||||
/// Raise the open file descriptor limit. Ask and ye shall receive.
|
||||
@ -3263,7 +3263,7 @@ fn detectNativeTargetInfo(gpa: *Allocator, cross_target: std.zig.CrossTarget) !s
|
||||
// CPU model & feature detection is todo so here we rely on LLVM.
|
||||
// https://github.com/ziglang/zig/issues/4591
|
||||
if (!build_options.have_llvm)
|
||||
fatal("CPU features detection is not yet available for {} without LLVM extensions", .{@tagName(arch)});
|
||||
fatal("CPU features detection is not yet available for {s} without LLVM extensions", .{@tagName(arch)});
|
||||
|
||||
const llvm = @import("llvm_bindings.zig");
|
||||
const llvm_cpu_name = llvm.GetHostCPUName();
|
||||
|
||||
@ -381,7 +381,7 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {

const term = child.wait() catch |err| {
// TODO surface a proper error here
log.err("unable to spawn {}: {}", .{ args[0], @errorName(err) });
log.err("unable to spawn {s}: {s}", .{ args[0], @errorName(err) });
return error.ClangPreprocessorFailed;
};

@ -395,7 +395,7 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {
},
else => {
// TODO surface a proper error here
log.err("clang terminated unexpectedly with stderr: {}", .{stderr});
log.err("clang terminated unexpectedly with stderr: {s}", .{stderr});
return error.ClangPreprocessorFailed;
},
}

@ -155,21 +155,21 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
if (!is_arch_specific) {
// Look for an arch specific override.
override_path.shrinkRetainingCapacity(0);
try override_path.writer().print("{}" ++ s ++ "{}" ++ s ++ "{}.s", .{
try override_path.writer().print("{s}" ++ s ++ "{s}" ++ s ++ "{s}.s", .{
dirname, arch_name, noextbasename,
});
if (source_table.contains(override_path.items))
continue;

override_path.shrinkRetainingCapacity(0);
try override_path.writer().print("{}" ++ s ++ "{}" ++ s ++ "{}.S", .{
try override_path.writer().print("{s}" ++ s ++ "{s}" ++ s ++ "{s}.S", .{
dirname, arch_name, noextbasename,
});
if (source_table.contains(override_path.items))
continue;

override_path.shrinkRetainingCapacity(0);
try override_path.writer().print("{}" ++ s ++ "{}" ++ s ++ "{}.c", .{
try override_path.writer().print("{s}" ++ s ++ "{s}" ++ s ++ "{s}.c", .{
dirname, arch_name, noextbasename,
});
if (source_table.contains(override_path.items))
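The override lookup above builds each candidate path by splicing the platform path separator into the format string at compile time (the "++ s" parts) and filling the components in at run time with {s}. A rough sketch of the same pattern; the component names are made up, and only the format-string shape mirrors the musl code.

const std = @import("std");
const s = std.fs.path.sep_str;

pub fn main() !void {
    const allocator = std.heap.page_allocator;
    const dirname = "math";
    const arch_name = "x86_64";
    const noextbasename = "exp2f";
    // Comptime "++" splices the separator into the format string itself;
    // {s} fills in the runtime path components.
    const candidate = try std.fmt.allocPrint(allocator, "{s}" ++ s ++ "{s}" ++ s ++ "{s}.s", .{ dirname, arch_name, noextbasename });
    defer allocator.free(candidate);
    std.debug.print("candidate override: {s}\n", .{candidate});
}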
@ -322,7 +322,7 @@ fn add_cc_args(
const target = comp.getTarget();
const arch_name = target_util.archMuslName(target.cpu.arch);
const os_name = @tagName(target.os.tag);
const triple = try std.fmt.allocPrint(arena, "{}-{}-musl", .{ arch_name, os_name });
const triple = try std.fmt.allocPrint(arena, "{s}-{s}-musl", .{ arch_name, os_name });
const o_arg = if (want_O3) "-O3" else "-Os";

try args.appendSlice(&[_][]const u8{

@ -9,7 +9,7 @@ pub fn cmdEnv(gpa: *Allocator, args: []const []const u8, stdout: std.fs.File.Wri
defer gpa.free(self_exe_path);

var zig_lib_directory = introspect.findZigLibDirFromSelfExe(gpa, self_exe_path) catch |err| {
fatal("unable to find zig installation directory: {}\n", .{@errorName(err)});
fatal("unable to find zig installation directory: {s}\n", .{@errorName(err)});
};
defer gpa.free(zig_lib_directory.path.?);
defer zig_lib_directory.handle.close();

@ -18,7 +18,7 @@ pub fn cmdTargets(
native_target: Target,
) !void {
var zig_lib_directory = introspect.findZigLibDir(allocator) catch |err| {
fatal("unable to find zig installation directory: {}\n", .{@errorName(err)});
fatal("unable to find zig installation directory: {s}\n", .{@errorName(err)});
};
defer zig_lib_directory.handle.close();
defer allocator.free(zig_lib_directory.path.?);
@ -61,7 +61,7 @@ pub fn cmdTargets(
try jws.objectField("libc");
try jws.beginArray();
for (target.available_libcs) |libc| {
const tmp = try std.fmt.allocPrint(allocator, "{}-{}-{}", .{
const tmp = try std.fmt.allocPrint(allocator, "{s}-{s}-{s}", .{
@tagName(libc.arch), @tagName(libc.os), @tagName(libc.abi),
});
defer allocator.free(tmp);

@ -37,14 +37,14 @@ pub export fn main(argc: c_int, argv: [*][*:0]u8) c_int {
defer arena_instance.deinit();
const arena = &arena_instance.allocator;

const args = arena.alloc([]const u8, @intCast(usize, argc)) catch fatal("{}", .{"OutOfMemory"});
const args = arena.alloc([]const u8, @intCast(usize, argc)) catch fatal("{s}", .{"OutOfMemory"});
for (args) |*arg, i| {
arg.* = mem.spanZ(argv[i]);
}
if (std.builtin.mode == .Debug) {
stage2.mainArgs(gpa, arena, args) catch unreachable;
} else {
stage2.mainArgs(gpa, arena, args) catch |err| fatal("{}", .{@errorName(err)});
stage2.mainArgs(gpa, arena, args) catch |err| fatal("{s}", .{@errorName(err)});
}
return 0;
}

@ -136,7 +136,7 @@ const Scope = struct {
var proposed_name = name_copy;
while (scope.contains(proposed_name)) {
scope.mangle_count += 1;
proposed_name = try std.fmt.allocPrint(c.arena, "{}_{}", .{ name, scope.mangle_count });
proposed_name = try std.fmt.allocPrint(c.arena, "{s}_{}", .{ name, scope.mangle_count });
}
try scope.variables.append(.{ .name = name_copy, .alias = proposed_name });
return proposed_name;
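The loop above resolves name collisions by appending an increasing counter until the proposed alias is free. A rough standalone sketch of that idea, assuming a plain string set in place of the real Scope type; the names and counter type are invented, only the format-string shape comes from the hunk.

const std = @import("std");

pub fn main() !void {
    const allocator = std.heap.page_allocator;
    var taken = std.StringHashMap(void).init(allocator);
    defer taken.deinit();
    try taken.put("arg", {});
    try taken.put("arg_1", {});

    var mangle_count: u32 = 0;
    var proposed: []const u8 = "arg";
    while (taken.contains(proposed)) {
        mangle_count += 1;
        // Mirrors the "{s}_{}" pattern above: the base name is text, the
        // counter uses default (decimal) integer formatting.
        proposed = try std.fmt.allocPrint(allocator, "{s}_{}", .{ "arg", mangle_count });
    }
    std.debug.print("mangled name: {s}\n", .{proposed}); // arg_2
}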
@ -290,7 +290,7 @@ pub const Context = struct {

const line = c.source_manager.getSpellingLineNumber(spelling_loc);
const column = c.source_manager.getSpellingColumnNumber(spelling_loc);
return std.fmt.allocPrint(c.arena, "{}:{}:{}", .{ filename, line, column });
return std.fmt.allocPrint(c.arena, "{s}:{d}:{d}", .{ filename, line, column });
}

fn createCall(c: *Context, fn_expr: *ast.Node, params_len: ast.NodeIndex) !*ast.Node.Call {
@ -530,7 +530,7 @@ fn declVisitor(c: *Context, decl: *const clang.Decl) Error!void {
|
||||
},
|
||||
else => {
|
||||
const decl_name = try c.str(decl.getDeclKindName());
|
||||
try emitWarning(c, decl.getLocation(), "ignoring {} declaration", .{decl_name});
|
||||
try emitWarning(c, decl.getLocation(), "ignoring {s} declaration", .{decl_name});
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -625,7 +625,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void {
|
||||
const param_name = if (param.name_token) |name_tok|
|
||||
tokenSlice(c, name_tok)
|
||||
else
|
||||
return failDecl(c, fn_decl_loc, fn_name, "function {} parameter has no name", .{fn_name});
|
||||
return failDecl(c, fn_decl_loc, fn_name, "function {s} parameter has no name", .{fn_name});
|
||||
|
||||
const c_param = fn_decl.getParamDecl(param_id);
|
||||
const qual_type = c_param.getOriginalType();
|
||||
@ -634,7 +634,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void {
|
||||
const mangled_param_name = try block_scope.makeMangledName(c, param_name);
|
||||
|
||||
if (!is_const) {
|
||||
const bare_arg_name = try std.fmt.allocPrint(c.arena, "arg_{}", .{mangled_param_name});
|
||||
const bare_arg_name = try std.fmt.allocPrint(c.arena, "arg_{s}", .{mangled_param_name});
|
||||
const arg_name = try block_scope.makeMangledName(c, bare_arg_name);
|
||||
|
||||
const mut_tok = try appendToken(c, .Keyword_var, "var");
|
||||
@ -727,7 +727,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co
|
||||
|
||||
// TODO https://github.com/ziglang/zig/issues/3756
|
||||
// TODO https://github.com/ziglang/zig/issues/1802
|
||||
const checked_name = if (isZigPrimitiveType(var_name)) try std.fmt.allocPrint(c.arena, "{}_{}", .{ var_name, c.getMangle() }) else var_name;
|
||||
const checked_name = if (isZigPrimitiveType(var_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ var_name, c.getMangle() }) else var_name;
|
||||
const var_decl_loc = var_decl.getLocation();
|
||||
|
||||
const qual_type = var_decl.getTypeSourceInfo_getType();
|
||||
@ -808,7 +808,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co
|
||||
_ = try appendToken(rp.c, .LParen, "(");
|
||||
const expr = try transCreateNodeStringLiteral(
|
||||
rp.c,
|
||||
try std.fmt.allocPrint(rp.c.arena, "\"{}\"", .{str_ptr[0..str_len]}),
|
||||
try std.fmt.allocPrint(rp.c.arena, "\"{s}\"", .{str_ptr[0..str_len]}),
|
||||
);
|
||||
_ = try appendToken(rp.c, .RParen, ")");
|
||||
|
||||
@ -887,7 +887,7 @@ fn transTypeDef(c: *Context, typedef_decl: *const clang.TypedefNameDecl, top_lev
|
||||
|
||||
// TODO https://github.com/ziglang/zig/issues/3756
|
||||
// TODO https://github.com/ziglang/zig/issues/1802
|
||||
const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.arena, "{}_{}", .{ typedef_name, c.getMangle() }) else typedef_name;
|
||||
const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ typedef_name, c.getMangle() }) else typedef_name;
|
||||
if (checkForBuiltinTypedef(checked_name)) |builtin| {
|
||||
return transTypeDefAsBuiltin(c, typedef_decl, builtin);
|
||||
}
|
||||
@ -958,11 +958,11 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as
|
||||
container_kind_name = "struct";
|
||||
container_kind = .Keyword_struct;
|
||||
} else {
|
||||
try emitWarning(c, record_loc, "record {} is not a struct or union", .{bare_name});
|
||||
try emitWarning(c, record_loc, "record {s} is not a struct or union", .{bare_name});
|
||||
return null;
|
||||
}
|
||||
|
||||
const name = try std.fmt.allocPrint(c.arena, "{}_{}", .{ container_kind_name, bare_name });
|
||||
const name = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ container_kind_name, bare_name });
|
||||
_ = try c.decl_table.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), name);
|
||||
|
||||
const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null;
|
||||
@ -1003,7 +1003,7 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as
|
||||
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {});
|
||||
const opaque_type = try transCreateNodeOpaqueType(c);
|
||||
semicolon = try appendToken(c, .Semicolon, ";");
|
||||
try emitWarning(c, field_loc, "{} demoted to opaque type - has bitfield", .{container_kind_name});
|
||||
try emitWarning(c, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name});
|
||||
break :blk opaque_type;
|
||||
}
|
||||
|
||||
@ -1011,7 +1011,7 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as
|
||||
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {});
|
||||
const opaque_type = try transCreateNodeOpaqueType(c);
|
||||
semicolon = try appendToken(c, .Semicolon, ";");
|
||||
try emitWarning(c, field_loc, "{} demoted to opaque type - has variable length array", .{container_kind_name});
|
||||
try emitWarning(c, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name});
|
||||
break :blk opaque_type;
|
||||
}
|
||||
|
||||
@ -1030,7 +1030,7 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as
|
||||
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {});
|
||||
const opaque_type = try transCreateNodeOpaqueType(c);
|
||||
semicolon = try appendToken(c, .Semicolon, ";");
|
||||
try emitWarning(c, record_loc, "{} demoted to opaque type - unable to translate type of field {}", .{ container_kind_name, raw_name });
|
||||
try emitWarning(c, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, raw_name });
|
||||
break :blk opaque_type;
|
||||
},
|
||||
else => |e| return e,
|
||||
@ -1114,7 +1114,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node
|
||||
is_unnamed = true;
|
||||
}
|
||||
|
||||
const name = try std.fmt.allocPrint(c.arena, "enum_{}", .{bare_name});
|
||||
const name = try std.fmt.allocPrint(c.arena, "enum_{s}", .{bare_name});
|
||||
_ = try c.decl_table.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), name);
|
||||
|
||||
const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null;
|
||||
@ -1385,7 +1385,7 @@ fn transStmt(
|
||||
rp,
|
||||
error.UnsupportedTranslation,
|
||||
stmt.getBeginLoc(),
|
||||
"TODO implement translation of stmt class {}",
|
||||
"TODO implement translation of stmt class {s}",
|
||||
.{@tagName(sc)},
|
||||
);
|
||||
},
|
||||
@ -1684,7 +1684,7 @@ fn transDeclStmtOne(
|
||||
rp,
|
||||
error.UnsupportedTranslation,
|
||||
decl.getLocation(),
|
||||
"TODO implement translation of DeclStmt kind {}",
|
||||
"TODO implement translation of DeclStmt kind {s}",
|
||||
.{@tagName(kind)},
|
||||
),
|
||||
}
|
||||
@ -1782,7 +1782,7 @@ fn transImplicitCastExpr(
|
||||
rp,
|
||||
error.UnsupportedTranslation,
|
||||
@ptrCast(*const clang.Stmt, expr).getBeginLoc(),
|
||||
"TODO implement translation of CastKind {}",
|
||||
"TODO implement translation of CastKind {s}",
|
||||
.{@tagName(kind)},
|
||||
),
|
||||
}
|
||||
@ -2043,7 +2043,7 @@ fn transStringLiteral(
|
||||
rp,
|
||||
error.UnsupportedTranslation,
|
||||
@ptrCast(*const clang.Stmt, stmt).getBeginLoc(),
|
||||
"TODO: support string literal kind {}",
|
||||
"TODO: support string literal kind {s}",
|
||||
.{kind},
|
||||
),
|
||||
}
|
||||
@ -2168,7 +2168,6 @@ fn transCCast(
|
||||
// @boolToInt returns either a comptime_int or a u1
|
||||
// TODO: if dst_type is 1 bit & signed (bitfield) we need @bitCast
|
||||
// instead of @as
|
||||
|
||||
const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
|
||||
builtin_node.params()[0] = expr;
|
||||
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
|
||||
@ -2455,7 +2454,7 @@ fn transInitListExpr(
|
||||
);
|
||||
} else {
|
||||
const type_name = rp.c.str(qual_type.getTypeClassName());
|
||||
return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported initlist type: '{}'", .{type_name});
|
||||
return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported initlist type: '{s}'", .{type_name});
|
||||
}
|
||||
}
|
||||
|
||||
@ -4433,7 +4432,8 @@ fn transCreateNodeBoolLiteral(c: *Context, value: bool) !*ast.Node {
|
||||
}
|
||||
|
||||
fn transCreateNodeInt(c: *Context, int: anytype) !*ast.Node {
|
||||
const token = try appendTokenFmt(c, .IntegerLiteral, "{}", .{int});
|
||||
const fmt_s = if (comptime std.meta.trait.isIntegerNumber(@TypeOf(int))) "{d}" else "{s}";
|
||||
const token = try appendTokenFmt(c, .IntegerLiteral, fmt_s, .{int});
|
||||
const node = try c.arena.create(ast.Node.OneToken);
|
||||
node.* = .{
|
||||
.base = .{ .tag = .IntegerLiteral },
|
||||
@ -4442,8 +4442,8 @@ fn transCreateNodeInt(c: *Context, int: anytype) !*ast.Node {
|
||||
return &node.base;
|
||||
}
|
||||
|
||||
fn transCreateNodeFloat(c: *Context, int: anytype) !*ast.Node {
|
||||
const token = try appendTokenFmt(c, .FloatLiteral, "{}", .{int});
|
||||
fn transCreateNodeFloat(c: *Context, str: []const u8) !*ast.Node {
|
||||
const token = try appendTokenFmt(c, .FloatLiteral, "{s}", .{str});
|
||||
const node = try c.arena.create(ast.Node.OneToken);
|
||||
node.* = .{
|
||||
.base = .{ .tag = .FloatLiteral },
|
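// Illustrative sketch, not part of the commit: the two hunks above are the one place
// where the fix is more than a specifier swap. The integer helper now picks its
// format string at comptime via std.meta.trait.isIntegerNumber, and the float helper
// takes already-rendered text. The helper below is a hypothetical stand-in showing
// the same comptime selection, following the Zig 0.7-era allocator API used here.
const std = @import("std");

fn literalText(allocator: *std.mem.Allocator, value: anytype) ![]u8 {
    // Integers are printed with "{d}"; values that are already text use "{s}".
    const fmt_s = if (comptime std.meta.trait.isIntegerNumber(@TypeOf(value))) "{d}" else "{s}";
    return std.fmt.allocPrint(allocator, fmt_s, .{value});
}

// Usage: literalText(alloc, @as(i64, -7)) yields "-7";
//        literalText(alloc, @as([]const u8, "1.5f")) yields "1.5f".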
@ -4916,7 +4916,7 @@ fn transType(rp: RestorePoint, ty: *const clang.Type, source_loc: clang.SourceLo
},
else => {
const type_name = rp.c.str(ty.getTypeClassName());
return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported type: '{}'", .{type_name});
return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported type: '{s}'", .{type_name});
},
}
}
@ -4999,7 +4999,7 @@ fn transCC(
rp,
error.UnsupportedType,
source_loc,
"unsupported calling convention: {}",
"unsupported calling convention: {s}",
.{@tagName(clang_cc)},
),
}
@ -5117,7 +5117,7 @@ fn finishTransFnProto(
_ = try appendToken(rp.c, .LParen, "(");
const expr = try transCreateNodeStringLiteral(
rp.c,
try std.fmt.allocPrint(rp.c.arena, "\"{}\"", .{str_ptr[0..str_len]}),
try std.fmt.allocPrint(rp.c.arena, "\"{s}\"", .{str_ptr[0..str_len]}),
);
_ = try appendToken(rp.c, .RParen, ")");

@ -5214,7 +5214,7 @@ fn revertAndWarn(

fn emitWarning(c: *Context, loc: clang.SourceLocation, comptime format: []const u8, args: anytype) !void {
const args_prefix = .{c.locStr(loc)};
_ = try appendTokenFmt(c, .LineComment, "// {}: warning: " ++ format, args_prefix ++ args);
_ = try appendTokenFmt(c, .LineComment, "// {s}: warning: " ++ format, args_prefix ++ args);
}
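// Illustrative sketch, not part of the commit: emitWarning above builds its comment
// text by concatenating a comptime prefix onto the caller's format string and passing
// the location through "{s}" as a prepended argument. The wrapper below is a made-up
// equivalent that returns the text instead of appending a token (Zig 0.7-era API).
const std = @import("std");

fn warnLine(
    allocator: *std.mem.Allocator,
    loc: []const u8,
    comptime format: []const u8,
    args: anytype,
) ![]u8 {
    const args_prefix = .{loc};
    // The format strings concatenate at comptime; the argument tuples join into one tuple.
    return std.fmt.allocPrint(allocator, "// {s}: warning: " ++ format, args_prefix ++ args);
}

// Usage: warnLine(alloc, "demo.c:1:2", "ignoring {s} declaration", .{"Typedef"})
// yields "// demo.c:1:2: warning: ignoring Typedef declaration".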

pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) !void {
@ -5228,7 +5228,7 @@ pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, compti
const msg_tok = try appendTokenFmt(c, .StringLiteral, "\"" ++ format ++ "\"", args);
const rparen_tok = try appendToken(c, .RParen, ")");
const semi_tok = try appendToken(c, .Semicolon, ";");
_ = try appendTokenFmt(c, .LineComment, "// {}", .{c.locStr(loc)});
_ = try appendTokenFmt(c, .LineComment, "// {s}", .{c.locStr(loc)});

const msg_node = try c.arena.create(ast.Node.OneToken);
msg_node.* = .{
@ -5258,7 +5258,7 @@ pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, compti

fn appendToken(c: *Context, token_id: Token.Id, bytes: []const u8) !ast.TokenIndex {
std.debug.assert(token_id != .Identifier); // use appendIdentifier
return appendTokenFmt(c, token_id, "{}", .{bytes});
return appendTokenFmt(c, token_id, "{s}", .{bytes});
}

fn appendTokenFmt(c: *Context, token_id: Token.Id, comptime format: []const u8, args: anytype) !ast.TokenIndex {
@ -5329,7 +5329,7 @@ fn transCreateNodeIdentifier(c: *Context, name: []const u8) !*ast.Node {
}

fn transCreateNodeIdentifierUnchecked(c: *Context, name: []const u8) !*ast.Node {
const token_index = try appendTokenFmt(c, .Identifier, "{}", .{name});
const token_index = try appendTokenFmt(c, .Identifier, "{s}", .{name});
const identifier = try c.arena.create(ast.Node.OneToken);
identifier.* = .{
.base = .{ .tag = .Identifier },
@ -5390,7 +5390,7 @@ fn transPreprocessorEntities(c: *Context, unit: *clang.ASTUnit) Error!void {
const name = try c.str(raw_name);
// TODO https://github.com/ziglang/zig/issues/3756
// TODO https://github.com/ziglang/zig/issues/1802
const mangled_name = if (isZigPrimitiveType(name)) try std.fmt.allocPrint(c.arena, "{}_{}", .{ name, c.getMangle() }) else name;
const mangled_name = if (isZigPrimitiveType(name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ name, c.getMangle() }) else name;
if (scope.containsNow(mangled_name)) {
continue;
}
@ -5468,7 +5468,7 @@ fn transMacroDefine(c: *Context, m: *MacroCtx) ParseError!void {
const init_node = try parseCExpr(c, m, scope);
const last = m.next().?;
if (last != .Eof and last != .Nl)
return m.fail(c, "unable to translate C expr: unexpected token .{}", .{@tagName(last)});
return m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(last)});

const semicolon_token = try appendToken(c, .Semicolon, ";");
const node = try ast.Node.VarDecl.create(c.arena, .{
@ -5540,7 +5540,7 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void {
const expr = try parseCExpr(c, m, scope);
const last = m.next().?;
if (last != .Eof and last != .Nl)
return m.fail(c, "unable to translate C expr: unexpected token .{}", .{@tagName(last)});
return m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(last)});
_ = try appendToken(c, .Semicolon, ";");
const type_of_arg = if (!expr.tag.isBlock()) expr else blk: {
const stmts = expr.blockStatements();
@ -5623,11 +5623,11 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node {
switch (lit_bytes[1]) {
'0'...'7' => {
// Octal
lit_bytes = try std.fmt.allocPrint(c.arena, "0o{}", .{lit_bytes});
lit_bytes = try std.fmt.allocPrint(c.arena, "0o{s}", .{lit_bytes});
},
'X' => {
// Hexadecimal with capital X, valid in C but not in Zig
lit_bytes = try std.fmt.allocPrint(c.arena, "0x{}", .{lit_bytes[2..]});
lit_bytes = try std.fmt.allocPrint(c.arena, "0x{s}", .{lit_bytes[2..]});
},
else => {},
}
@ -5659,7 +5659,7 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node {
},
.FloatLiteral => |suffix| {
if (lit_bytes[0] == '.')
lit_bytes = try std.fmt.allocPrint(c.arena, "0{}", .{lit_bytes});
lit_bytes = try std.fmt.allocPrint(c.arena, "0{s}", .{lit_bytes});
if (suffix == .none) {
return transCreateNodeFloat(c, lit_bytes);
}
@ -5937,7 +5937,7 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*

const next_id = m.next().?;
if (next_id != .RParen) {
try m.fail(c, "unable to translate C expr: expected ')' instead got: {}", .{@tagName(next_id)});
try m.fail(c, "unable to translate C expr: expected ')' instead got: {s}", .{@tagName(next_id)});
return error.ParseError;
}
var saw_l_paren = false;
@ -5995,7 +5995,7 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*
return &group_node.base;
},
else => {
try m.fail(c, "unable to translate C expr: unexpected token .{}", .{@tagName(tok)});
try m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(tok)});
return error.ParseError;
},
}

@ -464,7 +464,7 @@ pub const Value = extern union {
.ty => return val.castTag(.ty).?.data.format("", options, out_stream),
.int_type => {
const int_type = val.castTag(.int_type).?.data;
return out_stream.print("{}{}", .{
return out_stream.print("{s}{d}", .{
if (int_type.signed) "s" else "u",
int_type.bits,
});
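// Illustrative sketch, not part of the commit: in the int_type case above the
// signedness prefix is a string ("s" or "u") and the bit count is an integer, so
// the format string now spells that out as "{s}{d}". The helper below is a
// hypothetical reduction of the same composition; `writer` is any std.io
// writer/out-stream, e.g. the out_stream used by dumpZir further down.
fn writeIntTypeName(writer: anytype, signed: bool, bits: u16) !void {
    try writer.print("{s}{d}", .{ if (signed) "s" else "u", bits }); // e.g. "u32" or "s16"
}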
@ -507,7 +507,7 @@ pub const Value = extern union {
}
return out_stream.writeAll("}");
},
.@"error" => return out_stream.print("error.{}", .{val.castTag(.@"error").?.data.name}),
.@"error" => return out_stream.print("error.{s}", .{val.castTag(.@"error").?.data.name}),
.inferred_alloc => return out_stream.writeAll("(inferred allocation value)"),
};
}

44 src/zir.zig
@ -1150,7 +1150,7 @@ pub const Module = struct {

for (self.decls) |decl, i| {
write.next_instr_index = 0;
try stream.print("@{} ", .{decl.name});
try stream.print("@{s} ", .{decl.name});
try write.writeInstToStream(stream, decl.inst);
try stream.writeByte('\n');
}
@ -1206,13 +1206,13 @@ const Writer = struct {
if (@typeInfo(arg_field.field_type) == .Optional) {
if (@field(inst.kw_args, arg_field.name)) |non_optional| {
if (need_comma) try stream.writeAll(", ");
try stream.print("{}=", .{arg_field.name});
try stream.print("{s}=", .{arg_field.name});
try self.writeParamToStream(stream, &non_optional);
need_comma = true;
}
} else {
if (need_comma) try stream.writeAll(", ");
try stream.print("{}=", .{arg_field.name});
try stream.print("{s}=", .{arg_field.name});
try self.writeParamToStream(stream, &@field(inst.kw_args, arg_field.name));
need_comma = true;
}
@ -1334,16 +1334,16 @@ const Writer = struct {
if (info.index) |i| {
try stream.print("%{}", .{info.index});
} else {
try stream.print("@{}", .{info.name});
try stream.print("@{s}", .{info.name});
}
} else if (inst.cast(Inst.DeclVal)) |decl_val| {
try stream.print("@{}", .{decl_val.positionals.name});
try stream.print("@{s}", .{decl_val.positionals.name});
} else if (inst.cast(Inst.DeclValInModule)) |decl_val| {
try stream.print("@{}", .{decl_val.positionals.decl.name});
try stream.print("@{s}", .{decl_val.positionals.decl.name});
} else {
// This should be unreachable in theory, but since ZIR is used for debugging the compiler
// we output some debug text instead.
try stream.print("?{}?", .{@tagName(inst.tag)});
try stream.print("?{s}?", .{@tagName(inst.tag)});
}
}
};
@ -1424,7 +1424,7 @@ const Parser = struct {
const decl = try parseInstruction(self, &body_context, ident);
const ident_index = body_context.instructions.items.len;
if (try body_context.name_map.fetchPut(ident, decl.inst)) |_| {
return self.fail("redefinition of identifier '{}'", .{ident});
return self.fail("redefinition of identifier '{s}'", .{ident});
}
try body_context.instructions.append(decl.inst);
continue;
@ -1510,7 +1510,7 @@ const Parser = struct {
const decl = try parseInstruction(self, null, ident);
const ident_index = self.decls.items.len;
if (try self.global_name_map.fetchPut(ident, decl.inst)) |_| {
return self.fail("redefinition of identifier '{}'", .{ident});
return self.fail("redefinition of identifier '{s}'", .{ident});
}
try self.decls.append(self.allocator, decl);
},
@ -1538,7 +1538,7 @@ const Parser = struct {
for (bytes) |byte| {
if (self.source[self.i] != byte) {
self.i = start;
return self.fail("expected '{}'", .{bytes});
return self.fail("expected '{s}'", .{bytes});
}
self.i += 1;
}
@ -1585,7 +1585,7 @@ const Parser = struct {
return parseInstructionGeneric(self, field.name, tag.Type(), tag, body_ctx, name, contents_start);
}
}
return self.fail("unknown instruction '{}'", .{fn_name});
return self.fail("unknown instruction '{s}'", .{fn_name});
}

fn parseInstructionGeneric(
@ -1621,7 +1621,7 @@ const Parser = struct {
self.i += 1;
skipSpace(self);
} else if (self.source[self.i] == ')') {
return self.fail("expected positional parameter '{}'", .{arg_field.name});
return self.fail("expected positional parameter '{s}'", .{arg_field.name});
}
@field(inst_specific.positionals, arg_field.name) = try parseParameterGeneric(
self,
@ -1648,7 +1648,7 @@ const Parser = struct {
break;
}
} else {
return self.fail("unrecognized keyword parameter: '{}'", .{name});
return self.fail("unrecognized keyword parameter: '{s}'", .{name});
}
skipSpace(self);
}
@ -1672,7 +1672,7 @@ const Parser = struct {
' ', '\n', ',', ')' => {
const enum_name = self.source[start..self.i];
return std.meta.stringToEnum(T, enum_name) orelse {
return self.fail("tag '{}' not a member of enum '{}'", .{ enum_name, @typeName(T) });
return self.fail("tag '{s}' not a member of enum '{s}'", .{ enum_name, @typeName(T) });
};
},
0 => return self.failByte(0),
@ -1710,7 +1710,7 @@ const Parser = struct {
BigIntConst => return self.parseIntegerLiteral(),
usize => {
const big_int = try self.parseIntegerLiteral();
return big_int.to(usize) catch |err| return self.fail("integer literal: {}", .{@errorName(err)});
return big_int.to(usize) catch |err| return self.fail("integer literal: {s}", .{@errorName(err)});
},
TypedValue => return self.fail("'const' is a special instruction; not legal in ZIR text", .{}),
*IrModule.Decl => return self.fail("'declval_in_module' is a special instruction; not legal in ZIR text", .{}),
@ -1759,7 +1759,7 @@ const Parser = struct {
},
else => @compileError("Unimplemented: ir parseParameterGeneric for type " ++ @typeName(T)),
}
return self.fail("TODO parse parameter {}", .{@typeName(T)});
return self.fail("TODO parse parameter {s}", .{@typeName(T)});
}

fn parseParameterInst(self: *Parser, body_ctx: ?*Body) !*Inst {
@ -1788,7 +1788,7 @@ const Parser = struct {
const src = name_start - 1;
if (local_ref) {
self.i = src;
return self.fail("unrecognized identifier: {}", .{bad_name});
return self.fail("unrecognized identifier: {s}", .{bad_name});
} else {
const declval = try self.arena.allocator.create(Inst.DeclVal);
declval.* = .{
@ -1873,7 +1873,7 @@ pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {

const fn_ty = module_fn.owner_decl.typed_value.most_recent.typed_value.ty;
_ = ctx.emitFn(module_fn, 0, fn_ty) catch |err| {
std.debug.print("unable to dump function: {}\n", .{err});
std.debug.print("unable to dump function: {s}\n", .{@errorName(err)});
return;
};
var module = Module{
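// Illustrative sketch, not part of the commit: the dumpFn hunk above changes more
// than the specifier. An error value is not a string, so it is first converted with
// @errorName (a byte slice) and then printed through "{s}". The function below is a
// made-up reduction of that pattern.
const std = @import("std");

fn reportDumpFailure(err: anyerror) void {
    std.debug.print("unable to dump function: {s}\n", .{@errorName(err)});
}

pub fn main() void {
    reportDumpFailure(error.OutOfMemory); // prints "unable to dump function: OutOfMemory"
}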
@ -2203,7 +2203,7 @@ const EmitZIR = struct {
};
return self.emitStringLiteral(src, bytes);
},
else => |t| std.debug.panic("TODO implement emitTypedValue for pointer to {}", .{@tagName(t)}),
else => |t| std.debug.panic("TODO implement emitTypedValue for pointer to {s}", .{@tagName(t)}),
}
},
.ComptimeInt => return self.emitComptimeIntVal(src, typed_value.val),
@ -2274,7 +2274,7 @@ const EmitZIR = struct {
};
return self.emitUnnamedDecl(&inst.base);
},
else => |t| std.debug.panic("TODO implement emitTypedValue for {}", .{@tagName(t)}),
else => |t| std.debug.panic("TODO implement emitTypedValue for {s}", .{@tagName(t)}),
}
}

@ -2947,7 +2947,7 @@ pub fn dumpZir(allocator: *Allocator, kind: []const u8, decl_name: [*:0]const u8
try write.inst_table.ensureCapacity(@intCast(u32, instructions.len));

const stderr = std.io.getStdErr().outStream();
try stderr.print("{} {s} {{ // unanalyzed\n", .{ kind, decl_name });
try stderr.print("{s} {s} {{ // unanalyzed\n", .{ kind, decl_name });

for (instructions) |inst| {
const my_i = write.next_instr_index;
@ -2967,5 +2967,5 @@ pub fn dumpZir(allocator: *Allocator, kind: []const u8, decl_name: [*:0]const u8
try stderr.writeByte('\n');
}

try stderr.print("}} // {} {s}\n\n", .{ kind, decl_name });
try stderr.print("}} // {s} {s}\n\n", .{ kind, decl_name });
}

@ -274,7 +274,7 @@ pub fn resolveInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
const entry = if (old_inst.cast(zir.Inst.DeclVal)) |declval| blk: {
const decl_name = declval.positionals.name;
const entry = zir_module.contents.module.findDecl(decl_name) orelse
return mod.fail(scope, old_inst.src, "decl '{}' not found", .{decl_name});
return mod.fail(scope, old_inst.src, "decl '{s}' not found", .{decl_name});
break :blk entry;
} else blk: {
// If this assert trips, the instruction that was referenced did not get
@ -564,14 +564,14 @@ fn analyzeInstStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerErr
fn analyzeInstExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export) InnerError!*Inst {
const symbol_name = try resolveConstString(mod, scope, export_inst.positionals.symbol_name);
const exported_decl = mod.lookupDeclName(scope, export_inst.positionals.decl_name) orelse
return mod.fail(scope, export_inst.base.src, "decl '{}' not found", .{export_inst.positionals.decl_name});
return mod.fail(scope, export_inst.base.src, "decl '{s}' not found", .{export_inst.positionals.decl_name});
try mod.analyzeExport(scope, export_inst.base.src, symbol_name, exported_decl);
return mod.constVoid(scope, export_inst.base.src);
}

fn analyzeInstCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const msg = try resolveConstString(mod, scope, inst.positionals.operand);
return mod.fail(scope, inst.base.src, "{}", .{msg});
return mod.fail(scope, inst.base.src, "{s}", .{msg});
}

fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
@ -918,7 +918,7 @@ fn analyzeInstErrorSet(mod: *Module, scope: *Scope, inst: *zir.Inst.ErrorSet) In
for (inst.positionals.fields) |field_name| {
const entry = try mod.getErrorValue(field_name);
if (payload.data.fields.fetchPutAssumeCapacity(entry.key, entry.value)) |prev| {
return mod.fail(scope, inst.base.src, "duplicate error: '{}'", .{field_name});
return mod.fail(scope, inst.base.src, "duplicate error: '{s}'", .{field_name});
}
}
// TODO create name in format "error:line:column"
@ -1068,7 +1068,7 @@ fn analyzeInstFieldPtr(mod: *Module, scope: *Scope, fieldptr: *zir.Inst.FieldPtr
return mod.fail(
scope,
fieldptr.positionals.field_name.src,
"no member named '{}' in '{}'",
"no member named '{s}' in '{}'",
.{ field_name, elem_ty },
);
}
@ -1089,7 +1089,7 @@ fn analyzeInstFieldPtr(mod: *Module, scope: *Scope, fieldptr: *zir.Inst.FieldPtr
return mod.fail(
scope,
fieldptr.positionals.field_name.src,
"no member named '{}' in '{}'",
"no member named '{s}' in '{}'",
.{ field_name, elem_ty },
);
}
@ -1107,7 +1107,7 @@ fn analyzeInstFieldPtr(mod: *Module, scope: *Scope, fieldptr: *zir.Inst.FieldPtr
// TODO resolve inferred error sets
const entry = if (val.castTag(.error_set)) |payload|
(payload.data.fields.getEntry(field_name) orelse
return mod.fail(scope, fieldptr.base.src, "no error named '{}' in '{}'", .{ field_name, child_type })).*
return mod.fail(scope, fieldptr.base.src, "no error named '{s}' in '{}'", .{ field_name, child_type })).*
else
try mod.getErrorValue(field_name);

@ -1135,9 +1135,9 @@ fn analyzeInstFieldPtr(mod: *Module, scope: *Scope, fieldptr: *zir.Inst.FieldPtr
}

if (&container_scope.file_scope.base == mod.root_scope) {
return mod.fail(scope, fieldptr.base.src, "root source file has no member called '{}'", .{field_name});
return mod.fail(scope, fieldptr.base.src, "root source file has no member called '{s}'", .{field_name});
} else {
return mod.fail(scope, fieldptr.base.src, "container '{}' has no member called '{}'", .{ child_type, field_name });
return mod.fail(scope, fieldptr.base.src, "container '{}' has no member called '{s}'", .{ child_type, field_name });
}
},
else => return mod.fail(scope, fieldptr.base.src, "type '{}' does not support field access", .{child_type}),
@ -1503,14 +1503,14 @@ fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerErr

const file_scope = mod.analyzeImport(scope, inst.base.src, operand) catch |err| switch (err) {
error.ImportOutsidePkgPath => {
return mod.fail(scope, inst.base.src, "import of file outside package path: '{}'", .{operand});
return mod.fail(scope, inst.base.src, "import of file outside package path: '{s}'", .{operand});
},
error.FileNotFound => {
return mod.fail(scope, inst.base.src, "unable to find '{}'", .{operand});
return mod.fail(scope, inst.base.src, "unable to find '{s}'", .{operand});
},
else => {
// TODO user friendly error to string
return mod.fail(scope, inst.base.src, "unable to open '{}': {}", .{ operand, @errorName(err) });
return mod.fail(scope, inst.base.src, "unable to open '{s}': {s}", .{ operand, @errorName(err) });
},
};
return mod.constType(scope, inst.base.src, file_scope.root_container.ty);
@ -1637,7 +1637,7 @@ fn analyzeInstArithmetic(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inn
const is_float = scalar_tag == .Float or scalar_tag == .ComptimeFloat;

if (!is_int and !(is_float and floatOpAllowed(inst.base.tag))) {
return mod.fail(scope, inst.base.src, "invalid operands to binary expression: '{}' and '{}'", .{ @tagName(lhs.ty.zigTypeTag()), @tagName(rhs.ty.zigTypeTag()) });
return mod.fail(scope, inst.base.src, "invalid operands to binary expression: '{s}' and '{s}'", .{ @tagName(lhs.ty.zigTypeTag()), @tagName(rhs.ty.zigTypeTag()) });
}

if (casted_lhs.value()) |lhs_val| {
@ -1656,7 +1656,7 @@ fn analyzeInstArithmetic(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inn
const ir_tag = switch (inst.base.tag) {
.add => Inst.Tag.add,
.sub => Inst.Tag.sub,
else => return mod.fail(scope, inst.base.src, "TODO implement arithmetic for operand '{}''", .{@tagName(inst.base.tag)}),
else => return mod.fail(scope, inst.base.src, "TODO implement arithmetic for operand '{s}''", .{@tagName(inst.base.tag)}),
};

return mod.addBinOp(b, inst.base.src, scalar_type, ir_tag, casted_lhs, casted_rhs);
@ -1689,7 +1689,7 @@ fn analyzeInstComptimeOp(mod: *Module, scope: *Scope, res_type: Type, inst: *zir
mod.floatSub(scope, res_type, inst.base.src, lhs_val, rhs_val);
break :blk val;
},
else => return mod.fail(scope, inst.base.src, "TODO Implement arithmetic operand '{}'", .{@tagName(inst.base.tag)}),
else => return mod.fail(scope, inst.base.src, "TODO Implement arithmetic operand '{s}'", .{@tagName(inst.base.tag)}),
};

return mod.constInst(scope, inst.base.src, .{
@ -1781,7 +1781,7 @@ fn analyzeInstCmp(
return mod.fail(scope, inst.base.src, "TODO implement equality comparison between a union's tag value and an enum literal", .{});
} else if (lhs_ty_tag == .ErrorSet and rhs_ty_tag == .ErrorSet) {
if (!is_equality_cmp) {
return mod.fail(scope, inst.base.src, "{} operator not allowed for errors", .{@tagName(op)});
return mod.fail(scope, inst.base.src, "{s} operator not allowed for errors", .{@tagName(op)});
}
return mod.fail(scope, inst.base.src, "TODO implement equality comparison between errors", .{});
} else if (lhs.ty.isNumeric() and rhs.ty.isNumeric()) {
@ -1962,7 +1962,7 @@ fn analyzeDeclVal(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclVal) InnerErr
const decl_name = inst.positionals.name;
const zir_module = scope.namespace().cast(Scope.ZIRModule).?;
const src_decl = zir_module.contents.module.findDecl(decl_name) orelse
return mod.fail(scope, inst.base.src, "use of undeclared identifier '{}'", .{decl_name});
return mod.fail(scope, inst.base.src, "use of undeclared identifier '{s}'", .{decl_name});

const decl = try resolveCompleteZirDecl(mod, scope, src_decl.decl);