WIP

This commit is contained in:
parent d0911786c9
commit b31c32879d
@@ -513,11 +513,6 @@ set(ZIG_STAGE2_SOURCES
src/InternPool.zig
src/Liveness.zig
src/Liveness/Verify.zig
src/Package.zig
src/Package/Fetch.zig
src/Package/Fetch/git.zig
src/Package/Manifest.zig
src/Package/Module.zig
src/RangeSet.zig
src/Sema.zig
src/Sema/bitcast.zig

@@ -1,5 +1,7 @@
const std = @import("std");
const builtin = @import("builtin");
const native_os = builtin.os.tag;

const std = @import("std");
const assert = std.debug.assert;
const io = std.io;
const fmt = std.fmt;
@@ -12,14 +14,11 @@ const Watch = std.Build.Watch;
const Fuzz = std.Build.Fuzz;
const Allocator = std.mem.Allocator;
const fatal = std.process.fatal;
const runner = @This();

pub const root = @import("@build");
pub const dependencies = @import("@dependencies");
const Directory = std.Build.Cache.Directory;
const Package = std.zig.Package;

pub const std_options: std.Options = .{
.side_channels_mitigations = .none,
.http_disable_tls = true,
.crypto_fork_safety = false,
};

@@ -34,6 +33,7 @@ pub fn main() !void {
.child_allocator = single_threaded_arena.allocator(),
};
const arena = thread_safe_arena.allocator();
const gpa = arena;

const args = try process.argsAlloc(arena);

@@ -42,26 +42,20 @@ pub fn main() !void {

const zig_exe = nextArg(args, &arg_idx) orelse fatal("missing zig compiler path", .{});
const zig_lib_dir = nextArg(args, &arg_idx) orelse fatal("missing zig lib directory path", .{});
const build_root = nextArg(args, &arg_idx) orelse fatal("missing build root directory path", .{});
const cache_root = nextArg(args, &arg_idx) orelse fatal("missing cache root directory path", .{});
const global_cache_root = nextArg(args, &arg_idx) orelse fatal("missing global cache root directory path", .{});

const zig_lib_directory: std.Build.Cache.Directory = .{
const zig_lib_directory: Directory = .{
.path = zig_lib_dir,
.handle = try std.fs.cwd().openDir(zig_lib_dir, .{}),
};

const build_root_directory: std.Build.Cache.Directory = .{
.path = build_root,
.handle = try std.fs.cwd().openDir(build_root, .{}),
};

const local_cache_directory: std.Build.Cache.Directory = .{
const local_cache_directory: Directory = .{
.path = cache_root,
.handle = try std.fs.cwd().makeOpenPath(cache_root, .{}),
};

const global_cache_directory: std.Build.Cache.Directory = .{
const global_cache_directory: Directory = .{
.path = global_cache_root,
.handle = try std.fs.cwd().makeOpenPath(global_cache_root, .{}),
};
@@ -69,7 +63,7 @@ pub fn main() !void {
var graph: std.Build.Graph = .{
.arena = arena,
.cache = .{
.gpa = arena,
.gpa = gpa,
.manifest_dir = try local_cache_directory.handle.makeOpenPath("h", .{}),
},
.zig_exe = zig_exe,
@@ -82,25 +76,13 @@ pub fn main() !void {
},
};

graph.cache.addPrefix(.{ .path = null, .handle = std.fs.cwd() });
graph.cache.addPrefix(build_root_directory);
graph.cache.addPrefix(local_cache_directory);
graph.cache.addPrefix(global_cache_directory);
graph.cache.hash.addBytes(builtin.zig_version_string);

const builder = try std.Build.create(
&graph,
build_root_directory,
local_cache_directory,
dependencies.root_deps,
);

var targets = ArrayList([]const u8).init(arena);
var debug_log_scopes = ArrayList([]const u8).init(arena);
var thread_pool_options: std.Thread.Pool.Options = .{ .allocator = arena };
var thread_pool_options: std.Thread.Pool.Options = .{ .allocator = gpa };
var options_args: std.ArrayListUnmanaged([]const u8) = .empty;

var install_prefix: ?[]const u8 = null;
var dir_list = std.Build.DirList{};
var install_paths: std.Build.InstallPaths = .{};
var summary: ?Summary = null;
var max_rss: u64 = 0;
var skip_oom_steps = false;
@@ -108,32 +90,32 @@ pub fn main() !void {
var prominent_compile_errors = false;
var help_menu = false;
var steps_menu = false;
var output_tmp_nonce: ?[16]u8 = null;
var watch = false;
var fuzz = false;
var debounce_interval_ms: u16 = 50;
var listen_port: u16 = 0;
var remaining_args: ?[]const []const u8 = null;

var build_file: ?[]const u8 = null;
var reference_trace: ?u32 = null;
var debug_compile_errors = false;
var verbose_link = (native_os != .wasi or builtin.link_libc) and std.zig.EnvVar.ZIG_VERBOSE_LINK.isSet();
var verbose_cc = (native_os != .wasi or builtin.link_libc) and std.zig.EnvVar.ZIG_VERBOSE_CC.isSet();
var verbose_air = false;
var verbose_intern_pool = false;
var verbose_generic_instances = false;
var verbose_llvm_ir: ?[]const u8 = null;
var verbose_llvm_bc: ?[]const u8 = null;
var verbose_cimport = false;
var verbose_llvm_cpu_features = false;
var fetch_only = false;

while (nextArg(args, &arg_idx)) |arg| {
if (mem.startsWith(u8, arg, "-Z")) {
if (arg.len != 18) fatalWithHint("bad argument: '{s}'", .{arg});
output_tmp_nonce = arg[2..18].*;
} else if (mem.startsWith(u8, arg, "-D")) {
const option_contents = arg[2..];
if (option_contents.len == 0)
fatalWithHint("expected option name after '-D'", .{});
if (mem.indexOfScalar(u8, option_contents, '=')) |name_end| {
const option_name = option_contents[0..name_end];
const option_value = option_contents[name_end + 1 ..];
if (try builder.addUserInputOption(option_name, option_value))
fatal("  access the help menu with 'zig build -h'", .{});
} else {
if (try builder.addUserInputFlag(option_contents))
fatal("  access the help menu with 'zig build -h'", .{});
}
if (mem.startsWith(u8, arg, "-D")) {
try options_args.append(arena, arg);
} else if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "--verbose")) {
builder.verbose = true;
graph.verbose = true;
} else if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
help_menu = true;
} else if (mem.eql(u8, arg, "-p") or mem.eql(u8, arg, "--prefix")) {
@@ -147,22 +129,22 @@ pub fn main() !void {
const name = arg["-fno-sys=".len..];
graph.system_library_options.put(arena, name, .user_disabled) catch @panic("OOM");
} else if (mem.eql(u8, arg, "--release")) {
builder.release_mode = .any;
graph.release_mode = .any;
} else if (mem.startsWith(u8, arg, "--release=")) {
const text = arg["--release=".len..];
builder.release_mode = std.meta.stringToEnum(std.Build.ReleaseMode, text) orelse {
graph.release_mode = std.meta.stringToEnum(std.Build.ReleaseMode, text) orelse {
fatalWithHint("expected [off|any|fast|safe|small] in '{s}', found '{s}'", .{
arg, text,
});
};
} else if (mem.eql(u8, arg, "--prefix-lib-dir")) {
dir_list.lib_dir = nextArgOrFatal(args, &arg_idx);
install_paths.lib_dir = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--prefix-exe-dir")) {
dir_list.exe_dir = nextArgOrFatal(args, &arg_idx);
install_paths.exe_dir = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--prefix-include-dir")) {
dir_list.include_dir = nextArgOrFatal(args, &arg_idx);
install_paths.include_dir = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--sysroot")) {
builder.sysroot = nextArgOrFatal(args, &arg_idx);
graph.sysroot = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--maxrss")) {
const max_rss_text = nextArgOrFatal(args, &arg_idx);
max_rss = std.fmt.parseIntSizeSuffix(max_rss_text, 10) catch |err| {
@@ -175,9 +157,11 @@ pub fn main() !void {
skip_oom_steps = true;
} else if (mem.eql(u8, arg, "--search-prefix")) {
const search_prefix = nextArgOrFatal(args, &arg_idx);
builder.addSearchPrefix(search_prefix);
graph.addSearchPrefix(search_prefix);
} else if (mem.eql(u8, arg, "--libc")) {
builder.libc_file = nextArgOrFatal(args, &arg_idx);
graph.libc_file = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--build-file")) {
build_file = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--color")) {
const next_arg = nextArg(args, &arg_idx) orelse
fatalWithHint("expected [auto|on|off] after '{s}'", .{arg});
@@ -222,78 +206,77 @@ pub fn main() !void {
const next_arg = nextArgOrFatal(args, &arg_idx);
try debug_log_scopes.append(next_arg);
} else if (mem.eql(u8, arg, "--debug-pkg-config")) {
builder.debug_pkg_config = true;
graph.debug_pkg_config = true;
} else if (mem.eql(u8, arg, "--debug-rt")) {
graph.debug_compiler_runtime_libs = true;
} else if (mem.eql(u8, arg, "--debug-compile-errors")) {
builder.debug_compile_errors = true;
graph.debug_compile_errors = true;
} else if (mem.eql(u8, arg, "--system")) {
// The usage text shows another argument after this parameter
// but it is handled by the parent process. The build runner
// only sees this flag.
graph.system_package_mode = true;
graph.system_package_mode = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--glibc-runtimes")) {
builder.glibc_runtimes_dir = nextArgOrFatal(args, &arg_idx);
graph.glibc_runtimes_dir = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--verbose-link")) {
builder.verbose_link = true;
graph.verbose_link = true;
} else if (mem.eql(u8, arg, "--verbose-air")) {
builder.verbose_air = true;
graph.verbose_air = true;
} else if (mem.eql(u8, arg, "--verbose-llvm-ir")) {
builder.verbose_llvm_ir = "-";
graph.verbose_llvm_ir = "-";
} else if (mem.startsWith(u8, arg, "--verbose-llvm-ir=")) {
builder.verbose_llvm_ir = arg["--verbose-llvm-ir=".len..];
graph.verbose_llvm_ir = arg["--verbose-llvm-ir=".len..];
} else if (mem.eql(u8, arg, "--verbose-llvm-bc=")) {
builder.verbose_llvm_bc = arg["--verbose-llvm-bc=".len..];
graph.verbose_llvm_bc = arg["--verbose-llvm-bc=".len..];
} else if (mem.eql(u8, arg, "--verbose-cimport")) {
builder.verbose_cimport = true;
graph.verbose_cimport = true;
} else if (mem.eql(u8, arg, "--verbose-cc")) {
builder.verbose_cc = true;
graph.verbose_cc = true;
} else if (mem.eql(u8, arg, "--verbose-llvm-cpu-features")) {
builder.verbose_llvm_cpu_features = true;
graph.verbose_llvm_cpu_features = true;
} else if (mem.eql(u8, arg, "--prominent-compile-errors")) {
prominent_compile_errors = true;
} else if (mem.eql(u8, arg, "--watch")) {
watch = true;
} else if (mem.eql(u8, arg, "--fuzz")) {
fuzz = true;
} else if (mem.eql(u8, arg, "--fetch")) {
fetch_only = true;
} else if (mem.eql(u8, arg, "-fincremental")) {
graph.incremental = true;
} else if (mem.eql(u8, arg, "-fno-incremental")) {
graph.incremental = false;
} else if (mem.eql(u8, arg, "-fwine")) {
builder.enable_wine = true;
graph.enable_wine = true;
} else if (mem.eql(u8, arg, "-fno-wine")) {
builder.enable_wine = false;
graph.enable_wine = false;
} else if (mem.eql(u8, arg, "-fqemu")) {
builder.enable_qemu = true;
graph.enable_qemu = true;
} else if (mem.eql(u8, arg, "-fno-qemu")) {
builder.enable_qemu = false;
graph.enable_qemu = false;
} else if (mem.eql(u8, arg, "-fwasmtime")) {
builder.enable_wasmtime = true;
graph.enable_wasmtime = true;
} else if (mem.eql(u8, arg, "-fno-wasmtime")) {
builder.enable_wasmtime = false;
graph.enable_wasmtime = false;
} else if (mem.eql(u8, arg, "-frosetta")) {
builder.enable_rosetta = true;
graph.enable_rosetta = true;
} else if (mem.eql(u8, arg, "-fno-rosetta")) {
builder.enable_rosetta = false;
graph.enable_rosetta = false;
} else if (mem.eql(u8, arg, "-fdarling")) {
builder.enable_darling = true;
graph.enable_darling = true;
} else if (mem.eql(u8, arg, "-fno-darling")) {
builder.enable_darling = false;
graph.enable_darling = false;
} else if (mem.eql(u8, arg, "-fallow-so-scripts")) {
graph.allow_so_scripts = true;
} else if (mem.eql(u8, arg, "-fno-allow-so-scripts")) {
graph.allow_so_scripts = false;
} else if (mem.eql(u8, arg, "-freference-trace")) {
builder.reference_trace = 256;
graph.reference_trace = 256;
} else if (mem.startsWith(u8, arg, "-freference-trace=")) {
const num = arg["-freference-trace=".len..];
builder.reference_trace = std.fmt.parseUnsigned(u32, num, 10) catch |err| {
graph.reference_trace = std.fmt.parseUnsigned(u32, num, 10) catch |err| {
std.debug.print("unable to parse reference_trace count '{s}': {s}", .{ num, @errorName(err) });
process.exit(1);
};
} else if (mem.eql(u8, arg, "-fno-reference-trace")) {
builder.reference_trace = null;
graph.reference_trace = null;
} else if (mem.startsWith(u8, arg, "-j")) {
const num = arg["-j".len..];
const n_jobs = std.fmt.parseUnsigned(u32, num, 10) catch |err| {
@@ -308,7 +291,7 @@ pub fn main() !void {
}
thread_pool_options.n_jobs = n_jobs;
} else if (mem.eql(u8, arg, "--")) {
builder.args = argsRest(args, arg_idx);
remaining_args = argsRest(args, arg_idx);
break;
} else {
fatalWithHint("unrecognized argument: '{s}'", .{arg});
@@ -317,6 +300,20 @@ pub fn main() !void {
try targets.append(arg);
}
}
graph.debug_log_scopes = debug_log_scopes.items;

const cwd_path = try process.getCwdAlloc(arena);
const build_root = try Package.findBuildRoot(arena, .{
.cwd_path = cwd_path,
.build_file = build_file,
});

graph.cache.addPrefix(.{ .path = null, .handle = std.fs.cwd() });
graph.cache.addPrefix(build_root.directory);
graph.cache.addPrefix(local_cache_directory);
graph.cache.addPrefix(global_cache_directory);
graph.cache.hash.addBytes(builtin.zig_version_string);

const stderr = std.io.getStdErr();
const ttyconf = get_tty_conf(color, stderr);
@@ -331,13 +328,307 @@ pub fn main() !void {
});
defer main_progress_node.end();

builder.debug_log_scopes = debug_log_scopes.items;
builder.resolveInstallPrefix(install_prefix, dir_list);
var thread_pool: std.Thread.Pool = undefined;
try thread_pool.init(thread_pool_options);
defer thread_pool.deinit();

{
var prog_node = main_progress_node.start("Configure", 0);
var compile_argv: std.ArrayListUnmanaged([]const u8) = .empty;
defer compile_argv.deinit(gpa);

var run_argv: std.ArrayListUnmanaged([]const u8) = .empty;
defer run_argv.deinit(gpa);

var cli_modules: std.StringArrayHashMapUnmanaged(CliModule) = .empty;
defer cli_modules.deinit(gpa);

const configure_runner_module = try std.fmt.allocPrint(arena, "-Mroot={s}/lib/configure_runner.zig", .{
zig_lib_dir,
});
const build_zig_module = try std.fmt.allocPrint(arena, "-M@build={}/{s}", .{
build_root.directory, build_root.build_zig_basename,
});

const exe_basename = try std.zig.binNameAlloc(arena, .{
.root_name = "configure",
.target = graph.host.result,
.output_mode = .Exe,
});
var http_client: std.http.Client = .{ .allocator = gpa };
defer http_client.deinit();

var unlazy_set: Package.Fetch.JobQueue.UnlazySet = .{};

// This loop is re-evaluated when the build script exits with an indication that it
// could not continue due to missing lazy dependencies.
while (true) {
// We want to release all the locks before executing the child process, so we make a nice
// big block here to ensure the cleanup gets run when we extract out our argv.
{
{
cli_modules.clearRetainingCapacity();
const root_mod = try addCliModule(gpa, arena, &cli_modules, configure_runner_module);
const build_mod = try addCliModule(gpa, arena, &cli_modules, build_zig_module);

const fetch_prog_node = main_progress_node.start("Fetch Packages", 0);
defer fetch_prog_node.end();

const work_around_btrfs_bug = native_os == .linux and
std.zig.EnvVar.ZIG_BTRFS_WORKAROUND.isSet();

var job_queue: Package.Fetch.JobQueue = .{
.http_client = &http_client,
.thread_pool = &thread_pool,
.global_cache = global_cache_directory,
.read_only = false,
.recursive = true,
.debug_hash = false,
.work_around_btrfs_bug = work_around_btrfs_bug,
.unlazy_set = unlazy_set,
};
defer job_queue.deinit();

if (graph.system_package_mode) |p| {
job_queue.global_cache = p;
job_queue.read_only = true;
} else {
try http_client.initDefaultProxies(arena);
}

try job_queue.all_fetches.ensureUnusedCapacity(gpa, 1);
try job_queue.table.ensureUnusedCapacity(gpa, 1);

var fetch: Package.Fetch = .{
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .relative_path = build_root.directory },
.location_tok = 0,
.hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = build_root.directory,
.parent_manifest_ast = null,
.prog_node = fetch_prog_node,
.job_queue = &job_queue,
.omit_missing_hash_error = true,
.allow_missing_paths_field = false,
.allow_missing_fingerprint = false,
.allow_name_string = false,
.use_latest_commit = false,

.package_root = undefined,
.error_bundle = undefined,
.manifest = null,
.manifest_ast = undefined,
.computed_hash = undefined,
.has_build_zig = true,
.oom_flag = false,
.latest_commit = null,

.userdata = build_mod,
};
job_queue.all_fetches.appendAssumeCapacity(&fetch);

job_queue.table.putAssumeCapacityNoClobber(
Package.Fetch.relativePathDigest(build_root.directory, global_cache_directory),
&fetch,
);

job_queue.thread_pool.spawnWg(&job_queue.wait_group, Package.Fetch.workerRun, .{
&fetch, "root",
});
job_queue.wait_group.wait();

try job_queue.consolidateErrors();

if (fetch.error_bundle.root_list.items.len > 0) {
var errors = try fetch.error_bundle.toOwnedBundle("");
errors.renderToStdErr(color.renderOptions());
process.exit(1);
}

if (fetch_only) return std.process.cleanExit();

var source_buf = std.ArrayList(u8).init(gpa);
defer source_buf.deinit();
try job_queue.createDependenciesSource(&source_buf);
const deps_mod = try createDependenciesModule(
arena,
source_buf.items,
root_mod,
global_cache_directory,
local_cache_directory,
builtin_mod,
config,
);

{
// We need a Module for each package's build.zig.
const hashes = job_queue.table.keys();
const fetches = job_queue.table.values();
try deps_mod.deps.ensureUnusedCapacity(arena, @intCast(hashes.len));
for (hashes, fetches) |*hash, f| {
if (f == &fetch) {
// The first one is a dummy package for the current project.
continue;
}
if (!f.has_build_zig)
continue;
const hash_slice = hash.toSlice();
const m = try Package.Module.create(arena, .{
.global_cache_directory = global_cache_directory,
.paths = .{
.root = try f.package_root.clone(arena),
.root_src_path = Package.build_zig_basename,
},
.fully_qualified_name = try std.fmt.allocPrint(
arena,
"root.@dependencies.{s}",
.{hash_slice},
),
.cc_argv = &.{},
.inherited = .{},
.global = config,
.parent = root_mod,
.builtin_mod = builtin_mod,
.builtin_modules = null, // `builtin_mod` is specified
});
const hash_cloned = try arena.dupe(u8, hash_slice);
deps_mod.deps.putAssumeCapacityNoClobber(hash_cloned, m);
f.module = m;
}

// Each build.zig module needs access to each of its
// dependencies' build.zig modules by name.
for (fetches) |f| {
const mod = f.module orelse continue;
const man = f.manifest orelse continue;
const dep_names = man.dependencies.keys();
try mod.deps.ensureUnusedCapacity(arena, @intCast(dep_names.len));
for (dep_names, man.dependencies.values()) |name, dep| {
const dep_digest = Package.Fetch.depDigest(
f.package_root,
global_cache_directory,
dep,
) orelse continue;
const dep_mod = job_queue.table.get(dep_digest).?.module orelse continue;
const name_cloned = try arena.dupe(u8, name);
mod.deps.putAssumeCapacityNoClobber(name_cloned, dep_mod);
}
}
}
}

try root_mod.deps.put(arena, "@build", Package.build_zig_basename);

const keep_alive = false;
var prog_node = main_progress_node.start("Compile Build Script", 0);
defer prog_node.end();
try builder.runBuild(root);
createModuleDependencies(builder) catch @panic("OOM");

try child_argv.appendSlice(gpa, &.{
zig_exe, "build-exe", build_zig_module,
"--dep", "@build", configure_runner_module,
"--listen=-",
});
const maybe_output_dir = try evalZigProcess(step, child_argv.items, prog_node, keep_alive);
const configure_exe_path = try maybe_output_dir.?.joinString(arena, exe_basename);

prog_node.end();
prog_node = main_progress_node.start("Run Build Script", 0);

child_argv.clearRetainingCapacity();
child_argv.appendSliceAssumeCapacity(&.{
configure_exe_path,
zig_exe,
zig_lib_dir,
cache_root,
global_cache_root,
build_root,
});

child_argv.items[argv_index_exe] =
try local_cache_directory.join(arena, &.{comp.cache_use.whole.bin_sub_path.?});
}

if (process.can_spawn) {
var child = std.process.Child.init(child_argv.items, gpa);
child.stdin_behavior = .Inherit;
child.stdout_behavior = .Inherit;
child.stderr_behavior = .Inherit;

const term = t: {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
break :t child.spawnAndWait() catch |err| {
fatal("failed to spawn build runner {s}: {s}", .{ child_argv.items[0], @errorName(err) });
};
};

switch (term) {
.Exited => |code| {
if (code == 0) return cleanExit();
// Indicates that the build runner has reported compile errors
// and this parent process does not need to report any further
// diagnostics.
if (code == 2) process.exit(2);

if (code == 3) {
if (!dev.env.supports(.fetch_command)) process.exit(3);
// Indicates the configure phase failed due to missing lazy
// dependencies and stdout contains the hashes of the ones
// that are missing.
const s = fs.path.sep_str;
const tmp_sub_path = "tmp" ++ s ++ results_tmp_file_nonce;
const stdout = local_cache_directory.handle.readFileAlloc(arena, tmp_sub_path, 50 * 1024 * 1024) catch |err| {
fatal("unable to read results of configure phase from '{}{s}': {s}", .{
local_cache_directory, tmp_sub_path, @errorName(err),
});
};
local_cache_directory.handle.deleteFile(tmp_sub_path) catch {};

var it = mem.splitScalar(u8, stdout, '\n');
var any_errors = false;
while (it.next()) |hash| {
if (hash.len == 0) continue;
if (hash.len > Package.Hash.max_len) {
std.log.err("invalid digest (length {d} exceeds maximum): '{s}'", .{
hash.len, hash,
});
any_errors = true;
continue;
}
try unlazy_set.put(arena, .fromSlice(hash), {});
}
if (any_errors) process.exit(3);
if (graph.system_package_mode) |p| {
// In this mode, the system needs to provide these packages; they
// cannot be fetched by Zig.
for (unlazy_set.keys()) |*hash| {
std.log.err("lazy dependency package not found: {s}" ++ s ++ "{s}", .{
p, hash.toSlice(),
});
}
std.log.info("remote package fetching disabled due to --system mode", .{});
std.log.info("dependencies might be avoidable depending on build configuration", .{});
process.exit(3);
}
continue;
}

const cmd = try std.mem.join(arena, " ", child_argv.items);
fatal("the following build command failed with exit code {d}:\n{s}", .{ code, cmd });
},
else => {
const cmd = try std.mem.join(arena, " ", child_argv.items);
fatal("the following build command crashed:\n{s}", .{cmd});
},
}
} else {
const cmd = try std.mem.join(arena, " ", child_argv.items);
fatal("the following command cannot be executed ({s} does not support spawning a child process):\n{s}", .{ @tagName(native_os), cmd });
}
}
}

if (graph.needed_lazy_dependencies.entries.len != 0) {
@@ -389,7 +680,7 @@ pub fn main() !void {
.summary = summary orelse if (watch) .new else .failures,
.ttyconf = ttyconf,
.stderr = stderr,
.thread_pool = undefined,
.thread_pool = thread_pool,
};

if (run.max_rss == 0) {
@@ -405,9 +696,6 @@ pub fn main() !void {

var w = if (watch) try Watch.init() else undefined;

try run.thread_pool.init(thread_pool_options);
defer run.thread_pool.deinit();

rebuild: while (true) {
runStepNames(
gpa,
@@ -486,6 +774,14 @@ pub fn main() !void {
}
}

const CliModule = struct {
deps: std.StringArrayHashMapUnmanaged(*CliModule),
};

fn addCliModule(gpa: Allocator, arena: Allocator, aoeu
const build_mod = try addCliModule(gpa, arena, &cli_modules, build_zig_module);

fn markFailedStepsDirty(gpa: Allocator, all_steps: []const *Step) void {
for (all_steps) |step| switch (step.state) {
.dependency_failure, .failure, .skipped => step.recursiveReset(gpa),
@@ -517,7 +813,7 @@ const Run = struct {
memory_blocked_steps: std.ArrayList(*Step),
step_stack: std.AutoArrayHashMapUnmanaged(*Step, void),
prominent_compile_errors: bool,
thread_pool: std.Thread.Pool,
thread_pool: *std.Thread.Pool,

claimed_rss: usize,
summary: Summary,
@@ -526,7 +822,7 @@ const Run = struct {

fn cleanExit(run: Run) void {
if (run.watch or run.fuzz) return;
return runner.cleanExit();
return std.process.cleanExit();
}
};

@@ -1396,14 +1692,6 @@ fn argsRest(args: []const [:0]const u8, idx: usize) ?[]const [:0]const u8 {
return args[idx..];
}

/// Perhaps in the future there could be an Advanced Options flag such as
/// --debug-build-runner-leaks which would make this function return instead of
/// calling exit.
fn cleanExit() void {
std.debug.lockStdErr();
process.exit(0);
}

/// Perhaps in the future there could be an Advanced Options flag such as
/// --debug-build-runner-leaks which would make this function return instead of
/// calling exit.
@@ -1447,79 +1735,56 @@ fn validateSystemLibraryOptions(b: *std.Build) void {
}
}

/// Starting from all top-level steps in `b`, traverses the entire step graph
/// and adds all step dependencies implied by module graphs.
fn createModuleDependencies(b: *std.Build) Allocator.Error!void {
const arena = b.graph.arena;

var all_steps: std.AutoArrayHashMapUnmanaged(*Step, void) = .empty;
var next_step_idx: usize = 0;

try all_steps.ensureUnusedCapacity(arena, b.top_level_steps.count());
for (b.top_level_steps.values()) |tls| {
all_steps.putAssumeCapacityNoClobber(&tls.step, {});
/// Creates the dependencies.zig file and corresponding `Module` for the
/// build runner to obtain via `@import("@dependencies")`.
fn createDependenciesModule(
arena: Allocator,
source: []const u8,
main_mod: *CliModule,
global_cache_directory: Directory,
local_cache_directory: Directory,
builtin_mod: *Module,
global_options: Compilation.Config,
) !*CliModule {
// Atomically create the file in a directory named after the hash of its contents.
const basename = "dependencies.zig";
const rand_int = std.crypto.random.int(u64);
const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ std.fmt.hex(rand_int);
{
var tmp_dir = try local_cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{});
defer tmp_dir.close();
try tmp_dir.writeFile(.{ .sub_path = basename, .data = source });
}

while (next_step_idx < all_steps.count()) {
const step = all_steps.keys()[next_step_idx];
next_step_idx += 1;
var hh: Cache.HashHelper = .{};
hh.addBytes(build_options.version);
hh.addBytes(source);
const hex_digest = hh.final();

// Set up any implied dependencies for this step. It's important that we do this first, so
// that the loop below discovers steps implied by the module graph.
try createModuleDependenciesForStep(step);
const o_dir_sub_path = try arena.dupe(u8, "o" ++ fs.path.sep_str ++ hex_digest);
try Package.Fetch.renameTmpIntoCache(
local_cache_directory.handle,
tmp_dir_sub_path,
o_dir_sub_path,
);

try all_steps.ensureUnusedCapacity(arena, step.dependencies.items.len);
for (step.dependencies.items) |other_step| {
all_steps.putAssumeCapacity(other_step, {});
}
}
}

/// If the given `Step` is a `Step.Compile`, adds any dependencies for that step which
/// are implied by the module graph rooted at `step.cast(Step.Compile).?.root_module`.
fn createModuleDependenciesForStep(step: *Step) Allocator.Error!void {
const root_module = if (step.cast(Step.Compile)) |cs| root: {
break :root cs.root_module;
} else return; // not a compile step so no module dependencies

// Starting from `root_module`, discover all modules in this graph.
const modules = root_module.getGraph().modules;

// For each of those modules, set up the implied step dependencies.
for (modules) |mod| {
if (mod.root_source_file) |lp| lp.addStepDependencies(step);
for (mod.include_dirs.items) |include_dir| switch (include_dir) {
.path,
.path_system,
.path_after,
.framework_path,
.framework_path_system,
=> |lp| lp.addStepDependencies(step),

.other_step => |other| {
other.getEmittedIncludeTree().addStepDependencies(step);
step.dependOn(&other.step);
const deps_mod = try Module.create(arena, .{
.global_cache_directory = global_cache_directory,
.paths = .{
.root = .{
.root_dir = local_cache_directory,
.sub_path = o_dir_sub_path,
},

.config_header_step => |other| step.dependOn(&other.step),
};
for (mod.lib_paths.items) |lp| lp.addStepDependencies(step);
for (mod.rpaths.items) |rpath| switch (rpath) {
.lazy_path => |lp| lp.addStepDependencies(step),
.special => {},
};
for (mod.link_objects.items) |link_object| switch (link_object) {
.static_path,
.assembly_file,
=> |lp| lp.addStepDependencies(step),
.other_step => |other| step.dependOn(&other.step),
.system_lib => {},
.c_source_file => |source| source.file.addStepDependencies(step),
.c_source_files => |source_files| source_files.root.addStepDependencies(step),
.win32_resource_file => |rc_source| {
rc_source.file.addStepDependencies(step);
for (rc_source.include_paths) |lp| lp.addStepDependencies(step);
.root_src_path = basename,
},
};
}
.fully_qualified_name = "root.@dependencies",
.parent = main_mod,
.cc_argv = &.{},
.inherited = .{},
.global = global_options,
.builtin_mod = builtin_mod,
.builtin_modules = null, // `builtin_mod` is specified
});
try main_mod.deps.put(arena, "@dependencies", deps_mod);
return deps_mod;
}
lib/compiler/configure_runner.zig (new file, 214 lines)
@@ -0,0 +1,214 @@
const builtin = @import("builtin");

const std = @import("std");
const mem = std.mem;
const fatal = std.process.fatal;
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const Step = std.Build.Step;

pub const root = @import("@build");
pub const dependencies = @import("@dependencies");

pub const std_options: std.Options = .{
.side_channels_mitigations = .none,
.http_disable_tls = true,
.crypto_fork_safety = false,
};

comptime {
assert(builtin.single_threaded);
}

pub fn main() !void {
var single_threaded_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer single_threaded_arena.deinit();
const arena = single_threaded_arena.allocator();

const args = try std.process.argsAlloc(arena);

// skip my own exe name
var arg_idx: usize = 1;

const zig_exe = nextArg(args, &arg_idx) orelse fatal("missing zig compiler path", .{});
const zig_lib_dir = nextArg(args, &arg_idx) orelse fatal("missing zig lib directory path", .{});
const cache_root = nextArg(args, &arg_idx) orelse fatal("missing cache root directory path", .{});
const global_cache_root = nextArg(args, &arg_idx) orelse fatal("missing global cache root directory path", .{});
const build_root = nextArg(args, &arg_idx) orelse fatal("missing build root directory path", .{});

const zig_lib_directory: std.Build.Cache.Directory = .{
.path = zig_lib_dir,
.handle = try std.fs.cwd().openDir(zig_lib_dir, .{}),
};

const build_root_directory: std.Build.Cache.Directory = .{
.path = build_root,
.handle = try std.fs.cwd().openDir(build_root, .{}),
};

const local_cache_directory: std.Build.Cache.Directory = .{
.path = cache_root,
.handle = try std.fs.cwd().makeOpenPath(cache_root, .{}),
};

const global_cache_directory: std.Build.Cache.Directory = .{
.path = global_cache_root,
.handle = try std.fs.cwd().makeOpenPath(global_cache_root, .{}),
};

var graph: std.Build.Graph = .{
.arena = arena,
.cache = .{
.gpa = arena,
.manifest_dir = try local_cache_directory.handle.makeOpenPath("h", .{}),
},
.zig_exe = zig_exe,
.env_map = try std.process.getEnvMap(arena),
.global_cache_root = global_cache_directory,
.zig_lib_directory = zig_lib_directory,
.host = .{
.query = .{},
.result = try std.zig.system.resolveTargetQuery(.{}),
},
};

graph.cache.addPrefix(.{ .path = null, .handle = std.fs.cwd() });
graph.cache.addPrefix(build_root_directory);
graph.cache.addPrefix(local_cache_directory);
graph.cache.addPrefix(global_cache_directory);
graph.cache.hash.addBytes(builtin.zig_version_string);

const builder = try std.Build.create(
&graph,
build_root_directory,
local_cache_directory,
dependencies.root_deps,
);

var install_prefix: ?std.Build.Cache.Path = null;
var install_paths: std.Build.InstallPaths = .{};

while (nextArg(args, &arg_idx)) |arg| {
if (mem.startsWith(u8, arg, "-D")) {
const option_contents = arg[2..];
if (option_contents.len == 0)
fatal("expected option name after '-D'", .{});
if (mem.indexOfScalar(u8, option_contents, '=')) |name_end| {
const option_name = option_contents[0..name_end];
const option_value = option_contents[name_end + 1 ..];
if (try builder.addUserInputOption(option_name, option_value))
fatal("  access the help menu with 'zig build -h'", .{});
} else {
if (try builder.addUserInputFlag(option_contents))
fatal("  access the help menu with 'zig build -h'", .{});
}
} else if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-p") or mem.eql(u8, arg, "--prefix")) {
install_prefix = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--prefix-lib-dir")) {
install_paths.lib_dir = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--prefix-exe-dir")) {
install_paths.exe_dir = nextArgOrFatal(args, &arg_idx);
} else if (mem.eql(u8, arg, "--prefix-include-dir")) {
install_paths.include_dir = nextArgOrFatal(args, &arg_idx);
} else {
fatal("unrecognized argument: '{s}'", .{arg});
}
} else {
fatal("unrecognized argument: '{s}'", .{arg});
}
}

builder.resolveInstallPrefix(install_prefix, install_paths);
try builder.runBuild(root);
createModuleDependencies(builder) catch @panic("OOM");

try std.io.getStdOut().writeAll("TODO\n");
}

fn nextArg(args: []const [:0]const u8, idx: *usize) ?[:0]const u8 {
if (idx.* >= args.len) return null;
defer idx.* += 1;
return args[idx.*];
}

fn nextArgOrFatal(args: []const [:0]const u8, idx: *usize) [:0]const u8 {
return nextArg(args, idx) orelse fatal("expected argument after '{s}'", .{args[idx.* - 1]});
}

/// Starting from all top-level steps in `b`, traverses the entire step graph
/// and adds all step dependencies implied by module graphs.
fn createModuleDependencies(b: *std.Build) Allocator.Error!void {
const arena = b.graph.arena;

var all_steps: std.AutoArrayHashMapUnmanaged(*Step, void) = .empty;
var next_step_idx: usize = 0;

try all_steps.ensureUnusedCapacity(arena, b.top_level_steps.count());
for (b.top_level_steps.values()) |tls| {
all_steps.putAssumeCapacityNoClobber(&tls.step, {});
}

while (next_step_idx < all_steps.count()) {
const step = all_steps.keys()[next_step_idx];
next_step_idx += 1;

// Set up any implied dependencies for this step. It's important that we do this first, so
// that the loop below discovers steps implied by the module graph.
try createModuleDependenciesForStep(step);

try all_steps.ensureUnusedCapacity(arena, step.dependencies.items.len);
for (step.dependencies.items) |other_step| {
all_steps.putAssumeCapacity(other_step, {});
}
}
}

/// If the given `Step` is a `Step.Compile`, adds any dependencies for that step which
/// are implied by the module graph rooted at `step.cast(Step.Compile).?.root_module`.
fn createModuleDependenciesForStep(step: *Step) Allocator.Error!void {
const root_module = if (step.cast(Step.Compile)) |cs| root: {
break :root cs.root_module;
} else return; // not a compile step so no module dependencies

// Starting from `root_module`, discover all modules in this graph.
const modules = root_module.getGraph().modules;

// For each of those modules, set up the implied step dependencies.
for (modules) |mod| {
if (mod.root_source_file) |lp| lp.addStepDependencies(step);
for (mod.include_dirs.items) |include_dir| switch (include_dir) {
.path,
.path_system,
.path_after,
.framework_path,
.framework_path_system,
=> |lp| lp.addStepDependencies(step),

.other_step => |other| {
other.getEmittedIncludeTree().addStepDependencies(step);
step.dependOn(&other.step);
},

.config_header_step => |other| step.dependOn(&other.step),
};
for (mod.lib_paths.items) |lp| lp.addStepDependencies(step);
for (mod.rpaths.items) |rpath| switch (rpath) {
.lazy_path => |lp| lp.addStepDependencies(step),
.special => {},
};
for (mod.link_objects.items) |link_object| switch (link_object) {
.static_path,
.assembly_file,
=> |lp| lp.addStepDependencies(step),
.other_step => |other| step.dependOn(&other.step),
.system_lib => {},
.c_source_file => |source| source.file.addStepDependencies(step),
.c_source_files => |source_files| source_files.root.addStepDependencies(step),
.win32_resource_file => |rc_source| {
rc_source.file.addStepDependencies(step);
for (rc_source.include_paths) |lp| lp.addStepDependencies(step);
},
};
}
}
lib/compiler/fetch.zig (new file, 382 lines)
@@ -0,0 +1,382 @@
const builtin = @import("builtin");
const native_os = builtin.os.tag;

const std = @import("std");
const mem = std.mem;
const fs = std.fs;
const process = std.process;
const fatal = std.process.fatal;
const Path = std.Build.Cache.Path;
const Directory = std.Build.Cache.Directory;
const Package = std.zig.Package;
const Allocator = std.mem.Allocator;

const usage =
\\Usage: zig fetch [options] <url>
\\Usage: zig fetch [options] <path>
\\
\\ Copy a package into the global cache and print its hash.
\\ <url> must point to one of the following:
\\ - A git+http / git+https server for the package
\\ - A tarball file (with or without compression) containing
\\ package source
\\ - A git bundle file containing package source
\\
\\Examples:
\\
\\ zig fetch --save git+https://example.com/andrewrk/fun-example-tool.git
\\ zig fetch --save https://example.com/andrewrk/fun-example-tool/archive/refs/heads/master.tar.gz
\\
\\Options:
\\ -h, --help Print this help and exit
\\ --global-cache-dir [path] Override path to global Zig cache directory
\\ --debug-hash Print verbose hash information to stdout
\\ --save Add the fetched package to build.zig.zon
\\ --save=[name] Add the fetched package to build.zig.zon as name
\\ --save-exact Add the fetched package to build.zig.zon, storing the URL verbatim
\\ --save-exact=[name] Add the fetched package to build.zig.zon as name, storing the URL verbatim
\\
;

pub const std_options: std.Options = .{
.side_channels_mitigations = .none,
.crypto_fork_safety = false,
};

pub fn main() !void {
var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena_instance.deinit();
const arena = arena_instance.allocator();

const gpa = arena;

const args = try process.argsAlloc(arena);

var zig_lib_directory: Directory = .{
.handle = try std.fs.cwd().openDir(args[1], .{}),
};
defer zig_lib_directory.handle.close();

var global_cache_directory: Directory = .{
.handle = try std.fs.cwd().openDir(args[2], .{}),
};
defer global_cache_directory.handle.close();

const color: std.zig.Color = .auto;
const work_around_btrfs_bug = native_os == .linux and std.zig.EnvVar.ZIG_BTRFS_WORKAROUND.isSet();
var opt_path_or_url: ?[]const u8 = null;
var debug_hash: bool = false;
var save: union(enum) {
no,
yes: ?[]const u8,
exact: ?[]const u8,
} = .no;

{
var i: usize = 3;
while (i < args.len) : (i += 1) {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
const stdout = std.io.getStdOut().writer();
try stdout.writeAll(usage);
return process.cleanExit();
} else if (mem.eql(u8, arg, "--debug-hash")) {
debug_hash = true;
} else if (mem.eql(u8, arg, "--save")) {
save = .{ .yes = null };
} else if (mem.startsWith(u8, arg, "--save=")) {
save = .{ .yes = arg["--save=".len..] };
} else if (mem.eql(u8, arg, "--save-exact")) {
save = .{ .exact = null };
} else if (mem.startsWith(u8, arg, "--save-exact=")) {
save = .{ .exact = arg["--save-exact=".len..] };
} else {
fatal("unrecognized parameter: '{s}'", .{arg});
}
} else if (opt_path_or_url != null) {
fatal("unexpected extra parameter: '{s}'", .{arg});
} else {
opt_path_or_url = arg;
}
}
}

const path_or_url = opt_path_or_url orelse fatal("missing url or path parameter", .{});

var thread_pool: std.Thread.Pool = undefined;
try thread_pool.init(.{ .allocator = gpa });
defer thread_pool.deinit();

var http_client: std.http.Client = .{ .allocator = gpa };
defer http_client.deinit();

try http_client.initDefaultProxies(arena);

var root_prog_node = std.Progress.start(.{
.root_name = "Fetch",
});
defer root_prog_node.end();

var job_queue: Package.Fetch.JobQueue = .{
.http_client = &http_client,
.thread_pool = &thread_pool,
.global_cache = global_cache_directory,
.recursive = false,
.read_only = false,
.debug_hash = debug_hash,
.work_around_btrfs_bug = work_around_btrfs_bug,
};
defer job_queue.deinit();

var fetch: Package.Fetch = .{
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .path_or_url = path_or_url },
.location_tok = 0,
.hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = undefined,
.parent_manifest_ast = null,
.prog_node = root_prog_node,
.job_queue = &job_queue,
.omit_missing_hash_error = true,
.allow_missing_paths_field = false,
.allow_missing_fingerprint = true,
.allow_name_string = true,
.use_latest_commit = true,

.package_root = undefined,
.error_bundle = undefined,
.manifest = null,
.manifest_ast = undefined,
.computed_hash = undefined,
.has_build_zig = false,
.oom_flag = false,
.latest_commit = null,
};
defer fetch.deinit();

fetch.run() catch |err| switch (err) {
error.OutOfMemory => fatal("out of memory", .{}),
error.FetchFailed => {}, // error bundle checked below
};

if (fetch.error_bundle.root_list.items.len > 0) {
var errors = try fetch.error_bundle.toOwnedBundle("");
errors.renderToStdErr(color.renderOptions());
process.exit(1);
}

const package_hash = fetch.computedPackageHash();
const package_hash_slice = package_hash.toSlice();

root_prog_node.end();
root_prog_node = .{ .index = .none };

const name = switch (save) {
.no => {
try std.io.getStdOut().writer().print("{s}\n", .{package_hash_slice});
return process.cleanExit();
},
.yes, .exact => |name| name: {
if (name) |n| break :name n;
const fetched_manifest = fetch.manifest orelse
fatal("unable to determine name; fetched package has no build.zig.zon file", .{});
break :name fetched_manifest.name;
},
};

const cwd_path = try process.getCwdAlloc(arena);

var build_root = try Package.findBuildRoot(arena, .{
.cwd_path = cwd_path,
});
defer build_root.deinit();

// The name to use in case the manifest file needs to be created now.
const init_root_name = std.fs.path.basename(build_root.directory.path orelse cwd_path);
var manifest, var ast = try loadManifest(gpa, arena, zig_lib_directory, .{
.root_name = try Package.sanitizeExampleName(arena, init_root_name),
.dir = build_root.directory.handle,
.color = color,
});
defer {
manifest.deinit(gpa);
ast.deinit(gpa);
}

var fixups: std.zig.Ast.Fixups = .{};
defer fixups.deinit(gpa);

var saved_path_or_url = path_or_url;

if (fetch.latest_commit) |latest_commit| resolved: {
const latest_commit_hex = try std.fmt.allocPrint(arena, "{}", .{latest_commit});

var uri = try std.Uri.parse(path_or_url);

if (uri.fragment) |fragment| {
const target_ref = try fragment.toRawMaybeAlloc(arena);

// the refspec may already be fully resolved
if (std.mem.eql(u8, target_ref, latest_commit_hex)) break :resolved;

std.log.info("resolved ref '{s}' to commit {s}", .{ target_ref, latest_commit_hex });

// include the original refspec in a query parameter, could be used to check for updates
uri.query = .{ .percent_encoded = try std.fmt.allocPrint(arena, "ref={%}", .{fragment}) };
} else {
std.log.info("resolved to commit {s}", .{latest_commit_hex});
}

// replace the refspec with the resolved commit SHA
uri.fragment = .{ .raw = latest_commit_hex };

switch (save) {
.yes => saved_path_or_url = try std.fmt.allocPrint(arena, "{}", .{uri}),
.no, .exact => {}, // keep the original URL
}
}

const new_node_init = try std.fmt.allocPrint(arena,
\\.{{
\\ .url = "{}",
\\ .hash = "{}",
\\ }}
, .{
std.zig.fmtEscapes(saved_path_or_url),
std.zig.fmtEscapes(package_hash_slice),
});

const new_node_text = try std.fmt.allocPrint(arena, ".{p_} = {s},\n", .{
std.zig.fmtId(name), new_node_init,
});

const dependencies_init = try std.fmt.allocPrint(arena, ".{{\n {s} }}", .{
new_node_text,
});

const dependencies_text = try std.fmt.allocPrint(arena, ".dependencies = {s},\n", .{
dependencies_init,
});

if (manifest.dependencies.get(name)) |dep| {
if (dep.hash) |h| {
switch (dep.location) {
.url => |u| {
if (mem.eql(u8, h, package_hash_slice) and mem.eql(u8, u, saved_path_or_url)) {
std.log.info("existing dependency named '{s}' is up-to-date", .{name});
process.exit(0);
}
},
.path => {},
}
}

const location_replace = try std.fmt.allocPrint(
arena,
"\"{}\"",
.{std.zig.fmtEscapes(saved_path_or_url)},
);
const hash_replace = try std.fmt.allocPrint(
arena,
"\"{}\"",
.{std.zig.fmtEscapes(package_hash_slice)},
);

std.log.warn("overwriting existing dependency named '{s}'", .{name});
try fixups.replace_nodes_with_string.put(gpa, dep.location_node, location_replace);
if (dep.hash_node.unwrap()) |hash_node| {
try fixups.replace_nodes_with_string.put(gpa, hash_node, hash_replace);
} else {
// https://github.com/ziglang/zig/issues/21690
}
} else if (manifest.dependencies.count() > 0) {
// Add fixup for adding another dependency.
const deps = manifest.dependencies.values();
const last_dep_node = deps[deps.len - 1].node;
try fixups.append_string_after_node.put(gpa, last_dep_node, new_node_text);
} else if (manifest.dependencies_node.unwrap()) |dependencies_node| {
// Add fixup for replacing the entire dependencies struct.
try fixups.replace_nodes_with_string.put(gpa, dependencies_node, dependencies_init);
} else {
// Add fixup for adding dependencies struct.
try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text);
}

var rendered = std.ArrayList(u8).init(gpa);
defer rendered.deinit();
try ast.renderToArrayList(&rendered, fixups);

build_root.directory.handle.writeFile(.{ .sub_path = Package.Manifest.basename, .data = rendered.items }) catch |err| {
fatal("unable to write {s} file: {s}", .{ Package.Manifest.basename, @errorName(err) });
};

return process.cleanExit();
}

const LoadManifestOptions = struct {
root_name: []const u8,
dir: fs.Dir,
color: std.zig.Color,
};

fn loadManifest(
gpa: Allocator,
arena: Allocator,
zig_lib_directory: Directory,
options: LoadManifestOptions,
) !struct { Package.Manifest, std.zig.Ast } {
const manifest_bytes = while (true) {
break options.dir.readFileAllocOptions(
arena,
Package.Manifest.basename,
Package.Manifest.max_bytes,
null,
1,
0,
) catch |err| switch (err) {
error.FileNotFound => {
const fingerprint: Package.Fingerprint = .generate(options.root_name);
var templates = Package.Templates.find(gpa, zig_lib_directory);
defer templates.deinit(gpa);
templates.write(arena, options.dir, options.root_name, Package.Manifest.basename, fingerprint) catch |e| {
fatal("unable to write {s}: {s}", .{
Package.Manifest.basename, @errorName(e),
});
};
continue;
},
else => |e| fatal("unable to load {s}: {s}", .{
Package.Manifest.basename, @errorName(e),
}),
};
};
var ast = try std.zig.Ast.parse(gpa, manifest_bytes, .zon);
errdefer ast.deinit(gpa);

if (ast.errors.len > 0) {
try std.zig.printAstErrorsToStderr(gpa, ast, Package.Manifest.basename, options.color);
process.exit(2);
}

var manifest = try Package.Manifest.parse(gpa, ast, .{});
errdefer manifest.deinit(gpa);

if (manifest.errors.len > 0) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();

const src_path = try wip_errors.addString(Package.Manifest.basename);
try manifest.copyErrorsIntoBundle(ast, src_path, &wip_errors);

var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(options.color.renderOptions());

process.exit(2);
}
return .{ manifest, ast };
}
@ -31,35 +31,17 @@ allocator: Allocator,
user_input_options: UserInputOptionsMap,
available_options_map: AvailableOptionsMap,
available_options_list: ArrayList(AvailableOption),
verbose: bool,
verbose_link: bool,
verbose_cc: bool,
verbose_air: bool,
verbose_llvm_ir: ?[]const u8,
verbose_llvm_bc: ?[]const u8,
verbose_cimport: bool,
verbose_llvm_cpu_features: bool,
reference_trace: ?u32 = null,
invalid_user_input: bool,
default_step: *Step,
top_level_steps: std.StringArrayHashMapUnmanaged(*TopLevelStep),
install_prefix: []const u8,
dest_dir: ?[]const u8,
lib_dir: []const u8,
exe_dir: []const u8,
h_dir: []const u8,
install_path: []const u8,
sysroot: ?[]const u8 = null,
search_prefixes: std.ArrayListUnmanaged([]const u8),
libc_file: ?[]const u8 = null,
install_prefix: Cache.Path,
install_lib_path: Cache.Path,
install_exe_path: Cache.Path,
install_include_path: Cache.Path,
/// Path to the directory containing build.zig.
build_root: Cache.Directory,
cache_root: Cache.Directory,
pkg_config_pkg_list: ?(PkgConfigError![]const PkgConfigPkg) = null,
args: ?[]const []const u8 = null,
debug_log_scopes: []const []const u8 = &.{},
debug_compile_errors: bool = false,
debug_pkg_config: bool = false,
/// Number of stack frames captured when a `StackTrace` is recorded for debug purposes,
/// in particular at `Step` creation.
/// Set to 0 to disable stack collection.
@ -75,11 +57,6 @@ enable_rosetta: bool = false,
enable_wasmtime: bool = false,
/// Use system Wine installation to run cross compiled Windows build artifacts.
enable_wine: bool = false,
/// After following the steps in https://github.com/ziglang/zig/wiki/Updating-libc#glibc,
/// this will be the directory $glibc-build-dir/install/glibcs
/// Given the example of the aarch64 target, this is the directory
/// that contains the path `aarch64-linux-gnu/lib/ld-linux-aarch64.so.1`.
glibc_runtimes_dir: ?[]const u8 = null,

dep_prefix: []const u8 = "",

@ -92,8 +69,6 @@ pkg_hash: []const u8,
/// A mapping from dependency names to package hashes.
available_deps: AvailableDeps,

release_mode: ReleaseMode,

pub const ReleaseMode = enum {
off,
any,
@ -107,7 +82,7 @@ pub const ReleaseMode = enum {
pub const Graph = struct {
arena: Allocator,
system_library_options: std.StringArrayHashMapUnmanaged(SystemLibraryMode) = .empty,
system_package_mode: bool = false,
system_package_mode: ?Cache.Directory = null,
debug_compiler_runtime_libs: bool = false,
cache: Cache,
zig_exe: [:0]const u8,
@ -121,6 +96,31 @@ pub const Graph = struct {
random_seed: u32 = 0,
dependency_cache: InitializedDepMap = .empty,
allow_so_scripts: ?bool = null,

release_mode: ReleaseMode,
sysroot: ?[]const u8 = null,
search_prefixes: std.ArrayListUnmanaged([]const u8),
libc_file: ?[]const u8 = null,
debug_compile_errors: bool = false,
/// After following the steps in https://github.com/ziglang/zig/wiki/Updating-libc#glibc,
/// this will be the directory $glibc-build-dir/install/glibcs
/// Given the example of the aarch64 target, this is the directory
/// that contains the path `aarch64-linux-gnu/lib/ld-linux-aarch64.so.1`.
glibc_runtimes_dir: ?[]const u8 = null,
verbose: bool,
verbose_link: bool,
verbose_cc: bool,
verbose_air: bool,
verbose_llvm_ir: ?[]const u8,
verbose_llvm_bc: ?[]const u8,
verbose_cimport: bool,
verbose_llvm_cpu_features: bool,
reference_trace: ?u32 = null,
debug_log_scopes: []const []const u8 = &.{},

pub fn addSearchPrefix(b: *Build, search_prefix: []const u8) void {
b.search_prefixes.append(b.allocator, b.dupePath(search_prefix)) catch @panic("OOM");
}
};

const AvailableDeps = []const struct { []const u8, []const u8 };
@ -239,10 +239,10 @@ const TopLevelStep = struct {
description: []const u8,
};

pub const DirList = struct {
lib_dir: ?[]const u8 = null,
exe_dir: ?[]const u8 = null,
include_dir: ?[]const u8 = null,
pub const InstallPaths = struct {
lib_path: ?Cache.Path = null,
exe_path: ?Cache.Path = null,
include_path: ?Cache.Path = null,
};

pub fn create(
@ -259,13 +259,6 @@ pub fn create(
.build_root = build_root,
.cache_root = cache_root,
.verbose = false,
.verbose_link = false,
.verbose_cc = false,
.verbose_air = false,
.verbose_llvm_ir = null,
.verbose_llvm_bc = null,
.verbose_cimport = false,
.verbose_llvm_cpu_features = false,
.invalid_user_input = false,
.allocator = arena,
.user_input_options = UserInputOptionsMap.init(arena),
@ -273,12 +266,10 @@ pub fn create(
.available_options_list = ArrayList(AvailableOption).init(arena),
.top_level_steps = .{},
.default_step = undefined,
.search_prefixes = .{},
.install_prefix = undefined,
.lib_dir = undefined,
.exe_dir = undefined,
.h_dir = undefined,
.dest_dir = graph.env_map.get("DESTDIR"),
.install_tls = .{
.step = Step.init(.{
.id = TopLevelStep.base_id,
@ -297,7 +288,6 @@ pub fn create(
.description = "Remove build artifacts from prefix path",
},
.install_path = undefined,
.args = null,
.modules = .init(arena),
.named_writefiles = .init(arena),
.named_lazy_paths = .init(arena),
@ -358,37 +348,22 @@ fn createChildOnly(
.available_options_map = AvailableOptionsMap.init(allocator),
.available_options_list = ArrayList(AvailableOption).init(allocator),
.verbose = parent.verbose,
.verbose_link = parent.verbose_link,
.verbose_cc = parent.verbose_cc,
.verbose_air = parent.verbose_air,
.verbose_llvm_ir = parent.verbose_llvm_ir,
.verbose_llvm_bc = parent.verbose_llvm_bc,
.verbose_cimport = parent.verbose_cimport,
.verbose_llvm_cpu_features = parent.verbose_llvm_cpu_features,
.reference_trace = parent.reference_trace,
.invalid_user_input = false,
.default_step = undefined,
.top_level_steps = .{},
.install_prefix = undefined,
.dest_dir = parent.dest_dir,
.lib_dir = parent.lib_dir,
.exe_dir = parent.exe_dir,
.h_dir = parent.h_dir,
.install_path = parent.install_path,
.sysroot = parent.sysroot,
.search_prefixes = parent.search_prefixes,
.libc_file = parent.libc_file,
.build_root = build_root,
.cache_root = parent.cache_root,
.debug_log_scopes = parent.debug_log_scopes,
.debug_compile_errors = parent.debug_compile_errors,
.debug_pkg_config = parent.debug_pkg_config,
.enable_darling = parent.enable_darling,
.enable_qemu = parent.enable_qemu,
.enable_rosetta = parent.enable_rosetta,
.enable_wasmtime = parent.enable_wasmtime,
.enable_wine = parent.enable_wine,
.glibc_runtimes_dir = parent.glibc_runtimes_dir,
.dep_prefix = parent.fmt("{s}{s}.", .{ parent.dep_prefix, dep_name }),
.modules = .init(allocator),
.named_writefiles = .init(allocator),
@ -638,42 +613,16 @@ fn determineAndApplyInstallPrefix(b: *Build) error{OutOfMemory}!void {

const digest = hash.final();
const install_prefix = try b.cache_root.join(b.allocator, &.{ "i", &digest });
b.resolveInstallPrefix(install_prefix, .{});
try b.resolveInstallPrefix(install_prefix, .{});
}

/// This function is intended to be called by lib/build_runner.zig, not a build.zig file.
pub fn resolveInstallPrefix(b: *Build, install_prefix: ?[]const u8, dir_list: DirList) void {
if (b.dest_dir) |dest_dir| {
b.install_prefix = install_prefix orelse "/usr";
b.install_path = b.pathJoin(&.{ dest_dir, b.install_prefix });
} else {
b.install_prefix = install_prefix orelse
(b.build_root.join(b.allocator, &.{"zig-out"}) catch @panic("unhandled error"));
b.install_path = b.install_prefix;
}
fn resolveInstallPrefix(b: *Build, install_prefix: Cache.Path, paths: InstallPaths) !void {
const arena = b.allocator;

var lib_list = [_][]const u8{ b.install_path, "lib" };
var exe_list = [_][]const u8{ b.install_path, "bin" };
var h_list = [_][]const u8{ b.install_path, "include" };

if (dir_list.lib_dir) |dir| {
if (fs.path.isAbsolute(dir)) lib_list[0] = b.dest_dir orelse "";
lib_list[1] = dir;
}

if (dir_list.exe_dir) |dir| {
if (fs.path.isAbsolute(dir)) exe_list[0] = b.dest_dir orelse "";
exe_list[1] = dir;
}

if (dir_list.include_dir) |dir| {
if (fs.path.isAbsolute(dir)) h_list[0] = b.dest_dir orelse "";
h_list[1] = dir;
}

b.lib_dir = b.pathJoin(&lib_list);
b.exe_dir = b.pathJoin(&exe_list);
b.h_dir = b.pathJoin(&h_list);
b.install_prefix = install_prefix;
b.install_lib_path = paths.lib_path orelse try install_prefix.join(arena, "lib");
b.install_exe_path = paths.exe_path orelse try install_prefix.join(arena, "bin");
b.install_include_path = paths.include_path orelse try install_prefix.join(arena, "include");
}

/// Create a set of key-value pairs that can be converted into a Zig source
@ -1990,38 +1939,6 @@ fn tryFindProgram(b: *Build, full_path: []const u8) ?[]const u8 {
return null;
}

pub fn findProgram(b: *Build, names: []const []const u8, paths: []const []const u8) error{FileNotFound}![]const u8 {
// TODO report error for ambiguous situations
for (b.search_prefixes.items) |search_prefix| {
for (names) |name| {
if (fs.path.isAbsolute(name)) {
return name;
}
return tryFindProgram(b, b.pathJoin(&.{ search_prefix, "bin", name })) orelse continue;
}
}
if (b.graph.env_map.get("PATH")) |PATH| {
for (names) |name| {
if (fs.path.isAbsolute(name)) {
return name;
}
var it = mem.tokenizeScalar(u8, PATH, fs.path.delimiter);
while (it.next()) |p| {
return tryFindProgram(b, b.pathJoin(&.{ p, name })) orelse continue;
}
}
}
for (names) |name| {
if (fs.path.isAbsolute(name)) {
return name;
}
for (paths) |p| {
return tryFindProgram(b, b.pathJoin(&.{ p, name })) orelse continue;
}
}
return error.FileNotFound;
}

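// Editor's illustration (not part of this change): a hypothetical helper using
// findProgram above. Search prefixes added via addSearchPrefix are consulted first,
// then PATH, then the fallback paths passed by the caller.
fn findPythonSketch(b: *Build) ?[]const u8 {
    return b.findProgram(&.{ "python3", "python" }, &.{"/usr/local/bin"}) catch null;
}
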
pub fn runAllowFail(
b: *Build,
argv: []const []const u8,
@ -2085,10 +2002,6 @@ pub fn run(b: *Build, argv: []const []const u8) []u8 {
};
}

pub fn addSearchPrefix(b: *Build, search_prefix: []const u8) void {
b.search_prefixes.append(b.allocator, b.dupePath(search_prefix)) catch @panic("OOM");
}

pub fn getInstallPath(b: *Build, dir: InstallDir, dest_rel_path: []const u8) []const u8 {
assert(!fs.path.isAbsolute(dest_rel_path)); // Install paths must be relative to the prefix
const base_dir = switch (dir) {

@ -25,6 +25,7 @@ pub const WindowsSdk = @import("zig/WindowsSdk.zig");
pub const LibCDirs = @import("zig/LibCDirs.zig");
pub const target = @import("zig/target.zig");
pub const llvm = @import("zig/llvm.zig");
pub const Package = @import("zig/Package.zig");

// Character literal parsing
pub const ParsedCharLiteral = string_literal.ParsedCharLiteral;

@ -1,7 +1,7 @@
const std = @import("std");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;

pub const Module = @import("Package/Module.zig");
pub const Fetch = @import("Package/Fetch.zig");
pub const build_zig_basename = "build.zig";
pub const Manifest = @import("Package/Manifest.zig");
@ -195,6 +195,113 @@ test Hash {
try std.testing.expectEqualStrings("nasm-2.16.1-3-vrr-ygAAoADH9XG3tOdvPNuHen_d-XeHndOG-nNXmved", result.toSlice());
}

pub fn sanitizeExampleName(arena: Allocator, bytes: []const u8) error{OutOfMemory}![]const u8 {
var result: std.ArrayListUnmanaged(u8) = .empty;
for (bytes, 0..) |byte, i| switch (byte) {
'0'...'9' => {
if (i == 0) try result.append(arena, '_');
try result.append(arena, byte);
},
'_', 'a'...'z', 'A'...'Z' => try result.append(arena, byte),
'-', '.', ' ' => try result.append(arena, '_'),
else => continue,
};
if (!std.zig.isValidId(result.items)) return "foo";
if (result.items.len > Manifest.max_name_len)
result.shrinkRetainingCapacity(Manifest.max_name_len);

return result.toOwnedSlice(arena);
}

test sanitizeExampleName {
var arena_instance = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena_instance.deinit();
const arena = arena_instance.allocator();

try std.testing.expectEqualStrings("foo_bar", try sanitizeExampleName(arena, "foo bar+"));
try std.testing.expectEqualStrings("foo", try sanitizeExampleName(arena, ""));
try std.testing.expectEqualStrings("foo", try sanitizeExampleName(arena, "!"));
try std.testing.expectEqualStrings("a", try sanitizeExampleName(arena, "!a"));
try std.testing.expectEqualStrings("a_b", try sanitizeExampleName(arena, "a.b!"));
try std.testing.expectEqualStrings("_01234", try sanitizeExampleName(arena, "01234"));
try std.testing.expectEqualStrings("foo", try sanitizeExampleName(arena, "error"));
try std.testing.expectEqualStrings("foo", try sanitizeExampleName(arena, "test"));
try std.testing.expectEqualStrings("tests", try sanitizeExampleName(arena, "tests"));
try std.testing.expectEqualStrings("test_project", try sanitizeExampleName(arena, "test project"));
}

pub const BuildRoot = struct {
directory: std.Build.Cache.Directory,
build_zig_basename: []const u8,
cleanup_build_dir: ?std.fs.Dir,

fn deinit(br: *BuildRoot) void {
if (br.cleanup_build_dir) |*dir| dir.close();
br.* = undefined;
}
};

pub const FindBuildRootOptions = struct {
build_file: ?[]const u8 = null,
cwd_path: ?[]const u8 = null,
};

pub fn findBuildRoot(arena: Allocator, options: FindBuildRootOptions) !BuildRoot {
const cwd_path = options.cwd_path orelse try std.process.getCwdAlloc(arena);
const basename = if (options.build_file) |bf| std.fs.path.basename(bf) else build_zig_basename;

if (options.build_file) |bf| {
if (std.fs.path.dirname(bf)) |dirname| {
const dir = std.fs.cwd().openDir(dirname, .{}) catch |err| {
std.process.fatal("unable to open directory to build file from argument 'build-file', '{s}': {s}", .{ dirname, @errorName(err) });
};
return .{
.build_zig_basename = basename,
.directory = .{ .path = dirname, .handle = dir },
.cleanup_build_dir = dir,
};
}

return .{
.build_zig_basename = basename,
.directory = .{ .path = null, .handle = std.fs.cwd() },
.cleanup_build_dir = null,
};
}
// Search up parent directories until we find build.zig.
var dirname: []const u8 = cwd_path;
while (true) {
const joined_path = try std.fs.path.join(arena, &[_][]const u8{ dirname, basename });
if (std.fs.cwd().access(joined_path, .{})) |_| {
const dir = std.fs.cwd().openDir(dirname, .{}) catch |err| {
std.process.fatal("unable to open directory while searching for {s} file, '{s}': {s}", .{
basename, dirname, @errorName(err),
});
};
return .{
.build_zig_basename = basename,
.directory = .{
.path = dirname,
.handle = dir,
},
.cleanup_build_dir = dir,
};
} else |err| switch (err) {
error.FileNotFound => {
dirname = std.fs.path.dirname(dirname) orelse {
std.log.info("initialize {s} template file with 'zig init'", .{basename});
std.log.info("see 'zig --help' for more options", .{});
std.process.fatal("no {s} file found, in the current directory or any parent directories", .{
|
||||
basename,
|
||||
});
|
||||
};
continue;
},
else => |e| return e,
}
}
}

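// Editor's illustration (not part of this change): a hypothetical helper showing how
// findBuildRoot above would typically be used from within this file. The helper name
// and the log message are made up for the example.
fn logBuildRootSketch(arena: Allocator) !void {
    var build_root = try findBuildRoot(arena, .{});
    defer build_root.deinit();
    std.log.info("found {s} in '{s}'", .{
        build_root.build_zig_basename,
        build_root.directory.path orelse ".",
    });
}
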
test {
_ = Fetch;
}
@ -68,11 +68,7 @@ oom_flag: bool,
/// will be left unchanged.
latest_commit: ?git.Oid,

// This field is used by the CLI only, untouched by this file.

/// The module for this `Fetch` task's package, which exposes `build.zig` as
/// the root source file.
module: ?*Package.Module,
userdata: ?*anyopaque = null,

pub const LazyStatus = enum {
/// Not lazy.
@ -773,8 +769,6 @@ fn queueJobsForDeps(f: *Fetch) RunError!void {
.has_build_zig = false,
.oom_flag = false,
.latest_commit = null,

.module = null,
};
}

@ -2352,8 +2346,6 @@ const TestFetchBuilder = struct {
.has_build_zig = false,
.oom_flag = false,
.latest_commit = null,

.module = null,
};
return &self.fetch;
}
90 lib/std/zig/Package/Templates.zig Normal file
@ -0,0 +1,90 @@
const std = @import("../../std.zig");
|
||||
const Directory = std.Build.Cache.Directory;
|
||||
const fs = std.fs;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const fatal = std.process.fatal;
|
||||
|
||||
const Templates = @This();
|
||||
|
||||
zig_lib_directory: Directory,
|
||||
dir: fs.Dir,
|
||||
buffer: std.ArrayListUnmanaged(u8),
|
||||
|
||||
pub fn find(gpa: Allocator, zig_lib_directory: Directory) Templates {
const s = fs.path.sep_str;
const template_sub_path = "init";
const template_dir = zig_lib_directory.handle.openDir(template_sub_path, .{}) catch |err| {
const path = zig_lib_directory.path orelse ".";
fatal("unable to open zig project template directory '{s}{s}{s}': {s}", .{
path, s, template_sub_path, @errorName(err),
});
};

return .{
.zig_lib_directory = zig_lib_directory,
.dir = template_dir,
.buffer = .empty,
};
}

pub fn deinit(templates: *Templates, gpa: Allocator) void {
templates.zig_lib_directory.handle.close();
templates.dir.close();
templates.buffer.deinit(gpa);
templates.* = undefined;
}

pub fn write(
templates: *Templates,
gpa: Allocator,
out_dir: fs.Dir,
root_name: []const u8,
template_path: []const u8,
fingerprint: std.zig.Package.Fingerprint,
zig_version_string: []const u8,
) !void {
if (fs.path.dirname(template_path)) |dirname| {
out_dir.makePath(dirname) catch |err| {
fatal("unable to make path '{s}': {s}", .{ dirname, @errorName(err) });
};
}

const max_bytes = 10 * 1024 * 1024;
const contents = templates.dir.readFileAlloc(gpa, template_path, max_bytes) catch |err| {
fatal("unable to read template file '{s}': {s}", .{ template_path, @errorName(err) });
};
defer gpa.free(contents);
templates.buffer.clearRetainingCapacity();
try templates.buffer.ensureUnusedCapacity(gpa, contents.len);
var i: usize = 0;
while (i < contents.len) {
if (contents[i] == '.') {
if (std.mem.startsWith(u8, contents[i..], ".LITNAME")) {
try templates.buffer.append(gpa, '.');
try templates.buffer.appendSlice(gpa, root_name);
i += ".LITNAME".len;
continue;
} else if (std.mem.startsWith(u8, contents[i..], ".NAME")) {
try templates.buffer.appendSlice(gpa, root_name);
i += ".NAME".len;
continue;
} else if (std.mem.startsWith(u8, contents[i..], ".FINGERPRINT")) {
try templates.buffer.writer(gpa).print("0x{x}", .{fingerprint.int()});
i += ".FINGERPRINT".len;
continue;
} else if (std.mem.startsWith(u8, contents[i..], ".ZIGVER")) {
try templates.buffer.appendSlice(gpa, zig_version_string);
i += ".ZIGVER".len;
continue;
}
}
try templates.buffer.append(gpa, contents[i]);
i += 1;
}

return out_dir.writeFile(.{
.sub_path = template_path,
.data = templates.buffer.items,
.flags = .{ .exclusive = true },
});
}
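
// Editor's illustration (not part of this change): a hypothetical end-to-end use of the
// Templates helper above, expanding one template file into a destination directory. The
// root name, template path and version string are made up; the fingerprint is produced
// the same way the caller in main.zig does.
fn scaffoldSketch(gpa: Allocator, zig_lib_directory: Directory, out_dir: fs.Dir) !void {
    var templates = find(gpa, zig_lib_directory);
    // Note: deinit also closes zig_lib_directory.handle, per the implementation above.
    defer templates.deinit(gpa);
    const fingerprint: std.zig.Package.Fingerprint = .generate("example");
    try templates.write(gpa, out_dir, "example", "build.zig.zon", fingerprint, "0.14.0");
}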
@ -54,7 +54,6 @@ pub const Env = enum {
.test_command,
.run_command,
.ar_command,
.build_command,
.clang_command,
.stdio_listen,
.build_import_lib,
@ -87,7 +86,6 @@ pub const Env = enum {
.translate_c_command,
.fmt_command,
.jit_command,
.fetch_command,
.init_command,
.targets_command,
.version_command,
@ -135,7 +133,6 @@ pub const Env = enum {
else => Env.ast_gen.supports(feature),
},
.@"x86_64-linux" => switch (feature) {
.build_command,
.stdio_listen,
.incremental,
.x86_64_backend,
@ -178,13 +175,11 @@ pub const Feature = enum {
test_command,
run_command,
ar_command,
build_command,
clang_command,
cc_command,
translate_c_command,
fmt_command,
jit_command,
fetch_command,
init_command,
targets_command,
version_command,

1399 src/main.zig (file diff suppressed because it is too large)