Mirror of https://github.com/ziglang/zig.git (synced 2025-12-06 06:13:07 +00:00)
get zig fetch working with the new system
* Start renaming "package" to "module" (see #14307). The build system gains `main_mod_path`; `main_pkg_path` is still there but deprecated.
* Eliminate the object-oriented memory management style of what was previously `*Package`. It is now `*Package.Module`, and all pointers point to externally managed memory.
* Fixes to get the new Fetch.zig code working. The previous commit was work-in-progress. There are still two commented-out code paths: the one that leads to `Compilation.create`, and the one for `zig build` that fetches the entire dependency tree and creates the required modules for the build runner.
Parent: 88bbec8f9b
Commit: d0bcc390e8
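For context on the first bullet, here is a minimal build.zig sketch (not part of this commit; the project name and source path are hypothetical placeholders) showing the renamed option as exposed by `std.Build.addExecutable` after this change. The deprecated `main_pkg_path` field still compiles, and the implementation forwards `options.main_mod_path orelse options.main_pkg_path`.

const std = @import("std");

pub fn build(b: *std.Build) void {
    const exe = b.addExecutable(.{
        // "demo" and "src/main.zig" are placeholders for illustration.
        .name = "demo",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = b.standardTargetOptions(.{}),
        .optimize = b.standardOptimizeOption(.{}),
        // New spelling introduced by this commit; the deprecated
        // `.main_pkg_path` field is still accepted for now.
        .main_mod_path = .{ .path = "." },
    });
    b.installArtifact(exe);
}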
@@ -88,7 +88,7 @@ pub fn build(b: *std.Build) !void {
 .name = "check-case",
 .root_source_file = .{ .path = "test/src/Cases.zig" },
 .optimize = optimize,
-.main_pkg_path = .{ .path = "." },
+.main_mod_path = .{ .path = "." },
 });
 check_case_exe.stack_size = stack_size;
 check_case_exe.single_threaded = single_threaded;

@@ -634,6 +634,9 @@ pub const ExecutableOptions = struct {
 use_llvm: ?bool = null,
 use_lld: ?bool = null,
 zig_lib_dir: ?LazyPath = null,
+main_mod_path: ?LazyPath = null,
+
+/// Deprecated; use `main_mod_path`.
 main_pkg_path: ?LazyPath = null,
 };

@@ -652,7 +655,7 @@ pub fn addExecutable(b: *Build, options: ExecutableOptions) *Step.Compile {
 .use_llvm = options.use_llvm,
 .use_lld = options.use_lld,
 .zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
-.main_pkg_path = options.main_pkg_path,
+.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
 });
 }

@@ -667,6 +670,9 @@ pub const ObjectOptions = struct {
 use_llvm: ?bool = null,
 use_lld: ?bool = null,
 zig_lib_dir: ?LazyPath = null,
+main_mod_path: ?LazyPath = null,
+
+/// Deprecated; use `main_mod_path`.
 main_pkg_path: ?LazyPath = null,
 };

@@ -683,7 +689,7 @@ pub fn addObject(b: *Build, options: ObjectOptions) *Step.Compile {
 .use_llvm = options.use_llvm,
 .use_lld = options.use_lld,
 .zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
-.main_pkg_path = options.main_pkg_path,
+.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
 });
 }

@@ -699,6 +705,9 @@ pub const SharedLibraryOptions = struct {
 use_llvm: ?bool = null,
 use_lld: ?bool = null,
 zig_lib_dir: ?LazyPath = null,
+main_mod_path: ?LazyPath = null,
+
+/// Deprecated; use `main_mod_path`.
 main_pkg_path: ?LazyPath = null,
 };

@@ -717,7 +726,7 @@ pub fn addSharedLibrary(b: *Build, options: SharedLibraryOptions) *Step.Compile
 .use_llvm = options.use_llvm,
 .use_lld = options.use_lld,
 .zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
-.main_pkg_path = options.main_pkg_path,
+.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
 });
 }

@@ -733,6 +742,9 @@ pub const StaticLibraryOptions = struct {
 use_llvm: ?bool = null,
 use_lld: ?bool = null,
 zig_lib_dir: ?LazyPath = null,
+main_mod_path: ?LazyPath = null,
+
+/// Deprecated; use `main_mod_path`.
 main_pkg_path: ?LazyPath = null,
 };

@@ -751,7 +763,7 @@ pub fn addStaticLibrary(b: *Build, options: StaticLibraryOptions) *Step.Compile
 .use_llvm = options.use_llvm,
 .use_lld = options.use_lld,
 .zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
-.main_pkg_path = options.main_pkg_path,
+.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
 });
 }

@@ -769,6 +781,9 @@ pub const TestOptions = struct {
 use_llvm: ?bool = null,
 use_lld: ?bool = null,
 zig_lib_dir: ?LazyPath = null,
+main_mod_path: ?LazyPath = null,
+
+/// Deprecated; use `main_mod_path`.
 main_pkg_path: ?LazyPath = null,
 };

@@ -787,7 +802,7 @@ pub fn addTest(b: *Build, options: TestOptions) *Step.Compile {
 .use_llvm = options.use_llvm,
 .use_lld = options.use_lld,
 .zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
-.main_pkg_path = options.main_pkg_path,
+.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
 });
 }

@@ -9,6 +9,13 @@ pub const Directory = struct {
 path: ?[]const u8,
 handle: fs.Dir,

+pub fn cwd() Directory {
+return .{
+.path = null,
+.handle = fs.cwd(),
+};
+}
+
 pub fn join(self: Directory, allocator: Allocator, paths: []const []const u8) ![]u8 {
 if (self.path) |p| {
 // TODO clean way to do this with only 1 allocation

@@ -53,6 +60,10 @@ pub const Directory = struct {
 try writer.writeAll(fs.path.sep_str);
 }
 }
+
+pub fn eql(self: Directory, other: Directory) bool {
+return self.handle.fd == other.handle.fd;
+}
 };

 gpa: Allocator,

@@ -68,7 +68,7 @@ c_std: std.Build.CStd,
 /// Set via options; intended to be read-only after that.
 zig_lib_dir: ?LazyPath,
 /// Set via options; intended to be read-only after that.
-main_pkg_path: ?LazyPath,
+main_mod_path: ?LazyPath,
 exec_cmd_args: ?[]const ?[]const u8,
 filter: ?[]const u8,
 test_evented_io: bool = false,

@@ -316,6 +316,9 @@ pub const Options = struct {
 use_llvm: ?bool = null,
 use_lld: ?bool = null,
 zig_lib_dir: ?LazyPath = null,
+main_mod_path: ?LazyPath = null,
+
+/// deprecated; use `main_mod_path`.
 main_pkg_path: ?LazyPath = null,
 };

@@ -480,7 +483,7 @@ pub fn create(owner: *std.Build, options: Options) *Compile {
 .installed_headers = ArrayList(*Step).init(owner.allocator),
 .c_std = std.Build.CStd.C99,
 .zig_lib_dir = null,
-.main_pkg_path = null,
+.main_mod_path = null,
 .exec_cmd_args = null,
 .filter = options.filter,
 .test_runner = options.test_runner,

@@ -515,8 +518,8 @@ pub fn create(owner: *std.Build, options: Options) *Compile {
 lp.addStepDependencies(&self.step);
 }

-if (options.main_pkg_path) |lp| {
-self.main_pkg_path = lp.dupe(self.step.owner);
+if (options.main_mod_path orelse options.main_pkg_path) |lp| {
+self.main_mod_path = lp.dupe(self.step.owner);
 lp.addStepDependencies(&self.step);
 }

@@ -1998,8 +2001,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
 try zig_args.append(dir.getPath(b));
 }

-if (self.main_pkg_path) |dir| {
-try zig_args.append("--main-pkg-path");
+if (self.main_mod_path) |dir| {
+try zig_args.append("--main-mod-path");
 try zig_args.append(dir.getPath(b));
 }

@@ -273,8 +273,8 @@ const Job = union(enum) {
 /// The source file containing the Decl has been updated, and so the
 /// Decl may need its line number information updated in the debug info.
 update_line_number: Module.Decl.Index,
-/// The main source file for the package needs to be analyzed.
-analyze_pkg: *Package,
+/// The main source file for the module needs to be analyzed.
+analyze_mod: *Package.Module,

 /// one of the glibc static objects
 glibc_crt_file: glibc.CRTFile,

@@ -414,7 +414,7 @@ pub const MiscTask = enum {
 compiler_rt,
 libssp,
 zig_libc,
-analyze_pkg,
+analyze_mod,

 @"musl crti.o",
 @"musl crtn.o",

@@ -544,7 +544,7 @@ pub const InitOptions = struct {
 global_cache_directory: Directory,
 target: Target,
 root_name: []const u8,
-main_pkg: ?*Package,
+main_mod: ?*Package.Module,
 output_mode: std.builtin.OutputMode,
 thread_pool: *ThreadPool,
 dynamic_linker: ?[]const u8 = null,

@@ -736,53 +736,53 @@ pub const InitOptions = struct {
 pdb_out_path: ?[]const u8 = null,
 };

-fn addPackageTableToCacheHash(
+fn addModuleTableToCacheHash(
 hash: *Cache.HashHelper,
 arena: *std.heap.ArenaAllocator,
-pkg_table: Package.Table,
-seen_table: *std.AutoHashMap(*Package, void),
+mod_table: Package.Module.Deps,
+seen_table: *std.AutoHashMap(*Package.Module, void),
 hash_type: union(enum) { path_bytes, files: *Cache.Manifest },
 ) (error{OutOfMemory} || std.os.GetCwdError)!void {
 const allocator = arena.allocator();

-const packages = try allocator.alloc(Package.Table.KV, pkg_table.count());
+const modules = try allocator.alloc(Package.Module.Deps.KV, mod_table.count());
 {
 // Copy over the hashmap entries to our slice
-var table_it = pkg_table.iterator();
+var table_it = mod_table.iterator();
 var idx: usize = 0;
 while (table_it.next()) |entry| : (idx += 1) {
-packages[idx] = .{
+modules[idx] = .{
 .key = entry.key_ptr.*,
 .value = entry.value_ptr.*,
 };
 }
 }
 // Sort the slice by package name
-mem.sort(Package.Table.KV, packages, {}, struct {
-fn lessThan(_: void, lhs: Package.Table.KV, rhs: Package.Table.KV) bool {
+mem.sortUnstable(Package.Module.Deps.KV, modules, {}, struct {
+fn lessThan(_: void, lhs: Package.Module.Deps.KV, rhs: Package.Module.Deps.KV) bool {
 return std.mem.lessThan(u8, lhs.key, rhs.key);
 }
 }.lessThan);

-for (packages) |pkg| {
-if ((try seen_table.getOrPut(pkg.value)).found_existing) continue;
+for (modules) |mod| {
+if ((try seen_table.getOrPut(mod.value)).found_existing) continue;

 // Finally insert the package name and path to the cache hash.
-hash.addBytes(pkg.key);
+hash.addBytes(mod.key);
 switch (hash_type) {
 .path_bytes => {
-hash.addBytes(pkg.value.root_src_path);
-hash.addOptionalBytes(pkg.value.root_src_directory.path);
+hash.addBytes(mod.value.root_src_path);
+hash.addOptionalBytes(mod.value.root_src_directory.path);
 },
 .files => |man| {
-const pkg_zig_file = try pkg.value.root_src_directory.join(allocator, &[_][]const u8{
-pkg.value.root_src_path,
+const pkg_zig_file = try mod.value.root_src_directory.join(allocator, &[_][]const u8{
+mod.value.root_src_path,
 });
 _ = try man.addFile(pkg_zig_file, null);
 },
 }
-// Recurse to handle the package's dependencies
-try addPackageTableToCacheHash(hash, arena, pkg.value.table, seen_table, hash_type);
+// Recurse to handle the module's dependencies
+try addModuleTableToCacheHash(hash, arena, mod.value.deps, seen_table, hash_type);
 }
 }
@@ -839,7 +839,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 break :blk true;

 // If we have no zig code to compile, no need for LLVM.
-if (options.main_pkg == null)
+if (options.main_mod == null)
 break :blk false;

 // If LLVM does not support the target, then we can't use it.

@@ -869,7 +869,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 // compiler state, the second clause here can be removed so that incremental
 // cache mode is used for LLVM backend too. We need some fuzz testing before
 // that can be enabled.
-const cache_mode = if ((use_llvm or options.main_pkg == null) and !options.disable_lld_caching)
+const cache_mode = if ((use_llvm or options.main_mod == null) and !options.disable_lld_caching)
 CacheMode.whole
 else
 options.cache_mode;

@@ -925,7 +925,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 if (use_llvm) {
 // If stage1 generates an object file, self-hosted linker is not
 // yet sophisticated enough to handle that.
-break :blk options.main_pkg != null;
+break :blk options.main_mod != null;
 }

 break :blk false;

@@ -1210,7 +1210,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 if (options.target.os.tag == .wasi) cache.hash.add(wasi_exec_model);
 // TODO audit this and make sure everything is in it

-const module: ?*Module = if (options.main_pkg) |main_pkg| blk: {
+const module: ?*Module = if (options.main_mod) |main_mod| blk: {
 // Options that are specific to zig source files, that cannot be
 // modified between incremental updates.
 var hash = cache.hash;

@@ -1223,11 +1223,12 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 // do want to namespace different source file names because they are
 // likely different compilations and therefore this would be likely to
 // cause cache hits.
-hash.addBytes(main_pkg.root_src_path);
-hash.addOptionalBytes(main_pkg.root_src_directory.path);
+hash.addBytes(main_mod.root_src_path);
+hash.addOptionalBytes(main_mod.root.root_dir.path);
+hash.addBytes(main_mod.root.sub_path);
 {
-var seen_table = std.AutoHashMap(*Package, void).init(arena);
-try addPackageTableToCacheHash(&hash, &arena_allocator, main_pkg.table, &seen_table, .path_bytes);
+var seen_table = std.AutoHashMap(*Package.Module, void).init(arena);
+try addModuleTableToCacheHash(&hash, &arena_allocator, main_mod.deps, &seen_table, .path_bytes);
 }
 },
 .whole => {
@@ -1283,34 +1284,31 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 .path = try options.local_cache_directory.join(arena, &[_][]const u8{artifact_sub_dir}),
 };

-const builtin_pkg = try Package.createWithDir(
-gpa,
-zig_cache_artifact_directory,
-null,
-"builtin.zig",
-);
-errdefer builtin_pkg.destroy(gpa);
+const builtin_mod = try Package.Module.create(arena, .{
+.root = .{ .root_dir = zig_cache_artifact_directory },
+.root_src_path = "builtin.zig",
+});

-// When you're testing std, the main module is std. In that case, we'll just set the std
-// module to the main one, since avoiding the errors caused by duplicating it is more
-// effort than it's worth.
-const main_pkg_is_std = m: {
+// When you're testing std, the main module is std. In that case,
+// we'll just set the std module to the main one, since avoiding
+// the errors caused by duplicating it is more effort than it's
+// worth.
+const main_mod_is_std = m: {
 const std_path = try std.fs.path.resolve(arena, &[_][]const u8{
 options.zig_lib_directory.path orelse ".",
 "std",
 "std.zig",
 });
-defer arena.free(std_path);
 const main_path = try std.fs.path.resolve(arena, &[_][]const u8{
-main_pkg.root_src_directory.path orelse ".",
-main_pkg.root_src_path,
+main_mod.root.root_dir.path orelse ".",
+main_mod.root.sub_path,
+main_mod.root_src_path,
 });
-defer arena.free(main_path);
 break :m mem.eql(u8, main_path, std_path);
 };

-const std_pkg = if (main_pkg_is_std)
-main_pkg
+const std_mod = if (main_mod_is_std)
+main_mod
 else
 try Package.createWithDir(
 gpa,

@@ -1319,16 +1317,16 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 "std.zig",
 );

-errdefer if (!main_pkg_is_std) std_pkg.destroy(gpa);
+errdefer if (!main_mod_is_std) std_mod.destroy(gpa);

-const root_pkg = if (options.is_test) root_pkg: {
+const root_mod = if (options.is_test) root_mod: {
 const test_pkg = if (options.test_runner_path) |test_runner| test_pkg: {
 const test_dir = std.fs.path.dirname(test_runner);
 const basename = std.fs.path.basename(test_runner);
 const pkg = try Package.create(gpa, test_dir, basename);

-// copy package table from main_pkg to root_pkg
-pkg.table = try main_pkg.table.clone(gpa);
+// copy module table from main_mod to root_mod
+pkg.deps = try main_mod.deps.clone(gpa);
 break :test_pkg pkg;
 } else try Package.createWithDir(
 gpa,

@@ -1338,26 +1336,26 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 );
 errdefer test_pkg.destroy(gpa);

-break :root_pkg test_pkg;
-} else main_pkg;
-errdefer if (options.is_test) root_pkg.destroy(gpa);
+break :root_mod test_pkg;
+} else main_mod;
+errdefer if (options.is_test) root_mod.destroy(gpa);

-const compiler_rt_pkg = if (include_compiler_rt and options.output_mode == .Obj) compiler_rt_pkg: {
-break :compiler_rt_pkg try Package.createWithDir(
+const compiler_rt_mod = if (include_compiler_rt and options.output_mode == .Obj) compiler_rt_mod: {
+break :compiler_rt_mod try Package.createWithDir(
 gpa,
 options.zig_lib_directory,
 null,
 "compiler_rt.zig",
 );
 } else null;
-errdefer if (compiler_rt_pkg) |p| p.destroy(gpa);
+errdefer if (compiler_rt_mod) |p| p.destroy(gpa);

-try main_pkg.add(gpa, "builtin", builtin_pkg);
-try main_pkg.add(gpa, "root", root_pkg);
-try main_pkg.add(gpa, "std", std_pkg);
+try main_mod.add(gpa, "builtin", builtin_mod);
+try main_mod.add(gpa, "root", root_mod);
+try main_mod.add(gpa, "std", std_mod);

-if (compiler_rt_pkg) |p| {
-try main_pkg.add(gpa, "compiler_rt", p);
+if (compiler_rt_mod) |p| {
+try main_mod.add(gpa, "compiler_rt", p);
 }

 // Pre-open the directory handles for cached ZIR code so that it does not need
@@ -1395,8 +1393,8 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
 module.* = .{
 .gpa = gpa,
 .comp = comp,
-.main_pkg = main_pkg,
-.root_pkg = root_pkg,
+.main_mod = main_mod,
+.root_mod = root_mod,
 .zig_cache_artifact_directory = zig_cache_artifact_directory,
 .global_zir_cache = global_zir_cache,
 .local_zir_cache = local_zir_cache,

@@ -2005,8 +2003,8 @@ fn restorePrevZigCacheArtifactDirectory(comp: *Compilation, directory: *Director
 // This is only for cleanup purposes; Module.deinit calls close
 // on the handle of zig_cache_artifact_directory.
 if (comp.bin_file.options.module) |module| {
-const builtin_pkg = module.main_pkg.table.get("builtin").?;
-module.zig_cache_artifact_directory = builtin_pkg.root_src_directory;
+const builtin_mod = module.main_mod.deps.get("builtin").?;
+module.zig_cache_artifact_directory = builtin_mod.root_src_directory;
 }
 }

@@ -2148,8 +2146,8 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void

 // Make sure std.zig is inside the import_table. We unconditionally need
 // it for start.zig.
-const std_pkg = module.main_pkg.table.get("std").?;
-_ = try module.importPkg(std_pkg);
+const std_mod = module.main_mod.deps.get("std").?;
+_ = try module.importPkg(std_mod);

 // Normally we rely on importing std to in turn import the root source file
 // in the start code, but when using the stage1 backend that won't happen,

@@ -2158,11 +2156,11 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
 // Likewise, in the case of `zig test`, the test runner is the root source file,
 // and so there is nothing to import the main file.
 if (comp.bin_file.options.is_test) {
-_ = try module.importPkg(module.main_pkg);
+_ = try module.importPkg(module.main_mod);
 }

-if (module.main_pkg.table.get("compiler_rt")) |compiler_rt_pkg| {
-_ = try module.importPkg(compiler_rt_pkg);
+if (module.main_mod.deps.get("compiler_rt")) |compiler_rt_mod| {
+_ = try module.importPkg(compiler_rt_mod);
 }

 // Put a work item in for every known source file to detect if

@@ -2185,13 +2183,13 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
 }
 }

-try comp.work_queue.writeItem(.{ .analyze_pkg = std_pkg });
+try comp.work_queue.writeItem(.{ .analyze_mod = std_mod });
 if (comp.bin_file.options.is_test) {
-try comp.work_queue.writeItem(.{ .analyze_pkg = module.main_pkg });
+try comp.work_queue.writeItem(.{ .analyze_mod = module.main_mod });
 }

-if (module.main_pkg.table.get("compiler_rt")) |compiler_rt_pkg| {
-try comp.work_queue.writeItem(.{ .analyze_pkg = compiler_rt_pkg });
+if (module.main_mod.deps.get("compiler_rt")) |compiler_rt_mod| {
+try comp.work_queue.writeItem(.{ .analyze_mod = compiler_rt_mod });
 }
 }

@@ -2420,19 +2418,19 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
 comptime assert(link_hash_implementation_version == 10);

 if (comp.bin_file.options.module) |mod| {
-const main_zig_file = try mod.main_pkg.root_src_directory.join(arena, &[_][]const u8{
-mod.main_pkg.root_src_path,
+const main_zig_file = try mod.main_mod.root_src_directory.join(arena, &[_][]const u8{
+mod.main_mod.root_src_path,
 });
 _ = try man.addFile(main_zig_file, null);
 {
-var seen_table = std.AutoHashMap(*Package, void).init(arena);
+var seen_table = std.AutoHashMap(*Package.Module, void).init(arena);

 // Skip builtin.zig; it is useless as an input, and we don't want to have to
 // write it before checking for a cache hit.
-const builtin_pkg = mod.main_pkg.table.get("builtin").?;
-try seen_table.put(builtin_pkg, {});
+const builtin_mod = mod.main_mod.deps.get("builtin").?;
+try seen_table.put(builtin_mod, {});

-try addPackageTableToCacheHash(&man.hash, &arena_allocator, mod.main_pkg.table, &seen_table, .{ .files = man });
+try addModuleTableToCacheHash(&man.hash, &arena_allocator, mod.main_mod.deps, &seen_table, .{ .files = man });
 }

 // Synchronize with other matching comments: ZigOnlyHashStuff

@@ -3564,8 +3562,8 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
 decl.analysis = .codegen_failure_retryable;
 };
 },
-.analyze_pkg => |pkg| {
-const named_frame = tracy.namedFrame("analyze_pkg");
+.analyze_mod => |pkg| {
+const named_frame = tracy.namedFrame("analyze_mod");
 defer named_frame.end();

 const module = comp.bin_file.options.module.?;

@@ -6379,11 +6377,11 @@ fn buildOutputFromZig(

 std.debug.assert(output_mode != .Exe);

-var main_pkg: Package = .{
+var main_mod: Package = .{
 .root_src_directory = comp.zig_lib_directory,
 .root_src_path = src_basename,
 };
-defer main_pkg.deinitTable(comp.gpa);
+defer main_mod.deinitTable(comp.gpa);
 const root_name = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len];
 const target = comp.getTarget();
 const bin_basename = try std.zig.binNameAlloc(comp.gpa, .{

@@ -6404,7 +6402,7 @@ fn buildOutputFromZig(
 .cache_mode = .whole,
 .target = target,
 .root_name = root_name,
-.main_pkg = &main_pkg,
+.main_mod = &main_mod,
 .output_mode = output_mode,
 .thread_pool = comp.thread_pool,
 .libc_installation = comp.bin_file.options.libc_installation,

@@ -6481,7 +6479,7 @@ pub fn build_crt_file(
 .cache_mode = .whole,
 .target = target,
 .root_name = root_name,
-.main_pkg = null,
+.main_mod = null,
 .output_mode = output_mode,
 .thread_pool = comp.thread_pool,
 .libc_installation = comp.bin_file.options.libc_installation,

@@ -1,6 +1,10 @@
 pub const max_bytes = 10 * 1024 * 1024;
 pub const basename = "build.zig.zon";
 pub const Hash = std.crypto.hash.sha2.Sha256;
+pub const Digest = [Hash.digest_length]u8;
+pub const multihash_len = 1 + 1 + Hash.digest_length;
+pub const multihash_hex_digest_len = 2 * multihash_len;
+pub const MultiHashHexDigest = [multihash_hex_digest_len]u8;

 pub const Dependency = struct {
 location: union(enum) {

@@ -46,7 +50,6 @@ comptime {
 assert(@intFromEnum(multihash_function) < 127);
 assert(Hash.digest_length < 127);
 }
-pub const multihash_len = 1 + 1 + Hash.digest_length;

 name: []const u8,
 version: std.SemanticVersion,

@@ -122,8 +125,8 @@ test hex64 {
 try std.testing.expectEqualStrings("[00efcdab78563412]", s);
 }

-pub fn hexDigest(digest: [Hash.digest_length]u8) [multihash_len * 2]u8 {
-var result: [multihash_len * 2]u8 = undefined;
+pub fn hexDigest(digest: Digest) MultiHashHexDigest {
+var result: MultiHashHexDigest = undefined;

 result[0] = hex_charset[@intFromEnum(multihash_function) >> 4];
 result[1] = hex_charset[@intFromEnum(multihash_function) & 15];

@@ -339,10 +342,9 @@ const Parse = struct {
 }
 }

-const hex_multihash_len = 2 * Manifest.multihash_len;
-if (h.len != hex_multihash_len) {
+if (h.len != multihash_hex_digest_len) {
 return fail(p, tok, "wrong hash size. expected: {d}, found: {d}", .{
-hex_multihash_len, h.len,
+multihash_hex_digest_len, h.len,
 });
 }
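As an aside (illustration only, not part of the diff): the relationship between the new Manifest constants is plain arithmetic. The multihash encoding prepends a one-byte function identifier and a one-byte length to the raw digest, and the hex form doubles that. A minimal sketch, assuming SHA-256's 32-byte digest:

const std = @import("std");
const Hash = std.crypto.hash.sha2.Sha256;

pub const Digest = [Hash.digest_length]u8; // 32-byte raw digest
pub const multihash_len = 1 + 1 + Hash.digest_length; // function byte + length byte + digest = 34
pub const multihash_hex_digest_len = 2 * multihash_len; // 68 hex characters
pub const MultiHashHexDigest = [multihash_hex_digest_len]u8;

comptime {
    // Sanity-check the arithmetic described above.
    std.debug.assert(multihash_len == 34);
    std.debug.assert(multihash_hex_digest_len == 68);
}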

@@ -55,10 +55,10 @@ comp: *Compilation,
 /// Where build artifacts and incremental compilation metadata serialization go.
 zig_cache_artifact_directory: Compilation.Directory,
 /// Pointer to externally managed resource.
-root_pkg: *Package,
-/// Normally, `main_pkg` and `root_pkg` are the same. The exception is `zig test`, in which
-/// `root_pkg` is the test runner, and `main_pkg` is the user's source file which has the tests.
-main_pkg: *Package,
+root_mod: *Package.Module,
+/// Normally, `main_mod` and `root_mod` are the same. The exception is `zig test`, in which
+/// `root_mod` is the test runner, and `main_mod` is the user's source file which has the tests.
+main_mod: *Package.Module,
 sema_prog_node: std.Progress.Node = undefined,

 /// Used by AstGen worker to load and store ZIR cache.

@@ -973,8 +973,8 @@ pub const File = struct {
 tree: Ast,
 /// Whether this is populated or not depends on `zir_loaded`.
 zir: Zir,
-/// Package that this file is a part of, managed externally.
-pkg: *Package,
+/// Module that this file is a part of, managed externally.
+mod: *Package.Module,
 /// Whether this file is a part of multiple packages. This is an error condition which will be reported after AstGen.
 multi_pkg: bool = false,
 /// List of references to this file, used for multi-package errors.

@@ -1058,14 +1058,9 @@ pub const File = struct {
 .stat = file.stat,
 };

-const root_dir_path = file.pkg.root_src_directory.path orelse ".";
-log.debug("File.getSource, not cached. pkgdir={s} sub_file_path={s}", .{
-root_dir_path, file.sub_file_path,
-});
-
 // Keep track of inode, file size, mtime, hash so we can detect which files
 // have been modified when an incremental update is requested.
-var f = try file.pkg.root_src_directory.handle.openFile(file.sub_file_path, .{});
+var f = try file.mod.root.openFile(file.sub_file_path, .{});
 defer f.close();

 const stat = try f.stat();

@@ -1134,14 +1129,12 @@ pub const File = struct {
 return ip.getOrPutTrailingString(mod.gpa, ip.string_bytes.items.len - start);
 }

-/// Returns the full path to this file relative to its package.
 pub fn fullPath(file: File, ally: Allocator) ![]u8 {
-return file.pkg.root_src_directory.join(ally, &[_][]const u8{file.sub_file_path});
+return file.mod.root.joinString(ally, file.sub_file_path);
 }

-/// Returns the full path to this file relative to its package.
 pub fn fullPathZ(file: File, ally: Allocator) ![:0]u8 {
-return file.pkg.root_src_directory.joinZ(ally, &[_][]const u8{file.sub_file_path});
+return file.mod.root.joinStringZ(ally, file.sub_file_path);
 }

 pub fn dumpSrc(file: *File, src: LazySrcLoc) void {

@@ -2543,25 +2536,25 @@ pub fn deinit(mod: *Module) void {

 mod.deletion_set.deinit(gpa);

-// The callsite of `Compilation.create` owns the `main_pkg`, however
+// The callsite of `Compilation.create` owns the `main_mod`, however
 // Module owns the builtin and std packages that it adds.
-if (mod.main_pkg.table.fetchRemove("builtin")) |kv| {
+if (mod.main_mod.table.fetchRemove("builtin")) |kv| {
 gpa.free(kv.key);
 kv.value.destroy(gpa);
 }
-if (mod.main_pkg.table.fetchRemove("std")) |kv| {
+if (mod.main_mod.table.fetchRemove("std")) |kv| {
 gpa.free(kv.key);
-// It's possible for main_pkg to be std when running 'zig test'! In this case, we must not
+// It's possible for main_mod to be std when running 'zig test'! In this case, we must not
 // destroy it, since it would lead to a double-free.
-if (kv.value != mod.main_pkg) {
+if (kv.value != mod.main_mod) {
 kv.value.destroy(gpa);
 }
 }
-if (mod.main_pkg.table.fetchRemove("root")) |kv| {
+if (mod.main_mod.table.fetchRemove("root")) |kv| {
 gpa.free(kv.key);
 }
-if (mod.root_pkg != mod.main_pkg) {
-mod.root_pkg.destroy(gpa);
+if (mod.root_mod != mod.main_mod) {
+mod.root_mod.destroy(gpa);
 }

 mod.compile_log_text.deinit(gpa);

@@ -2715,7 +2708,7 @@ pub fn astGenFile(mod: *Module, file: *File) !void {

 const stat = try source_file.stat();

-const want_local_cache = file.pkg == mod.main_pkg;
+const want_local_cache = file.pkg == mod.main_mod;
 const digest = hash: {
 var path_hash: Cache.HashHelper = .{};
 path_hash.addBytes(build_options.version);

@@ -3158,23 +3151,23 @@ pub fn populateBuiltinFile(mod: *Module) !void {
 comp.mutex.lock();
 defer comp.mutex.unlock();

-const builtin_pkg = mod.main_pkg.table.get("builtin").?;
-const result = try mod.importPkg(builtin_pkg);
+const builtin_mod = mod.main_mod.table.get("builtin").?;
+const result = try mod.importPkg(builtin_mod);
 break :blk .{
 .file = result.file,
-.pkg = builtin_pkg,
+.pkg = builtin_mod,
 };
 };
 const file = pkg_and_file.file;
-const builtin_pkg = pkg_and_file.pkg;
+const builtin_mod = pkg_and_file.pkg;
 const gpa = mod.gpa;
 file.source = try comp.generateBuiltinZigSource(gpa);
 file.source_loaded = true;

-if (builtin_pkg.root_src_directory.handle.statFile(builtin_pkg.root_src_path)) |stat| {
+if (builtin_mod.root_src_directory.handle.statFile(builtin_mod.root_src_path)) |stat| {
 if (stat.size != file.source.len) {
-const full_path = try builtin_pkg.root_src_directory.join(gpa, &.{
-builtin_pkg.root_src_path,
+const full_path = try builtin_mod.root_src_directory.join(gpa, &.{
+builtin_mod.root_src_path,
 });
 defer gpa.free(full_path);

@@ -3184,7 +3177,7 @@ pub fn populateBuiltinFile(mod: *Module) !void {
 .{ full_path, file.source.len, stat.size },
 );

-try writeBuiltinFile(file, builtin_pkg);
+try writeBuiltinFile(file, builtin_mod);
 } else {
 file.stat = .{
 .size = stat.size,

@@ -3198,7 +3191,7 @@ pub fn populateBuiltinFile(mod: *Module) !void {
 error.PipeBusy => unreachable, // it's not a pipe
 error.WouldBlock => unreachable, // not asking for non-blocking I/O

-error.FileNotFound => try writeBuiltinFile(file, builtin_pkg),
+error.FileNotFound => try writeBuiltinFile(file, builtin_mod),

 else => |e| return e,
 }
@@ -3212,8 +3205,8 @@ pub fn populateBuiltinFile(mod: *Module) !void {
 file.status = .success_zir;
 }

-fn writeBuiltinFile(file: *File, builtin_pkg: *Package) !void {
-var af = try builtin_pkg.root_src_directory.handle.atomicFile(builtin_pkg.root_src_path, .{});
+fn writeBuiltinFile(file: *File, builtin_mod: *Package.Module) !void {
+var af = try builtin_mod.root_src_directory.handle.atomicFile(builtin_mod.root_src_path, .{});
 defer af.deinit();
 try af.file.writeAll(file.source);
 try af.finish();

@@ -3748,7 +3741,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {

 // TODO: figure out how this works under incremental changes to builtin.zig!
 const builtin_type_target_index: InternPool.Index = blk: {
-const std_mod = mod.main_pkg.table.get("std").?;
+const std_mod = mod.main_mod.table.get("std").?;
 if (decl.getFileScope(mod).pkg != std_mod) break :blk .none;
 // We're in the std module.
 const std_file = (try mod.importPkg(std_mod)).file;

@@ -4100,13 +4093,13 @@ pub fn importFile(
 import_string: []const u8,
 ) !ImportFileResult {
 if (std.mem.eql(u8, import_string, "std")) {
-return mod.importPkg(mod.main_pkg.table.get("std").?);
+return mod.importPkg(mod.main_mod.table.get("std").?);
 }
 if (std.mem.eql(u8, import_string, "builtin")) {
-return mod.importPkg(mod.main_pkg.table.get("builtin").?);
+return mod.importPkg(mod.main_mod.table.get("builtin").?);
 }
 if (std.mem.eql(u8, import_string, "root")) {
-return mod.importPkg(mod.root_pkg);
+return mod.importPkg(mod.root_mod);
 }
 if (cur_file.pkg.table.get(import_string)) |pkg| {
 return mod.importPkg(pkg);

@@ -4462,14 +4455,14 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
 // test decl with no name. Skip the part where we check against
 // the test name filter.
 if (!comp.bin_file.options.is_test) break :blk false;
-if (decl_pkg != mod.main_pkg) break :blk false;
+if (decl_pkg != mod.main_mod) break :blk false;
 try mod.test_functions.put(gpa, new_decl_index, {});
 break :blk true;
 },
 else => blk: {
 if (!is_named_test) break :blk false;
 if (!comp.bin_file.options.is_test) break :blk false;
-if (decl_pkg != mod.main_pkg) break :blk false;
+if (decl_pkg != mod.main_mod) break :blk false;
 if (comp.test_filter) |test_filter| {
 if (mem.indexOf(u8, ip.stringToSlice(decl_name), test_filter) == null) {
 break :blk false;

@@ -5596,8 +5589,8 @@ pub fn populateTestFunctions(
 ) !void {
 const gpa = mod.gpa;
 const ip = &mod.intern_pool;
-const builtin_pkg = mod.main_pkg.table.get("builtin").?;
-const builtin_file = (mod.importPkg(builtin_pkg) catch unreachable).file;
+const builtin_mod = mod.main_mod.table.get("builtin").?;
+const builtin_file = (mod.importPkg(builtin_mod) catch unreachable).file;
 const root_decl = mod.declPtr(builtin_file.root_decl.unwrap().?);
 const builtin_namespace = mod.namespacePtr(root_decl.src_namespace);
 const test_functions_str = try ip.getOrPutString(gpa, "test_functions");

326 src/Package.zig

@@ -1,251 +1,87 @@
-const Package = @This();
+pub const Module = @import("Package/Module.zig");
+pub const Fetch = @import("Package/Fetch.zig");
+pub const build_zig_basename = "build.zig";
+pub const Manifest = @import("Manifest.zig");
+
+pub const Path = struct {
+root_dir: Cache.Directory,
+/// The path, relative to the root dir, that this `Path` represents.
+/// Empty string means the root_dir is the path.
+sub_path: []const u8 = "",
+
+pub fn cwd() Path {
+return .{ .root_dir = Cache.Directory.cwd() };
+}
+
+pub fn join(p: Path, allocator: Allocator, sub_path: []const u8) Allocator.Error!Path {
+const parts: []const []const u8 =
+if (p.sub_path.len == 0) &.{sub_path} else &.{ p.sub_path, sub_path };
+return .{
+.root_dir = p.root_dir,
+.sub_path = try fs.path.join(allocator, parts),
+};
+}
+
+pub fn joinString(p: Path, allocator: Allocator, sub_path: []const u8) Allocator.Error![]u8 {
+const parts: []const []const u8 =
+if (p.sub_path.len == 0) &.{sub_path} else &.{ p.sub_path, sub_path };
+return p.root_dir.join(allocator, parts);
+}
+
+pub fn joinStringZ(p: Path, allocator: Allocator, sub_path: []const u8) Allocator.Error![]u8 {
+const parts: []const []const u8 =
+if (p.sub_path.len == 0) &.{sub_path} else &.{ p.sub_path, sub_path };
+return p.root_dir.joinZ(allocator, parts);
+}
+
+pub fn openFile(
+p: Path,
+sub_path: []const u8,
+flags: fs.File.OpenFlags,
+) fs.File.OpenError!fs.File {
+var buf: [fs.MAX_PATH_BYTES]u8 = undefined;
+const joined_path = if (p.sub_path.len == 0) sub_path else p: {
+break :p std.fmt.bufPrint(&buf, "{s}" ++ fs.path.sep_str ++ "{s}", .{
+p.sub_path, sub_path,
+}) catch return error.NameTooLong;
+};
+return p.root_dir.handle.openFile(joined_path, flags);
+}
+
+pub fn makeOpenPath(p: Path, sub_path: []const u8, opts: fs.OpenDirOptions) !fs.Dir {
+var buf: [fs.MAX_PATH_BYTES]u8 = undefined;
+const joined_path = if (p.sub_path.len == 0) sub_path else p: {
+break :p std.fmt.bufPrint(&buf, "{s}" ++ fs.path.sep_str ++ "{s}", .{
+p.sub_path, sub_path,
+}) catch return error.NameTooLong;
+};
+return p.root_dir.handle.makeOpenPath(joined_path, opts);
+}
+
+pub fn format(
+self: Path,
+comptime fmt_string: []const u8,
+options: std.fmt.FormatOptions,
+writer: anytype,
+) !void {
+_ = options;
+if (fmt_string.len > 0)
+std.fmt.invalidFmtError(fmt_string, self);
+if (self.root_dir.path) |p| {
+try writer.writeAll(p);
+try writer.writeAll(fs.path.sep_str);
+}
+if (self.sub_path.len > 0) {
+try writer.writeAll(self.sub_path);
+try writer.writeAll(fs.path.sep_str);
+}
+}
+};
+
+const Package = @This();
const builtin = @import("builtin");
|
const builtin = @import("builtin");
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
const fs = std.fs;
|
const fs = std.fs;
|
||||||
const mem = std.mem;
|
const Allocator = std.mem.Allocator;
|
||||||
const Allocator = mem.Allocator;
|
|
||||||
const ascii = std.ascii;
|
|
||||||
const assert = std.debug.assert;
|
const assert = std.debug.assert;
|
||||||
const log = std.log.scoped(.package);
|
|
||||||
const main = @import("main.zig");
|
|
||||||
const ThreadPool = std.Thread.Pool;
|
|
||||||
|
|
||||||
const Compilation = @import("Compilation.zig");
|
|
||||||
const Module = @import("Module.zig");
|
|
||||||
const Cache = std.Build.Cache;
|
const Cache = std.Build.Cache;
|
||||||
const build_options = @import("build_options");
|
|
||||||
const Fetch = @import("Package/Fetch.zig");
|
|
||||||
|
|
||||||
pub const build_zig_basename = "build.zig";
|
|
||||||
pub const Manifest = @import("Manifest.zig");
|
|
||||||
pub const Table = std.StringHashMapUnmanaged(*Package);
|
|
||||||
|
|
||||||
root_src_directory: Compilation.Directory,
|
|
||||||
/// Relative to `root_src_directory`. May contain path separators.
|
|
||||||
root_src_path: []const u8,
|
|
||||||
/// The dependency table of this module. Shared dependencies such as 'std', 'builtin', and 'root'
|
|
||||||
/// are not specified in every dependency table, but instead only in the table of `main_pkg`.
|
|
||||||
/// `Module.importFile` is responsible for detecting these names and using the correct package.
|
|
||||||
table: Table = .{},
|
|
||||||
/// Whether to free `root_src_directory` on `destroy`.
|
|
||||||
root_src_directory_owned: bool = false,
|
|
||||||
|
|
||||||
/// Allocate a Package. No references to the slices passed are kept.
|
|
||||||
pub fn create(
|
|
||||||
gpa: Allocator,
|
|
||||||
/// Null indicates the current working directory
|
|
||||||
root_src_dir_path: ?[]const u8,
|
|
||||||
/// Relative to root_src_dir_path
|
|
||||||
root_src_path: []const u8,
|
|
||||||
) !*Package {
|
|
||||||
const ptr = try gpa.create(Package);
|
|
||||||
errdefer gpa.destroy(ptr);
|
|
||||||
|
|
||||||
const owned_dir_path = if (root_src_dir_path) |p| try gpa.dupe(u8, p) else null;
|
|
||||||
errdefer if (owned_dir_path) |p| gpa.free(p);
|
|
||||||
|
|
||||||
const owned_src_path = try gpa.dupe(u8, root_src_path);
|
|
||||||
errdefer gpa.free(owned_src_path);
|
|
||||||
|
|
||||||
ptr.* = .{
|
|
||||||
.root_src_directory = .{
|
|
||||||
.path = owned_dir_path,
|
|
||||||
.handle = if (owned_dir_path) |p| try fs.cwd().openDir(p, .{}) else fs.cwd(),
|
|
||||||
},
|
|
||||||
.root_src_path = owned_src_path,
|
|
||||||
.root_src_directory_owned = true,
|
|
||||||
};
|
|
||||||
|
|
||||||
return ptr;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn createWithDir(
|
|
||||||
gpa: Allocator,
|
|
||||||
directory: Compilation.Directory,
|
|
||||||
/// Relative to `directory`. If null, means `directory` is the root src dir
|
|
||||||
/// and is owned externally.
|
|
||||||
root_src_dir_path: ?[]const u8,
|
|
||||||
/// Relative to root_src_dir_path
|
|
||||||
root_src_path: []const u8,
|
|
||||||
) !*Package {
|
|
||||||
const ptr = try gpa.create(Package);
|
|
||||||
errdefer gpa.destroy(ptr);
|
|
||||||
|
|
||||||
const owned_src_path = try gpa.dupe(u8, root_src_path);
|
|
||||||
errdefer gpa.free(owned_src_path);
|
|
||||||
|
|
||||||
if (root_src_dir_path) |p| {
|
|
||||||
const owned_dir_path = try directory.join(gpa, &[1][]const u8{p});
|
|
||||||
errdefer gpa.free(owned_dir_path);
|
|
||||||
|
|
||||||
ptr.* = .{
|
|
||||||
.root_src_directory = .{
|
|
||||||
.path = owned_dir_path,
|
|
||||||
.handle = try directory.handle.openDir(p, .{}),
|
|
||||||
},
|
|
||||||
.root_src_directory_owned = true,
|
|
||||||
.root_src_path = owned_src_path,
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
ptr.* = .{
|
|
||||||
.root_src_directory = directory,
|
|
||||||
.root_src_directory_owned = false,
|
|
||||||
.root_src_path = owned_src_path,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return ptr;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Free all memory associated with this package. It does not destroy any packages
|
|
||||||
/// inside its table; the caller is responsible for calling destroy() on them.
|
|
||||||
pub fn destroy(pkg: *Package, gpa: Allocator) void {
|
|
||||||
gpa.free(pkg.root_src_path);
|
|
||||||
|
|
||||||
if (pkg.root_src_directory_owned) {
|
|
||||||
// If root_src_directory.path is null then the handle is the cwd()
|
|
||||||
// which shouldn't be closed.
|
|
||||||
if (pkg.root_src_directory.path) |p| {
|
|
||||||
gpa.free(p);
|
|
||||||
pkg.root_src_directory.handle.close();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pkg.deinitTable(gpa);
|
|
||||||
gpa.destroy(pkg);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Only frees memory associated with the table.
|
|
||||||
pub fn deinitTable(pkg: *Package, gpa: Allocator) void {
|
|
||||||
pkg.table.deinit(gpa);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add(pkg: *Package, gpa: Allocator, name: []const u8, package: *Package) !void {
|
|
||||||
try pkg.table.ensureUnusedCapacity(gpa, 1);
|
|
||||||
const name_dupe = try gpa.dupe(u8, name);
|
|
||||||
pkg.table.putAssumeCapacityNoClobber(name_dupe, package);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Compute a readable name for the package. The returned name should be freed with gpa. This
|
|
||||||
/// function is very slow, as it traverses the whole package hierarchy to find a path to this
|
|
||||||
/// package. It should only be used for error output.
|
|
||||||
pub fn getName(target: *const Package, gpa: Allocator, mod: Module) ![]const u8 {
|
|
||||||
// we'll do a breadth-first search from the root module to try and find a short name for this
|
|
||||||
// module, using a DoublyLinkedList of module/parent pairs. note that the "parent" there is
|
|
||||||
// just the first-found shortest path - a module may be a child of arbitrarily many other
|
|
||||||
// modules. This path may vary between executions due to hashmap iteration order, but that
|
|
||||||
// doesn't matter too much.
|
|
||||||
var node_arena = std.heap.ArenaAllocator.init(gpa);
|
|
||||||
defer node_arena.deinit();
|
|
||||||
const Parented = struct {
|
|
||||||
parent: ?*const @This(),
|
|
||||||
mod: *const Package,
|
|
||||||
};
|
|
||||||
const Queue = std.DoublyLinkedList(Parented);
|
|
||||||
var to_check: Queue = .{};
|
|
||||||
|
|
||||||
{
|
|
||||||
const new = try node_arena.allocator().create(Queue.Node);
|
|
||||||
new.* = .{ .data = .{ .parent = null, .mod = mod.root_pkg } };
|
|
||||||
to_check.prepend(new);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (mod.main_pkg != mod.root_pkg) {
|
|
||||||
const new = try node_arena.allocator().create(Queue.Node);
|
|
||||||
// TODO: once #12201 is resolved, we may want a way of indicating a different name for this
|
|
||||||
new.* = .{ .data = .{ .parent = null, .mod = mod.main_pkg } };
|
|
||||||
to_check.prepend(new);
|
|
||||||
}
|
|
||||||
|
|
||||||
// set of modules we've already checked to prevent loops
|
|
||||||
var checked = std.AutoHashMap(*const Package, void).init(gpa);
|
|
||||||
defer checked.deinit();
|
|
||||||
|
|
||||||
const linked = while (to_check.pop()) |node| {
|
|
||||||
const check = &node.data;
|
|
||||||
|
|
||||||
if (checked.contains(check.mod)) continue;
|
|
||||||
try checked.put(check.mod, {});
|
|
||||||
|
|
||||||
if (check.mod == target) break check;
|
|
||||||
|
|
||||||
var it = check.mod.table.iterator();
|
|
||||||
while (it.next()) |kv| {
|
|
||||||
var new = try node_arena.allocator().create(Queue.Node);
|
|
||||||
new.* = .{ .data = .{
|
|
||||||
.parent = check,
|
|
||||||
.mod = kv.value_ptr.*,
|
|
||||||
} };
|
|
||||||
to_check.prepend(new);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// this can happen for e.g. @cImport packages
|
|
||||||
return gpa.dupe(u8, "<unnamed>");
|
|
||||||
};
|
|
||||||
|
|
||||||
// we found a path to the module! unfortunately, we can only traverse *up* it, so we have to put
|
|
||||||
// all the names into a buffer so we can then print them in order.
|
|
||||||
var names = std.ArrayList([]const u8).init(gpa);
|
|
||||||
defer names.deinit();
|
|
||||||
|
|
||||||
var cur: *const Parented = linked;
|
|
||||||
while (cur.parent) |parent| : (cur = parent) {
|
|
||||||
// find cur's name in parent
|
|
||||||
var it = parent.mod.table.iterator();
|
|
||||||
const name = while (it.next()) |kv| {
|
|
||||||
if (kv.value_ptr.* == cur.mod) {
|
|
||||||
break kv.key_ptr.*;
|
|
||||||
}
|
|
||||||
} else unreachable;
|
|
||||||
try names.append(name);
|
|
||||||
}
|
|
||||||
|
|
||||||
// finally, print the names into a buffer!
|
|
||||||
var buf = std.ArrayList(u8).init(gpa);
|
|
||||||
defer buf.deinit();
|
|
||||||
try buf.writer().writeAll("root");
|
|
||||||
var i: usize = names.items.len;
|
|
||||||
while (i > 0) {
|
|
||||||
i -= 1;
|
|
||||||
try buf.writer().print(".{s}", .{names.items[i]});
|
|
||||||
}
|
|
||||||
|
|
||||||
return buf.toOwnedSlice();
|
|
||||||
}
|
|
||||||
|
|
||||||
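As the doc comment above notes, `getName` walks the entire module graph and is meant only for error reporting. A minimal usage sketch (hypothetical caller code, not part of this commit; `gpa`, `pkg`, and the compilation `Module` value `mod` are assumed to come from the surrounding state):

const readable = try pkg.getName(gpa, mod);
defer gpa.free(readable);
std.log.err("unable to analyze module '{s}'", .{readable});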
pub fn createFilePkg(
|
|
||||||
gpa: Allocator,
|
|
||||||
cache_directory: Compilation.Directory,
|
|
||||||
basename: []const u8,
|
|
||||||
contents: []const u8,
|
|
||||||
) !*Package {
|
|
||||||
const rand_int = std.crypto.random.int(u64);
|
|
||||||
const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ Manifest.hex64(rand_int);
|
|
||||||
{
|
|
||||||
var tmp_dir = try cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{});
|
|
||||||
defer tmp_dir.close();
|
|
||||||
try tmp_dir.writeFile(basename, contents);
|
|
||||||
}
|
|
||||||
|
|
||||||
var hh: Cache.HashHelper = .{};
|
|
||||||
hh.addBytes(build_options.version);
|
|
||||||
hh.addBytes(contents);
|
|
||||||
const hex_digest = hh.final();
|
|
||||||
|
|
||||||
const o_dir_sub_path = "o" ++ fs.path.sep_str ++ hex_digest;
|
|
||||||
try Fetch.renameTmpIntoCache(cache_directory.handle, tmp_dir_sub_path, o_dir_sub_path);
|
|
||||||
|
|
||||||
return createWithDir(gpa, cache_directory, o_dir_sub_path, basename);
|
|
||||||
}
|
|
||||||
|
|
||||||
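`createFilePkg` writes `contents` into a temporary cache directory, hashes the compiler version together with the contents, and renames the result into `o/<digest>`, so identical generated files share one cache entry. A hedged usage sketch (the `comp.local_cache_directory` and `generated_source` names are assumptions, not shown in this diff):

// Materialize a generated source file as its own package in the cache.
const generated_pkg = try Package.createFilePkg(
    gpa,
    comp.local_cache_directory,
    "generated.zig",
    generated_source,
);
defer generated_pkg.destroy(gpa);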
const hex_multihash_len = 2 * Manifest.multihash_len;
|
|
||||||
const MultiHashHexDigest = [hex_multihash_len]u8;
|
|
||||||
|
|
||||||
const DependencyModule = union(enum) {
|
|
||||||
zig_pkg: *Package,
|
|
||||||
non_zig_pkg: *Package,
|
|
||||||
};
|
|
||||||
/// This is to avoid creating multiple modules for the same build.zig file.
|
|
||||||
/// If the value is `null`, the package is a known dependency, but has not yet
|
|
||||||
/// been fetched.
|
|
||||||
pub const AllModules = std.AutoHashMapUnmanaged(MultiHashHexDigest, ?DependencyModule);
|
|
||||||
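`AllModules` keys each dependency by its multihash hex digest so that one `build.zig` maps to exactly one module. A sketch of the lookup-or-record pattern this enables (hypothetical names; `all_modules`, `hex_digest`, and `build_zig_mod` are assumptions):

const gop = try all_modules.getOrPut(gpa, hex_digest);
if (!gop.found_existing or gop.value_ptr.* == null) {
    // Known but not yet fetched: fetch it, then record the resulting module.
    gop.value_ptr.* = .{ .zig_pkg = build_zig_mod };
}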
|
|||||||
(File diff suppressed because it is too large.)

src/Package/Module.zig (new file, 32 lines)
@@ -0,0 +1,32 @@
|
|||||||
|
//! Corresponds to something that Zig source code can `@import`.
|
||||||
|
//! Not to be confused with src/Module.zig which should be renamed
|
||||||
|
//! to something else. https://github.com/ziglang/zig/issues/14307
|
||||||
|
|
||||||
|
/// Only files inside this directory can be imported.
|
||||||
|
root: Package.Path,
|
||||||
|
/// Relative to `root`. May contain path separators.
|
||||||
|
root_src_path: []const u8,
|
||||||
|
/// The dependency table of this module. Shared dependencies such as 'std',
|
||||||
|
/// 'builtin', and 'root' are not specified in every dependency table, but
|
||||||
|
/// instead only in the table of `main_pkg`. `Module.importFile` is
|
||||||
|
/// responsible for detecting these names and using the correct package.
|
||||||
|
deps: Deps = .{},
|
||||||
|
|
||||||
|
pub const Deps = std.StringHashMapUnmanaged(*Module);
|
||||||
|
|
||||||
|
pub const Tree = struct {
|
||||||
|
/// Each `Package` exposes a `Module` with build.zig as its root source file.
|
||||||
|
build_module_table: std.AutoArrayHashMapUnmanaged(MultiHashHexDigest, *Module),
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn create(allocator: Allocator, m: Module) Allocator.Error!*Module {
|
||||||
|
const new = try allocator.create(Module);
|
||||||
|
new.* = m;
|
||||||
|
return new;
|
||||||
|
}
|
||||||
|
|
||||||
|
const Module = @This();
|
||||||
|
const Package = @import("../Package.zig");
|
||||||
|
const std = @import("std");
|
||||||
|
const Allocator = std.mem.Allocator;
|
||||||
|
const MultiHashHexDigest = Package.Manifest.MultiHashHexDigest;
|
||||||
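Since a `Module` now lives in externally managed memory, callers build the module graph by hand: create each module with `create` and record edges in `deps`. A minimal sketch, assuming an `arena` allocator and illustrative on-disk paths (none of these values appear in this commit):

const foo_mod = try Package.Module.create(arena, .{
    .root = .{ .root_dir = Cache.Directory.cwd(), .sub_path = "libs/foo" },
    .root_src_path = "foo.zig",
});
const root_mod = try Package.Module.create(arena, .{
    .root = .{ .root_dir = Cache.Directory.cwd(), .sub_path = "src" },
    .root_src_path = "main.zig",
});
try root_mod.deps.put(arena, "foo", foo_mod);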
@@ -139,18 +139,22 @@ fn dumpStatusReport() !void {
|
|||||||
|
|
||||||
var crash_heap: [16 * 4096]u8 = undefined;
|
var crash_heap: [16 * 4096]u8 = undefined;
|
||||||
|
|
||||||
fn writeFilePath(file: *Module.File, stream: anytype) !void {
|
fn writeFilePath(file: *Module.File, writer: anytype) !void {
|
||||||
if (file.pkg.root_src_directory.path) |path| {
|
if (file.mod.root.root_dir.path) |path| {
|
||||||
try stream.writeAll(path);
|
try writer.writeAll(path);
|
||||||
try stream.writeAll(std.fs.path.sep_str);
|
try writer.writeAll(std.fs.path.sep_str);
|
||||||
}
|
}
|
||||||
try stream.writeAll(file.sub_file_path);
|
if (file.mod.root.sub_path.len > 0) {
|
||||||
|
try writer.writeAll(file.mod.root.sub_path);
|
||||||
|
try writer.writeAll(std.fs.path.sep_str);
|
||||||
|
}
|
||||||
|
try writer.writeAll(file.sub_file_path);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeFullyQualifiedDeclWithFile(mod: *Module, decl: *Decl, stream: anytype) !void {
|
fn writeFullyQualifiedDeclWithFile(mod: *Module, decl: *Decl, writer: anytype) !void {
|
||||||
try writeFilePath(decl.getFileScope(mod), stream);
|
try writeFilePath(decl.getFileScope(mod), writer);
|
||||||
try stream.writeAll(": ");
|
try writer.writeAll(": ");
|
||||||
try decl.renderFullyQualifiedDebugName(mod, stream);
|
try decl.renderFullyQualifiedDebugName(mod, writer);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn compilerPanic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace, maybe_ret_addr: ?usize) noreturn {
|
pub fn compilerPanic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace, maybe_ret_addr: ?usize) noreturn {
|
||||||
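The reworked `writeFilePath` now composes a file's location from three parts: the module root directory, the root's sub-path, and the file's own sub-path. A worked example with assumed values (not taken from this diff):

// file.mod.root.root_dir.path = "/home/user/proj"
// file.mod.root.sub_path      = "src"
// file.sub_file_path          = "foo/bar.zig"
// writeFilePath prints:         /home/user/proj/src/foo/bar.zig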
|
|||||||
src/main.zig (308 lines changed)
@@ -416,7 +416,7 @@ const usage_build_generic =
|
|||||||
\\ dep: [[import=]name]
|
\\ dep: [[import=]name]
|
||||||
\\ --deps [dep],[dep],... Set dependency names for the root package
|
\\ --deps [dep],[dep],... Set dependency names for the root package
|
||||||
\\ dep: [[import=]name]
|
\\ dep: [[import=]name]
|
||||||
\\ --main-pkg-path Set the directory of the root package
|
\\ --main-mod-path Set the directory of the root module
|
||||||
\\ -fPIC Force-enable Position Independent Code
|
\\ -fPIC Force-enable Position Independent Code
|
||||||
\\ -fno-PIC Force-disable Position Independent Code
|
\\ -fno-PIC Force-disable Position Independent Code
|
||||||
\\ -fPIE Force-enable Position Independent Executable
|
\\ -fPIE Force-enable Position Independent Executable
|
||||||
@@ -765,17 +765,11 @@ const Framework = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const CliModule = struct {
|
const CliModule = struct {
|
||||||
mod: *Package,
|
mod: *Package.Module,
|
||||||
/// still in CLI arg format
|
/// still in CLI arg format
|
||||||
deps_str: []const u8,
|
deps_str: []const u8,
|
||||||
};
|
};
|
||||||
|
|
||||||
fn cleanupModules(modules: *std.StringArrayHashMap(CliModule)) void {
|
|
||||||
var it = modules.iterator();
|
|
||||||
while (it.next()) |kv| kv.value_ptr.mod.destroy(modules.allocator);
|
|
||||||
modules.deinit();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn buildOutputType(
|
fn buildOutputType(
|
||||||
gpa: Allocator,
|
gpa: Allocator,
|
||||||
arena: Allocator,
|
arena: Allocator,
|
||||||
@@ -950,8 +944,7 @@ fn buildOutputType(
|
|||||||
// Contains every module specified via --mod. The dependencies are added
|
// Contains every module specified via --mod. The dependencies are added
|
||||||
// after argument parsing is completed. We use a StringArrayHashMap to make
|
// after argument parsing is completed. We use a StringArrayHashMap to make
|
||||||
// error output consistent.
|
// error output consistent.
|
||||||
var modules = std.StringArrayHashMap(CliModule).init(gpa);
|
var modules = std.StringArrayHashMap(CliModule).init(arena);
|
||||||
defer cleanupModules(&modules);
|
|
||||||
|
|
||||||
// The dependency string for the root package
|
// The dependency string for the root package
|
||||||
var root_deps_str: ?[]const u8 = null;
|
var root_deps_str: ?[]const u8 = null;
|
||||||
@@ -1023,32 +1016,37 @@ fn buildOutputType(
|
|||||||
|
|
||||||
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
|
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
|
||||||
if (mem.eql(u8, mod_name, name)) {
|
if (mem.eql(u8, mod_name, name)) {
|
||||||
fatal("unable to add module '{s}' -> '{s}': conflicts with builtin module", .{ mod_name, root_src });
|
fatal("unable to add module '{s}' -> '{s}': conflicts with builtin module", .{
|
||||||
|
mod_name, root_src,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var mod_it = modules.iterator();
|
var mod_it = modules.iterator();
|
||||||
while (mod_it.next()) |kv| {
|
while (mod_it.next()) |kv| {
|
||||||
if (std.mem.eql(u8, mod_name, kv.key_ptr.*)) {
|
if (std.mem.eql(u8, mod_name, kv.key_ptr.*)) {
|
||||||
fatal("unable to add module '{s}' -> '{s}': already exists as '{s}'", .{ mod_name, root_src, kv.value_ptr.mod.root_src_path });
|
fatal("unable to add module '{s}' -> '{s}': already exists as '{s}'", .{
|
||||||
|
mod_name, root_src, kv.value_ptr.mod.root_src_path,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
try modules.ensureUnusedCapacity(1);
|
try modules.put(mod_name, .{
|
||||||
modules.put(mod_name, .{
|
.mod = try Package.Module.create(arena, .{
|
||||||
.mod = try Package.create(
|
.root = .{
|
||||||
gpa,
|
.root_dir = Cache.Directory.cwd(),
|
||||||
fs.path.dirname(root_src),
|
.sub_path = fs.path.dirname(root_src) orelse "",
|
||||||
fs.path.basename(root_src),
|
},
|
||||||
),
|
.root_src_path = fs.path.basename(root_src),
|
||||||
|
}),
|
||||||
.deps_str = deps_str,
|
.deps_str = deps_str,
|
||||||
}) catch unreachable;
|
});
|
||||||
} else if (mem.eql(u8, arg, "--deps")) {
|
} else if (mem.eql(u8, arg, "--deps")) {
|
||||||
if (root_deps_str != null) {
|
if (root_deps_str != null) {
|
||||||
fatal("only one --deps argument is allowed", .{});
|
fatal("only one --deps argument is allowed", .{});
|
||||||
}
|
}
|
||||||
root_deps_str = args_iter.nextOrFatal();
|
root_deps_str = args_iter.nextOrFatal();
|
||||||
} else if (mem.eql(u8, arg, "--main-pkg-path")) {
|
} else if (mem.eql(u8, arg, "--main-mod-path")) {
|
||||||
main_pkg_path = args_iter.nextOrFatal();
|
main_pkg_path = args_iter.nextOrFatal();
|
||||||
} else if (mem.eql(u8, arg, "-cflags")) {
|
} else if (mem.eql(u8, arg, "-cflags")) {
|
||||||
extra_cflags.shrinkRetainingCapacity(0);
|
extra_cflags.shrinkRetainingCapacity(0);
|
||||||
@@ -2461,19 +2459,26 @@ fn buildOutputType(
|
|||||||
var deps_it = ModuleDepIterator.init(deps_str);
|
var deps_it = ModuleDepIterator.init(deps_str);
|
||||||
while (deps_it.next()) |dep| {
|
while (deps_it.next()) |dep| {
|
||||||
if (dep.expose.len == 0) {
|
if (dep.expose.len == 0) {
|
||||||
fatal("module '{s}' depends on '{s}' with a blank name", .{ kv.key_ptr.*, dep.name });
|
fatal("module '{s}' depends on '{s}' with a blank name", .{
|
||||||
|
kv.key_ptr.*, dep.name,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
|
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
|
||||||
if (mem.eql(u8, dep.expose, name)) {
|
if (mem.eql(u8, dep.expose, name)) {
|
||||||
fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{ dep.name, dep.expose });
|
fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{
|
||||||
|
dep.name, dep.expose,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const dep_mod = modules.get(dep.name) orelse
|
const dep_mod = modules.get(dep.name) orelse {
|
||||||
fatal("module '{s}' depends on module '{s}' which does not exist", .{ kv.key_ptr.*, dep.name });
|
fatal("module '{s}' depends on module '{s}' which does not exist", .{
|
||||||
|
kv.key_ptr.*, dep.name,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
try kv.value_ptr.mod.add(gpa, dep.expose, dep_mod.mod);
|
try kv.value_ptr.mod.deps.put(arena, dep.expose, dep_mod.mod);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -3229,31 +3234,33 @@ fn buildOutputType(
|
|||||||
};
|
};
|
||||||
defer emit_implib_resolved.deinit();
|
defer emit_implib_resolved.deinit();
|
||||||
|
|
||||||
const main_pkg: ?*Package = if (root_src_file) |unresolved_src_path| blk: {
|
const main_mod: ?*Package.Module = if (root_src_file) |unresolved_src_path| blk: {
|
||||||
const src_path = try introspect.resolvePath(arena, unresolved_src_path);
|
const src_path = try introspect.resolvePath(arena, unresolved_src_path);
|
||||||
if (main_pkg_path) |unresolved_main_pkg_path| {
|
if (main_pkg_path) |unresolved_main_pkg_path| {
|
||||||
const p = try introspect.resolvePath(arena, unresolved_main_pkg_path);
|
const p = try introspect.resolvePath(arena, unresolved_main_pkg_path);
|
||||||
if (p.len == 0) {
|
break :blk try Package.Module.create(arena, .{
|
||||||
break :blk try Package.create(gpa, null, src_path);
|
.root = .{
|
||||||
} else {
|
.root_dir = Cache.Directory.cwd(),
|
||||||
const rel_src_path = try fs.path.relative(arena, p, src_path);
|
.sub_path = p,
|
||||||
break :blk try Package.create(gpa, p, rel_src_path);
|
},
|
||||||
}
|
.root_src_path = if (p.len == 0)
|
||||||
|
src_path
|
||||||
|
else
|
||||||
|
try fs.path.relative(arena, p, src_path),
|
||||||
|
});
|
||||||
} else {
|
} else {
|
||||||
const root_src_dir_path = fs.path.dirname(src_path);
|
break :blk try Package.Module.create(arena, .{
|
||||||
break :blk Package.create(gpa, root_src_dir_path, fs.path.basename(src_path)) catch |err| {
|
.root = .{
|
||||||
if (root_src_dir_path) |p| {
|
.root_dir = Cache.Directory.cwd(),
|
||||||
fatal("unable to open '{s}': {s}", .{ p, @errorName(err) });
|
.sub_path = fs.path.dirname(src_path) orelse "",
|
||||||
} else {
|
},
|
||||||
return err;
|
.root_src_path = fs.path.basename(src_path),
|
||||||
}
|
});
|
||||||
};
|
|
||||||
}
|
}
|
||||||
} else null;
|
} else null;
|
||||||
defer if (main_pkg) |p| p.destroy(gpa);
|
|
||||||
|
|
||||||
// Transfer packages added with --deps to the root package
|
// Transfer packages added with --deps to the root package
|
||||||
if (main_pkg) |mod| {
|
if (main_mod) |mod| {
|
||||||
var it = ModuleDepIterator.init(root_deps_str orelse "");
|
var it = ModuleDepIterator.init(root_deps_str orelse "");
|
||||||
while (it.next()) |dep| {
|
while (it.next()) |dep| {
|
||||||
if (dep.expose.len == 0) {
|
if (dep.expose.len == 0) {
|
||||||
@@ -3269,7 +3276,7 @@ fn buildOutputType(
|
|||||||
const dep_mod = modules.get(dep.name) orelse
|
const dep_mod = modules.get(dep.name) orelse
|
||||||
fatal("root module depends on module '{s}' which does not exist", .{dep.name});
|
fatal("root module depends on module '{s}' which does not exist", .{dep.name});
|
||||||
|
|
||||||
try mod.add(gpa, dep.expose, dep_mod.mod);
|
try mod.deps.put(arena, dep.expose, dep_mod.mod);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -3310,17 +3317,18 @@ fn buildOutputType(
|
|||||||
if (arg_mode == .run) {
|
if (arg_mode == .run) {
|
||||||
break :l global_cache_directory;
|
break :l global_cache_directory;
|
||||||
}
|
}
|
||||||
if (main_pkg) |pkg| {
|
if (main_mod != null) {
|
||||||
// search upwards from cwd until we find directory with build.zig
|
// search upwards from cwd until we find directory with build.zig
|
||||||
const cwd_path = try process.getCwdAlloc(arena);
|
const cwd_path = try process.getCwdAlloc(arena);
|
||||||
const build_zig = "build.zig";
|
|
||||||
const zig_cache = "zig-cache";
|
const zig_cache = "zig-cache";
|
||||||
var dirname: []const u8 = cwd_path;
|
var dirname: []const u8 = cwd_path;
|
||||||
while (true) {
|
while (true) {
|
||||||
const joined_path = try fs.path.join(arena, &[_][]const u8{ dirname, build_zig });
|
const joined_path = try fs.path.join(arena, &.{
|
||||||
|
dirname, Package.build_zig_basename,
|
||||||
|
});
|
||||||
if (fs.cwd().access(joined_path, .{})) |_| {
|
if (fs.cwd().access(joined_path, .{})) |_| {
|
||||||
const cache_dir_path = try fs.path.join(arena, &[_][]const u8{ dirname, zig_cache });
|
const cache_dir_path = try fs.path.join(arena, &.{ dirname, zig_cache });
|
||||||
const dir = try pkg.root_src_directory.handle.makeOpenPath(cache_dir_path, .{});
|
const dir = try fs.cwd().makeOpenPath(cache_dir_path, .{});
|
||||||
cleanup_local_cache_dir = dir;
|
cleanup_local_cache_dir = dir;
|
||||||
break :l .{ .handle = dir, .path = cache_dir_path };
|
break :l .{ .handle = dir, .path = cache_dir_path };
|
||||||
} else |err| switch (err) {
|
} else |err| switch (err) {
|
||||||
@@ -3378,6 +3386,8 @@ fn buildOutputType(
|
|||||||
|
|
||||||
gimmeMoreOfThoseSweetSweetFileDescriptors();
|
gimmeMoreOfThoseSweetSweetFileDescriptors();
|
||||||
|
|
||||||
|
if (true) @panic("TODO restore Compilation logic");
|
||||||
|
|
||||||
const comp = Compilation.create(gpa, .{
|
const comp = Compilation.create(gpa, .{
|
||||||
.zig_lib_directory = zig_lib_directory,
|
.zig_lib_directory = zig_lib_directory,
|
||||||
.local_cache_directory = local_cache_directory,
|
.local_cache_directory = local_cache_directory,
|
||||||
@@ -3389,7 +3399,7 @@ fn buildOutputType(
|
|||||||
.dynamic_linker = target_info.dynamic_linker.get(),
|
.dynamic_linker = target_info.dynamic_linker.get(),
|
||||||
.sysroot = sysroot,
|
.sysroot = sysroot,
|
||||||
.output_mode = output_mode,
|
.output_mode = output_mode,
|
||||||
.main_pkg = main_pkg,
|
.main_mod = main_mod,
|
||||||
.emit_bin = emit_bin_loc,
|
.emit_bin = emit_bin_loc,
|
||||||
.emit_h = emit_h_resolved.data,
|
.emit_h = emit_h_resolved.data,
|
||||||
.emit_asm = emit_asm_resolved.data,
|
.emit_asm = emit_asm_resolved.data,
|
||||||
@@ -4799,32 +4809,22 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
|
|||||||
try thread_pool.init(.{ .allocator = gpa });
|
try thread_pool.init(.{ .allocator = gpa });
|
||||||
defer thread_pool.deinit();
|
defer thread_pool.deinit();
|
||||||
|
|
||||||
var cleanup_build_runner_dir: ?fs.Dir = null;
|
var main_mod: Package.Module = if (override_build_runner) |build_runner_path|
|
||||||
defer if (cleanup_build_runner_dir) |*dir| dir.close();
|
|
||||||
|
|
||||||
var main_pkg: Package = if (override_build_runner) |build_runner_path|
|
|
||||||
.{
|
.{
|
||||||
.root_src_directory = blk: {
|
.root = .{
|
||||||
if (std.fs.path.dirname(build_runner_path)) |dirname| {
|
.root_dir = Cache.Directory.cwd(),
|
||||||
const dir = fs.cwd().openDir(dirname, .{}) catch |err| {
|
.sub_path = fs.path.dirname(build_runner_path) orelse "",
|
||||||
fatal("unable to open directory to build runner from argument 'build-runner', '{s}': {s}", .{ dirname, @errorName(err) });
|
|
||||||
};
|
|
||||||
cleanup_build_runner_dir = dir;
|
|
||||||
break :blk .{ .path = dirname, .handle = dir };
|
|
||||||
}
|
|
||||||
|
|
||||||
break :blk .{ .path = null, .handle = fs.cwd() };
|
|
||||||
},
|
},
|
||||||
.root_src_path = std.fs.path.basename(build_runner_path),
|
.root_src_path = fs.path.basename(build_runner_path),
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
.{
|
.{
|
||||||
.root_src_directory = zig_lib_directory,
|
.root = .{ .root_dir = zig_lib_directory },
|
||||||
.root_src_path = "build_runner.zig",
|
.root_src_path = "build_runner.zig",
|
||||||
};
|
};
|
||||||
|
|
||||||
var build_pkg: Package = .{
|
var build_mod: Package.Module = .{
|
||||||
.root_src_directory = build_directory,
|
.root = .{ .root_dir = build_directory },
|
||||||
.root_src_path = build_zig_basename,
|
.root_src_path = build_zig_basename,
|
||||||
};
|
};
|
||||||
if (build_options.only_core_functionality) {
|
if (build_options.only_core_functionality) {
|
||||||
@@ -4833,11 +4833,13 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
|
|||||||
\\pub const root_deps: []const struct { []const u8, []const u8 } = &.{};
|
\\pub const root_deps: []const struct { []const u8, []const u8 } = &.{};
|
||||||
\\
|
\\
|
||||||
);
|
);
|
||||||
try main_pkg.add(gpa, "@dependencies", deps_pkg);
|
try main_mod.deps.put(arena, "@dependencies", deps_pkg);
|
||||||
} else {
|
} else {
|
||||||
var http_client: std.http.Client = .{ .allocator = gpa };
|
var http_client: std.http.Client = .{ .allocator = gpa };
|
||||||
defer http_client.deinit();
|
defer http_client.deinit();
|
||||||
|
|
||||||
|
if (true) @panic("TODO restore package fetching logic");
|
||||||
|
|
||||||
// Here we provide an import to the build runner that allows using reflection to find
|
// Here we provide an import to the build runner that allows using reflection to find
|
||||||
// all of the dependencies. Without this, there would be no way to use `@import` to
|
// all of the dependencies. Without this, there would be no way to use `@import` to
|
||||||
// access dependencies by name, since `@import` requires string literals.
|
// access dependencies by name, since `@import` requires string literals.
|
||||||
@@ -4857,8 +4859,8 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
|
|||||||
|
|
||||||
// Here we borrow main package's table and will replace it with a fresh
|
// Here we borrow main package's table and will replace it with a fresh
|
||||||
// one after this process completes.
|
// one after this process completes.
|
||||||
const fetch_result = build_pkg.fetchAndAddDependencies(
|
const fetch_result = build_mod.fetchAndAddDependencies(
|
||||||
&main_pkg,
|
&main_mod,
|
||||||
arena,
|
arena,
|
||||||
&thread_pool,
|
&thread_pool,
|
||||||
&http_client,
|
&http_client,
|
||||||
@@ -4886,10 +4888,10 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
|
|||||||
dependencies_source.items,
|
dependencies_source.items,
|
||||||
);
|
);
|
||||||
|
|
||||||
mem.swap(Package.Table, &main_pkg.table, &deps_pkg.table);
|
mem.swap(Package.Table, &main_mod.table, &deps_pkg.table);
|
||||||
try main_pkg.add(gpa, "@dependencies", deps_pkg);
|
try main_mod.add(gpa, "@dependencies", deps_pkg);
|
||||||
}
|
}
|
||||||
try main_pkg.add(gpa, "@build", &build_pkg);
|
try main_mod.add(gpa, "@build", &build_mod);
|
||||||
|
|
||||||
const comp = Compilation.create(gpa, .{
|
const comp = Compilation.create(gpa, .{
|
||||||
.zig_lib_directory = zig_lib_directory,
|
.zig_lib_directory = zig_lib_directory,
|
||||||
@@ -4901,7 +4903,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
|
|||||||
.is_native_abi = cross_target.isNativeAbi(),
|
.is_native_abi = cross_target.isNativeAbi(),
|
||||||
.dynamic_linker = target_info.dynamic_linker.get(),
|
.dynamic_linker = target_info.dynamic_linker.get(),
|
||||||
.output_mode = .Exe,
|
.output_mode = .Exe,
|
||||||
.main_pkg = &main_pkg,
|
.main_mod = &main_mod,
|
||||||
.emit_bin = emit_bin,
|
.emit_bin = emit_bin,
|
||||||
.emit_h = null,
|
.emit_h = null,
|
||||||
.optimize_mode = .Debug,
|
.optimize_mode = .Debug,
|
||||||
@@ -5115,12 +5117,14 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
|
|||||||
.tree = tree,
|
.tree = tree,
|
||||||
.tree_loaded = true,
|
.tree_loaded = true,
|
||||||
.zir = undefined,
|
.zir = undefined,
|
||||||
.pkg = undefined,
|
.mod = undefined,
|
||||||
.root_decl = .none,
|
.root_decl = .none,
|
||||||
};
|
};
|
||||||
|
|
||||||
file.pkg = try Package.create(gpa, null, file.sub_file_path);
|
file.mod = try Package.Module.create(arena, .{
|
||||||
defer file.pkg.destroy(gpa);
|
.root = Package.Path.cwd(),
|
||||||
|
.root_src_path = file.sub_file_path,
|
||||||
|
});
|
||||||
|
|
||||||
file.zir = try AstGen.generate(gpa, file.tree);
|
file.zir = try AstGen.generate(gpa, file.tree);
|
||||||
file.zir_loaded = true;
|
file.zir_loaded = true;
|
||||||
@@ -5321,12 +5325,14 @@ fn fmtPathFile(
|
|||||||
.tree = tree,
|
.tree = tree,
|
||||||
.tree_loaded = true,
|
.tree_loaded = true,
|
||||||
.zir = undefined,
|
.zir = undefined,
|
||||||
.pkg = undefined,
|
.mod = undefined,
|
||||||
.root_decl = .none,
|
.root_decl = .none,
|
||||||
};
|
};
|
||||||
|
|
||||||
file.pkg = try Package.create(gpa, null, file.sub_file_path);
|
file.mod = try Package.Module.create(fmt.arena, .{
|
||||||
defer file.pkg.destroy(gpa);
|
.root = Package.Path.cwd(),
|
||||||
|
.root_src_path = file.sub_file_path,
|
||||||
|
});
|
||||||
|
|
||||||
if (stat.size > max_src_size)
|
if (stat.size > max_src_size)
|
||||||
return error.FileTooBig;
|
return error.FileTooBig;
|
||||||
@@ -5387,7 +5393,7 @@ pub fn putAstErrorsIntoBundle(
|
|||||||
tree: Ast,
|
tree: Ast,
|
||||||
path: []const u8,
|
path: []const u8,
|
||||||
wip_errors: *std.zig.ErrorBundle.Wip,
|
wip_errors: *std.zig.ErrorBundle.Wip,
|
||||||
) !void {
|
) Allocator.Error!void {
|
||||||
var file: Module.File = .{
|
var file: Module.File = .{
|
||||||
.status = .never_loaded,
|
.status = .never_loaded,
|
||||||
.source_loaded = true,
|
.source_loaded = true,
|
||||||
@@ -5402,12 +5408,15 @@ pub fn putAstErrorsIntoBundle(
|
|||||||
.tree = tree,
|
.tree = tree,
|
||||||
.tree_loaded = true,
|
.tree_loaded = true,
|
||||||
.zir = undefined,
|
.zir = undefined,
|
||||||
.pkg = undefined,
|
.mod = undefined,
|
||||||
.root_decl = .none,
|
.root_decl = .none,
|
||||||
};
|
};
|
||||||
|
|
||||||
file.pkg = try Package.create(gpa, null, path);
|
file.mod = try Package.Module.create(gpa, .{
|
||||||
defer file.pkg.destroy(gpa);
|
.root = Package.Path.cwd(),
|
||||||
|
.root_src_path = file.sub_file_path,
|
||||||
|
});
|
||||||
|
defer gpa.destroy(file.mod);
|
||||||
|
|
||||||
file.zir = try AstGen.generate(gpa, file.tree);
|
file.zir = try AstGen.generate(gpa, file.tree);
|
||||||
file.zir_loaded = true;
|
file.zir_loaded = true;
|
||||||
@@ -5933,7 +5942,7 @@ pub fn cmdAstCheck(
|
|||||||
.stat = undefined,
|
.stat = undefined,
|
||||||
.tree = undefined,
|
.tree = undefined,
|
||||||
.zir = undefined,
|
.zir = undefined,
|
||||||
.pkg = undefined,
|
.mod = undefined,
|
||||||
.root_decl = .none,
|
.root_decl = .none,
|
||||||
};
|
};
|
||||||
if (zig_source_file) |file_name| {
|
if (zig_source_file) |file_name| {
|
||||||
@@ -5971,8 +5980,10 @@ pub fn cmdAstCheck(
|
|||||||
file.stat.size = source.len;
|
file.stat.size = source.len;
|
||||||
}
|
}
|
||||||
|
|
||||||
file.pkg = try Package.create(gpa, null, file.sub_file_path);
|
file.mod = try Package.Module.create(arena, .{
|
||||||
defer file.pkg.destroy(gpa);
|
.root = Package.Path.cwd(),
|
||||||
|
.root_src_path = file.sub_file_path,
|
||||||
|
});
|
||||||
|
|
||||||
file.tree = try Ast.parse(gpa, file.source, .zig);
|
file.tree = try Ast.parse(gpa, file.source, .zig);
|
||||||
file.tree_loaded = true;
|
file.tree_loaded = true;
|
||||||
@@ -6067,7 +6078,7 @@ pub fn cmdDumpZir(
|
|||||||
.stat = undefined,
|
.stat = undefined,
|
||||||
.tree = undefined,
|
.tree = undefined,
|
||||||
.zir = try Module.loadZirCache(gpa, f),
|
.zir = try Module.loadZirCache(gpa, f),
|
||||||
.pkg = undefined,
|
.mod = undefined,
|
||||||
.root_decl = .none,
|
.root_decl = .none,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -6136,12 +6147,14 @@ pub fn cmdChangelist(
|
|||||||
},
|
},
|
||||||
.tree = undefined,
|
.tree = undefined,
|
||||||
.zir = undefined,
|
.zir = undefined,
|
||||||
.pkg = undefined,
|
.mod = undefined,
|
||||||
.root_decl = .none,
|
.root_decl = .none,
|
||||||
};
|
};
|
||||||
|
|
||||||
file.pkg = try Package.create(gpa, null, file.sub_file_path);
|
file.mod = try Package.Module.create(arena, .{
|
||||||
defer file.pkg.destroy(gpa);
|
.root = Package.Path.cwd(),
|
||||||
|
.root_src_path = file.sub_file_path,
|
||||||
|
});
|
||||||
|
|
||||||
const source = try arena.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0);
|
const source = try arena.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0);
|
||||||
const amt = try f.readAll(source);
|
const amt = try f.readAll(source);
|
||||||
@@ -6623,8 +6636,11 @@ fn cmdFetch(
|
|||||||
args: []const []const u8,
|
args: []const []const u8,
|
||||||
) !void {
|
) !void {
|
||||||
const color: Color = .auto;
|
const color: Color = .auto;
|
||||||
var opt_url: ?[]const u8 = null;
|
const work_around_btrfs_bug = builtin.os.tag == .linux and
|
||||||
|
std.process.hasEnvVarConstant("ZIG_BTRFS_WORKAROUND");
|
||||||
|
var opt_path_or_url: ?[]const u8 = null;
|
||||||
var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR");
|
var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR");
|
||||||
|
var recursive = false;
|
||||||
|
|
||||||
{
|
{
|
||||||
var i: usize = 0;
|
var i: usize = 0;
|
||||||
@@ -6640,18 +6656,21 @@ fn cmdFetch(
|
|||||||
i += 1;
|
i += 1;
|
||||||
override_global_cache_dir = args[i];
|
override_global_cache_dir = args[i];
|
||||||
continue;
|
continue;
|
||||||
|
} else if (mem.eql(u8, arg, "--recursive")) {
|
||||||
|
recursive = true;
|
||||||
|
continue;
|
||||||
} else {
|
} else {
|
||||||
fatal("unrecognized parameter: '{s}'", .{arg});
|
fatal("unrecognized parameter: '{s}'", .{arg});
|
||||||
}
|
}
|
||||||
} else if (opt_url != null) {
|
} else if (opt_path_or_url != null) {
|
||||||
fatal("unexpected extra parameter: '{s}'", .{arg});
|
fatal("unexpected extra parameter: '{s}'", .{arg});
|
||||||
} else {
|
} else {
|
||||||
opt_url = arg;
|
opt_path_or_url = arg;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const url = opt_url orelse fatal("missing url or path parameter", .{});
|
const path_or_url = opt_path_or_url orelse fatal("missing url or path parameter", .{});
|
||||||
|
|
||||||
var thread_pool: ThreadPool = undefined;
|
var thread_pool: ThreadPool = undefined;
|
||||||
try thread_pool.init(.{ .allocator = gpa });
|
try thread_pool.init(.{ .allocator = gpa });
|
||||||
@@ -6664,19 +6683,6 @@ fn cmdFetch(
|
|||||||
const root_prog_node = progress.start("Fetch", 0);
|
const root_prog_node = progress.start("Fetch", 0);
|
||||||
defer root_prog_node.end();
|
defer root_prog_node.end();
|
||||||
|
|
||||||
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
|
|
||||||
try wip_errors.init(gpa);
|
|
||||||
defer wip_errors.deinit();
|
|
||||||
|
|
||||||
var report: Package.Report = .{
|
|
||||||
.ast = null,
|
|
||||||
.directory = .{
|
|
||||||
.handle = fs.cwd(),
|
|
||||||
.path = null,
|
|
||||||
},
|
|
||||||
.error_bundle = &wip_errors,
|
|
||||||
};
|
|
||||||
|
|
||||||
var global_cache_directory: Compilation.Directory = l: {
|
var global_cache_directory: Compilation.Directory = l: {
|
||||||
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
|
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
|
||||||
break :l .{
|
break :l .{
|
||||||
@@ -6686,56 +6692,48 @@ fn cmdFetch(
|
|||||||
};
|
};
|
||||||
defer global_cache_directory.handle.close();
|
defer global_cache_directory.handle.close();
|
||||||
|
|
||||||
var readable_resource: Package.ReadableResource = rr: {
|
var job_queue: Package.Fetch.JobQueue = .{
|
||||||
if (fs.cwd().openIterableDir(url, .{})) |dir| {
|
.http_client = &http_client,
|
||||||
break :rr .{
|
.thread_pool = &thread_pool,
|
||||||
.path = try gpa.dupe(u8, url),
|
.global_cache = global_cache_directory,
|
||||||
.resource = .{ .dir = dir },
|
.recursive = recursive,
|
||||||
};
|
.work_around_btrfs_bug = work_around_btrfs_bug,
|
||||||
} else |dir_err| {
|
|
||||||
const file_err = if (dir_err == error.NotDir) e: {
|
|
||||||
if (fs.cwd().openFile(url, .{})) |f| {
|
|
||||||
break :rr .{
|
|
||||||
.path = try gpa.dupe(u8, url),
|
|
||||||
.resource = .{ .file = f },
|
|
||||||
};
|
|
||||||
} else |err| break :e err;
|
|
||||||
} else dir_err;
|
|
||||||
|
|
||||||
const uri = std.Uri.parse(url) catch |uri_err| {
|
|
||||||
fatal("'{s}' could not be recognized as a file path ({s}) or an URL ({s})", .{
|
|
||||||
url, @errorName(file_err), @errorName(uri_err),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
const fetch_location = try Package.FetchLocation.initUri(uri, 0, report);
|
|
||||||
const cwd: Cache.Directory = .{
|
|
||||||
.handle = fs.cwd(),
|
|
||||||
.path = null,
|
|
||||||
};
|
|
||||||
break :rr try fetch_location.fetch(gpa, cwd, &http_client, 0, report);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
defer readable_resource.deinit(gpa);
|
defer job_queue.deinit();
|
||||||
|
|
||||||
var package_location = readable_resource.unpack(
|
var fetch: Package.Fetch = .{
|
||||||
gpa,
|
.arena = std.heap.ArenaAllocator.init(gpa),
|
||||||
&thread_pool,
|
.location = .{ .path_or_url = path_or_url },
|
||||||
global_cache_directory,
|
.location_tok = 0,
|
||||||
0,
|
.hash_tok = 0,
|
||||||
report,
|
.parent_package_root = undefined,
|
||||||
root_prog_node,
|
.parent_manifest_ast = null,
|
||||||
) catch |err| {
|
.prog_node = root_prog_node,
|
||||||
if (wip_errors.root_list.items.len > 0) {
|
.job_queue = &job_queue,
|
||||||
var errors = try wip_errors.toOwnedBundle("");
|
.omit_missing_hash_error = true,
|
||||||
defer errors.deinit(gpa);
|
|
||||||
errors.renderToStdErr(renderOptions(color));
|
.package_root = undefined,
|
||||||
process.exit(1);
|
.error_bundle = undefined,
|
||||||
}
|
.manifest = null,
|
||||||
fatal("unable to unpack '{s}': {s}", .{ url, @errorName(err) });
|
.manifest_ast = undefined,
|
||||||
|
.actual_hash = undefined,
|
||||||
|
.has_build_zig = false,
|
||||||
|
.oom_flag = false,
|
||||||
};
|
};
|
||||||
defer package_location.deinit(gpa);
|
defer fetch.deinit();
|
||||||
|
|
||||||
const hex_digest = Package.Manifest.hexDigest(package_location.hash);
|
fetch.run() catch |err| switch (err) {
|
||||||
|
error.OutOfMemory => fatal("out of memory", .{}),
|
||||||
|
error.FetchFailed => {}, // error bundle checked below
|
||||||
|
};
|
||||||
|
|
||||||
|
if (fetch.error_bundle.root_list.items.len > 0) {
|
||||||
|
var errors = try fetch.error_bundle.toOwnedBundle("");
|
||||||
|
errors.renderToStdErr(renderOptions(color));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const hex_digest = Package.Manifest.hexDigest(fetch.actual_hash);
|
||||||
|
|
||||||
progress.done = true;
|
progress.done = true;
|
||||||
progress.refresh();
|
progress.refresh();
|
||||||
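After `fetch.run()` succeeds and any error bundle has been rendered, the computed package hash is available as a hex digest. Presumably the command then reports this digest so it can be copied into `build.zig.zon`; a minimal sketch of that final step (an assumption, since the rest of the hunk is not shown here):

try std.io.getStdOut().writer().print("{s}\n", .{hex_digest});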