Merge pull request #17392 from ziglang/fetch

rework package manager
This commit is contained in:
Andrew Kelley 2023-10-09 11:47:37 -07:00 committed by GitHub
commit f7bc55c013
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
43 changed files with 2677 additions and 2123 deletions

View File

@ -528,7 +528,7 @@ set(ZIG_STAGE2_SOURCES
"${CMAKE_SOURCE_DIR}/src/Liveness.zig"
"${CMAKE_SOURCE_DIR}/src/Module.zig"
"${CMAKE_SOURCE_DIR}/src/Package.zig"
"${CMAKE_SOURCE_DIR}/src/Package/hash.zig"
"${CMAKE_SOURCE_DIR}/src/Package/Fetch.zig"
"${CMAKE_SOURCE_DIR}/src/RangeSet.zig"
"${CMAKE_SOURCE_DIR}/src/Sema.zig"
"${CMAKE_SOURCE_DIR}/src/TypedValue.zig"

View File

@ -88,7 +88,7 @@ pub fn build(b: *std.Build) !void {
.name = "check-case",
.root_source_file = .{ .path = "test/src/Cases.zig" },
.optimize = optimize,
.main_pkg_path = .{ .path = "." },
.main_mod_path = .{ .path = "." },
});
check_case_exe.stack_size = stack_size;
check_case_exe.single_threaded = single_threaded;

65
doc/build.zig.zon.md Normal file
View File

@ -0,0 +1,65 @@
# build.zig.zon Documentation
This is the manifest file for build.zig scripts. It is named build.zig.zon in
order to make it clear that it is metadata specifically pertaining to
build.zig.
- **build root** - the directory that contains `build.zig`
## Top-Level Fields
### `name`
String. Required.
### `version`
String. Required.
[semver](https://semver.org/)
### `dependencies`
Struct.
Each dependency must either provide a `url` and `hash`, or a `path`.
#### `url`
String.
When updating this field to a new URL, be sure to delete the corresponding
`hash`, otherwise you are communicating that you expect to find the old hash at
the new URL.
#### `hash`
String.
[multihash](https://multiformats.io/multihash/)
This is computed from the file contents of the directory of files that is
obtained after fetching `url` and applying the inclusion rules given by
`paths`.
This field is the source of truth; packages do not come from a `url`; they
come from a `hash`. `url` is just one of many possible mirrors for how to
obtain a package matching this `hash`.
#### `path`
String.
When this is provided, the package is found in a directory relative to the
build root. In this case the package's hash is irrelevant and therefore not
computed.
### `paths`
List. Required.
Specifies the set of files and directories that are included in this package.
Paths are relative to the build root. Use the empty string (`""`) to refer to
the build root itself.
Only files included in the package are used to compute a package's `hash`.

View File

@ -997,6 +997,7 @@ fn usage(builder: *std.Build, already_ran_build: bool, out_stream: anytype) !voi
\\ -j<N> Limit concurrent jobs (default is to use all CPU cores)
\\ --maxrss <bytes> Limit memory usage (default is to use available memory)
\\ --skip-oom-steps Instead of failing, skip steps that would exceed --maxrss
\\ --fetch Exit after fetching dependency tree
\\
\\Project-Specific Options:
\\

View File

@ -634,6 +634,9 @@ pub const ExecutableOptions = struct {
use_llvm: ?bool = null,
use_lld: ?bool = null,
zig_lib_dir: ?LazyPath = null,
main_mod_path: ?LazyPath = null,
/// Deprecated; use `main_mod_path`.
main_pkg_path: ?LazyPath = null,
};
@ -652,7 +655,7 @@ pub fn addExecutable(b: *Build, options: ExecutableOptions) *Step.Compile {
.use_llvm = options.use_llvm,
.use_lld = options.use_lld,
.zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
.main_pkg_path = options.main_pkg_path,
.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
});
}
@ -667,6 +670,9 @@ pub const ObjectOptions = struct {
use_llvm: ?bool = null,
use_lld: ?bool = null,
zig_lib_dir: ?LazyPath = null,
main_mod_path: ?LazyPath = null,
/// Deprecated; use `main_mod_path`.
main_pkg_path: ?LazyPath = null,
};
@ -683,7 +689,7 @@ pub fn addObject(b: *Build, options: ObjectOptions) *Step.Compile {
.use_llvm = options.use_llvm,
.use_lld = options.use_lld,
.zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
.main_pkg_path = options.main_pkg_path,
.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
});
}
@ -699,6 +705,9 @@ pub const SharedLibraryOptions = struct {
use_llvm: ?bool = null,
use_lld: ?bool = null,
zig_lib_dir: ?LazyPath = null,
main_mod_path: ?LazyPath = null,
/// Deprecated; use `main_mod_path`.
main_pkg_path: ?LazyPath = null,
};
@ -717,7 +726,7 @@ pub fn addSharedLibrary(b: *Build, options: SharedLibraryOptions) *Step.Compile
.use_llvm = options.use_llvm,
.use_lld = options.use_lld,
.zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
.main_pkg_path = options.main_pkg_path,
.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
});
}
@ -733,6 +742,9 @@ pub const StaticLibraryOptions = struct {
use_llvm: ?bool = null,
use_lld: ?bool = null,
zig_lib_dir: ?LazyPath = null,
main_mod_path: ?LazyPath = null,
/// Deprecated; use `main_mod_path`.
main_pkg_path: ?LazyPath = null,
};
@ -751,7 +763,7 @@ pub fn addStaticLibrary(b: *Build, options: StaticLibraryOptions) *Step.Compile
.use_llvm = options.use_llvm,
.use_lld = options.use_lld,
.zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
.main_pkg_path = options.main_pkg_path,
.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
});
}
@ -769,6 +781,9 @@ pub const TestOptions = struct {
use_llvm: ?bool = null,
use_lld: ?bool = null,
zig_lib_dir: ?LazyPath = null,
main_mod_path: ?LazyPath = null,
/// Deprecated; use `main_mod_path`.
main_pkg_path: ?LazyPath = null,
};
@ -787,7 +802,7 @@ pub fn addTest(b: *Build, options: TestOptions) *Step.Compile {
.use_llvm = options.use_llvm,
.use_lld = options.use_lld,
.zig_lib_dir = options.zig_lib_dir orelse b.zig_lib_dir,
.main_pkg_path = options.main_pkg_path,
.main_mod_path = options.main_mod_path orelse options.main_pkg_path,
});
}

View File

@ -9,6 +9,20 @@ pub const Directory = struct {
path: ?[]const u8,
handle: fs.Dir,
pub fn clone(d: Directory, arena: Allocator) Allocator.Error!Directory {
return .{
.path = if (d.path) |p| try arena.dupe(u8, p) else null,
.handle = d.handle,
};
}
pub fn cwd() Directory {
return .{
.path = null,
.handle = fs.cwd(),
};
}
pub fn join(self: Directory, allocator: Allocator, paths: []const []const u8) ![]u8 {
if (self.path) |p| {
// TODO clean way to do this with only 1 allocation
@ -47,12 +61,16 @@ pub const Directory = struct {
writer: anytype,
) !void {
_ = options;
if (fmt_string.len != 0) fmt.invalidFmtError(fmt, self);
if (fmt_string.len != 0) fmt.invalidFmtError(fmt_string, self);
if (self.path) |p| {
try writer.writeAll(p);
try writer.writeAll(fs.path.sep_str);
}
}
pub fn eql(self: Directory, other: Directory) bool {
return self.handle.fd == other.handle.fd;
}
};
gpa: Allocator,

View File

@ -68,7 +68,7 @@ c_std: std.Build.CStd,
/// Set via options; intended to be read-only after that.
zig_lib_dir: ?LazyPath,
/// Set via options; intended to be read-only after that.
main_pkg_path: ?LazyPath,
main_mod_path: ?LazyPath,
exec_cmd_args: ?[]const ?[]const u8,
filter: ?[]const u8,
test_evented_io: bool = false,
@ -316,6 +316,9 @@ pub const Options = struct {
use_llvm: ?bool = null,
use_lld: ?bool = null,
zig_lib_dir: ?LazyPath = null,
main_mod_path: ?LazyPath = null,
/// Deprecated; use `main_mod_path`.
main_pkg_path: ?LazyPath = null,
};
@ -480,7 +483,7 @@ pub fn create(owner: *std.Build, options: Options) *Compile {
.installed_headers = ArrayList(*Step).init(owner.allocator),
.c_std = std.Build.CStd.C99,
.zig_lib_dir = null,
.main_pkg_path = null,
.main_mod_path = null,
.exec_cmd_args = null,
.filter = options.filter,
.test_runner = options.test_runner,
@ -515,8 +518,8 @@ pub fn create(owner: *std.Build, options: Options) *Compile {
lp.addStepDependencies(&self.step);
}
if (options.main_pkg_path) |lp| {
self.main_pkg_path = lp.dupe(self.step.owner);
if (options.main_mod_path orelse options.main_pkg_path) |lp| {
self.main_mod_path = lp.dupe(self.step.owner);
lp.addStepDependencies(&self.step);
}
@ -1998,8 +2001,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
try zig_args.append(dir.getPath(b));
}
if (self.main_pkg_path) |dir| {
try zig_args.append("--main-pkg-path");
if (self.main_mod_path) |dir| {
try zig_args.append("--main-mod-path");
try zig_args.append(dir.getPath(b));
}

View File

@ -1229,14 +1229,41 @@ pub fn ArrayHashMapUnmanaged(
/// Sorts the entries and then rebuilds the index.
/// `sort_ctx` must have this method:
/// `fn lessThan(ctx: @TypeOf(ctx), a_index: usize, b_index: usize) bool`
/// Uses a stable sorting algorithm.
pub inline fn sort(self: *Self, sort_ctx: anytype) void {
if (@sizeOf(ByIndexContext) != 0)
@compileError("Cannot infer context " ++ @typeName(Context) ++ ", call sortContext instead.");
return self.sortContext(sort_ctx, undefined);
return sortContextInternal(self, .stable, sort_ctx, undefined);
}
pub fn sortContext(self: *Self, sort_ctx: anytype, ctx: Context) void {
self.entries.sort(sort_ctx);
/// Sorts the entries and then rebuilds the index.
/// `sort_ctx` must have this method:
/// `fn lessThan(ctx: @TypeOf(ctx), a_index: usize, b_index: usize) bool`
/// Uses an unstable sorting algorithm.
pub inline fn sortUnstable(self: *Self, sort_ctx: anytype) void {
if (@sizeOf(ByIndexContext) != 0)
@compileError("Cannot infer context " ++ @typeName(Context) ++ ", call sortUnstableContext instead.");
return self.sortContextInternal(.unstable, sort_ctx, undefined);
}
pub inline fn sortContext(self: *Self, sort_ctx: anytype, ctx: Context) void {
return sortContextInternal(self, .stable, sort_ctx, ctx);
}
pub inline fn sortUnstableContext(self: *Self, sort_ctx: anytype, ctx: Context) void {
return sortContextInternal(self, .unstable, sort_ctx, ctx);
}
fn sortContextInternal(
self: *Self,
comptime mode: std.sort.Mode,
sort_ctx: anytype,
ctx: Context,
) void {
switch (mode) {
.stable => self.entries.sort(sort_ctx),
.unstable => self.entries.sortUnstable(sort_ctx),
}
const header = self.index_header orelse return;
header.reset();
self.insertAllEntriesIntoNewHeader(if (store_hash) {} else ctx, header);

View File

@ -728,15 +728,17 @@ pub fn resolvePosix(allocator: Allocator, paths: []const []const u8) Allocator.E
}
}
test "resolve" {
test resolve {
try testResolveWindows(&[_][]const u8{ "a\\b\\c\\", "..\\..\\.." }, ".");
try testResolveWindows(&[_][]const u8{"."}, ".");
try testResolveWindows(&[_][]const u8{""}, ".");
try testResolvePosix(&[_][]const u8{ "a/b/c/", "../../.." }, ".");
try testResolvePosix(&[_][]const u8{"."}, ".");
try testResolvePosix(&[_][]const u8{""}, ".");
}
test "resolveWindows" {
test resolveWindows {
try testResolveWindows(
&[_][]const u8{ "Z:\\", "/usr/local", "lib\\zig\\std\\array_list.zig" },
"Z:\\usr\\local\\lib\\zig\\std\\array_list.zig",
@ -764,7 +766,7 @@ test "resolveWindows" {
try testResolveWindows(&[_][]const u8{"a/b"}, "a\\b");
}
test "resolvePosix" {
test resolvePosix {
try testResolvePosix(&.{ "/a/b", "c" }, "/a/b/c");
try testResolvePosix(&.{ "/a/b", "c", "//d", "e///" }, "/d/e");
try testResolvePosix(&.{ "/a/b/c", "..", "../" }, "/a");

View File

@ -467,7 +467,7 @@ pub fn MultiArrayList(comptime T: type) type {
/// `ctx` has the following method:
/// `fn lessThan(ctx: @TypeOf(ctx), a_index: usize, b_index: usize) bool`
fn sortInternal(self: Self, a: usize, b: usize, ctx: anytype, comptime mode: enum { stable, unstable }) void {
fn sortInternal(self: Self, a: usize, b: usize, ctx: anytype, comptime mode: std.sort.Mode) void {
const sort_context: struct {
sub_ctx: @TypeOf(ctx),
slice: Slice,

View File

@ -46,7 +46,7 @@ pub fn getCwdAlloc(allocator: Allocator) ![]u8 {
}
}
test "getCwdAlloc" {
test getCwdAlloc {
if (builtin.os.tag == .wasi) return error.SkipZigTest;
const cwd = try getCwdAlloc(testing.allocator);

View File

@ -4,6 +4,8 @@ const testing = std.testing;
const mem = std.mem;
const math = std.math;
pub const Mode = enum { stable, unstable };
pub const block = @import("sort/block.zig").block;
pub const pdq = @import("sort/pdq.zig").pdq;
pub const pdqContext = @import("sort/pdq.zig").pdqContext;

View File

@ -3,6 +3,8 @@ pub const Options = struct {
strip_components: u32 = 0,
/// How to handle the "mode" property of files from within the tar file.
mode_mode: ModeMode = .executable_bit_only,
/// Prevents creation of empty directories.
exclude_empty_directories: bool = false,
/// Provide this to receive detailed error messages.
/// When this is provided, some errors which would otherwise be returned immediately
/// will instead be added to this structure. The API user must check the errors
@ -29,6 +31,10 @@ pub const Options = struct {
file_name: []const u8,
link_name: []const u8,
},
unable_to_create_file: struct {
code: anyerror,
file_name: []const u8,
},
unsupported_file_type: struct {
file_name: []const u8,
file_type: Header.FileType,
@ -42,6 +48,9 @@ pub const Options = struct {
d.allocator.free(info.file_name);
d.allocator.free(info.link_name);
},
.unable_to_create_file => |info| {
d.allocator.free(info.file_name);
},
.unsupported_file_type => |info| {
d.allocator.free(info.file_name);
},
@ -201,7 +210,7 @@ pub fn pipeToFileSystem(dir: std.fs.Dir, reader: anytype, options: Options) !voi
switch (header.fileType()) {
.directory => {
const file_name = try stripComponents(unstripped_file_name, options.strip_components);
if (file_name.len != 0) {
if (file_name.len != 0 and !options.exclude_empty_directories) {
try dir.makePath(file_name);
}
},
@ -209,18 +218,34 @@ pub fn pipeToFileSystem(dir: std.fs.Dir, reader: anytype, options: Options) !voi
if (file_size == 0 and unstripped_file_name.len == 0) return;
const file_name = try stripComponents(unstripped_file_name, options.strip_components);
if (std.fs.path.dirname(file_name)) |dir_name| {
try dir.makePath(dir_name);
}
var file = try dir.createFile(file_name, .{});
defer file.close();
var file = dir.createFile(file_name, .{}) catch |err| switch (err) {
error.FileNotFound => again: {
const code = code: {
if (std.fs.path.dirname(file_name)) |dir_name| {
dir.makePath(dir_name) catch |code| break :code code;
break :again dir.createFile(file_name, .{}) catch |code| {
break :code code;
};
}
break :code err;
};
const d = options.diagnostics orelse return error.UnableToCreateFile;
try d.errors.append(d.allocator, .{ .unable_to_create_file = .{
.code = code,
.file_name = try d.allocator.dupe(u8, file_name),
} });
break :again null;
},
else => |e| return e,
};
defer if (file) |f| f.close();
var file_off: usize = 0;
while (true) {
const temp = try buffer.readChunk(reader, @intCast(rounded_file_size + 512 - file_off));
if (temp.len == 0) return error.UnexpectedEndOfStream;
const slice = temp[0..@intCast(@min(file_size - file_off, temp.len))];
try file.writeAll(slice);
if (file) |f| try f.writeAll(slice);
file_off += slice.len;
buffer.advance(slice.len);
@ -273,13 +298,26 @@ pub fn pipeToFileSystem(dir: std.fs.Dir, reader: anytype, options: Options) !voi
},
.hard_link => return error.TarUnsupportedFileType,
.symbolic_link => {
// The file system path of the symbolic link.
const file_name = try stripComponents(unstripped_file_name, options.strip_components);
// The data inside the symbolic link.
const link_name = header.linkName();
dir.symLink(link_name, file_name, .{}) catch |err| {
dir.symLink(link_name, file_name, .{}) catch |err| again: {
const code = code: {
if (err == error.FileNotFound) {
if (std.fs.path.dirname(file_name)) |dir_name| {
dir.makePath(dir_name) catch |code| break :code code;
break :again dir.symLink(link_name, file_name, .{}) catch |code| {
break :code code;
};
}
}
break :code err;
};
const d = options.diagnostics orelse return error.UnableToCreateSymLink;
try d.errors.append(d.allocator, .{ .unable_to_create_sym_link = .{
.code = err,
.code = code,
.file_name = try d.allocator.dupe(u8, file_name),
.link_name = try d.allocator.dupe(u8, link_name),
} });

View File

@ -1,6 +1,6 @@
const std = @import("std.zig");
const tokenizer = @import("zig/tokenizer.zig");
const fmt = @import("zig/fmt.zig");
pub const fmt = @import("zig/fmt.zig");
const assert = std.debug.assert;
pub const ErrorBundle = @import("zig/ErrorBundle.zig");

View File

@ -383,7 +383,7 @@ pub const Wip = struct {
};
}
pub fn addString(wip: *Wip, s: []const u8) !u32 {
pub fn addString(wip: *Wip, s: []const u8) Allocator.Error!u32 {
const gpa = wip.gpa;
const index: u32 = @intCast(wip.string_bytes.items.len);
try wip.string_bytes.ensureUnusedCapacity(gpa, s.len + 1);
@ -392,7 +392,7 @@ pub const Wip = struct {
return index;
}
pub fn printString(wip: *Wip, comptime fmt: []const u8, args: anytype) !u32 {
pub fn printString(wip: *Wip, comptime fmt: []const u8, args: anytype) Allocator.Error!u32 {
const gpa = wip.gpa;
const index: u32 = @intCast(wip.string_bytes.items.len);
try wip.string_bytes.writer(gpa).print(fmt, args);
@ -400,12 +400,12 @@ pub const Wip = struct {
return index;
}
pub fn addRootErrorMessage(wip: *Wip, em: ErrorMessage) !void {
pub fn addRootErrorMessage(wip: *Wip, em: ErrorMessage) Allocator.Error!void {
try wip.root_list.ensureUnusedCapacity(wip.gpa, 1);
wip.root_list.appendAssumeCapacity(try addErrorMessage(wip, em));
}
pub fn addErrorMessage(wip: *Wip, em: ErrorMessage) !MessageIndex {
pub fn addErrorMessage(wip: *Wip, em: ErrorMessage) Allocator.Error!MessageIndex {
return @enumFromInt(try addExtra(wip, em));
}
@ -413,15 +413,15 @@ pub const Wip = struct {
return @enumFromInt(addExtraAssumeCapacity(wip, em));
}
pub fn addSourceLocation(wip: *Wip, sl: SourceLocation) !SourceLocationIndex {
pub fn addSourceLocation(wip: *Wip, sl: SourceLocation) Allocator.Error!SourceLocationIndex {
return @enumFromInt(try addExtra(wip, sl));
}
pub fn addReferenceTrace(wip: *Wip, rt: ReferenceTrace) !void {
pub fn addReferenceTrace(wip: *Wip, rt: ReferenceTrace) Allocator.Error!void {
_ = try addExtra(wip, rt);
}
pub fn addBundleAsNotes(wip: *Wip, other: ErrorBundle) !void {
pub fn addBundleAsNotes(wip: *Wip, other: ErrorBundle) Allocator.Error!void {
const gpa = wip.gpa;
try wip.string_bytes.ensureUnusedCapacity(gpa, other.string_bytes.len);

View File

@ -13,7 +13,7 @@ fn formatId(
return writer.writeAll(bytes);
}
try writer.writeAll("@\"");
try formatEscapes(bytes, "", options, writer);
try stringEscape(bytes, "", options, writer);
try writer.writeByte('"');
}
@ -47,7 +47,7 @@ test "isValidId" {
/// Print the string as escaped contents of a double quoted or single-quoted string.
/// Format `{}` treats contents as a double-quoted string.
/// Format `{'}` treats contents as a single-quoted string.
fn formatEscapes(
pub fn stringEscape(
bytes: []const u8,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
@ -90,7 +90,7 @@ fn formatEscapes(
/// The format specifier must be one of:
/// * `{}` treats contents as a double-quoted string.
/// * `{'}` treats contents as a single-quoted string.
pub fn fmtEscapes(bytes: []const u8) std.fmt.Formatter(formatEscapes) {
pub fn fmtEscapes(bytes: []const u8) std.fmt.Formatter(stringEscape) {
return .{ .data = bytes };
}

View File

@ -6,7 +6,7 @@ const Autodoc = @This();
const Compilation = @import("Compilation.zig");
const CompilationModule = @import("Module.zig");
const File = CompilationModule.File;
const Module = @import("Package.zig");
const Module = @import("Package.zig").Module;
const Tokenizer = std.zig.Tokenizer;
const InternPool = @import("InternPool.zig");
const Zir = @import("Zir.zig");
@ -98,9 +98,8 @@ pub fn generate(cm: *CompilationModule, output_dir: std.fs.Dir) !void {
}
fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
const root_src_dir = self.comp_module.main_pkg.root_src_directory;
const root_src_path = self.comp_module.main_pkg.root_src_path;
const joined_src_path = try root_src_dir.join(self.arena, &.{root_src_path});
const root_src_path = self.comp_module.main_mod.root_src_path;
const joined_src_path = try self.comp_module.main_mod.root.joinString(self.arena, root_src_path);
defer self.arena.free(joined_src_path);
const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{ ".", joined_src_path });
@ -295,20 +294,20 @@ fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
}
const rootName = blk: {
const rootName = std.fs.path.basename(self.comp_module.main_pkg.root_src_path);
const rootName = std.fs.path.basename(self.comp_module.main_mod.root_src_path);
break :blk rootName[0 .. rootName.len - 4];
};
const main_type_index = self.types.items.len;
{
try self.modules.put(self.arena, self.comp_module.main_pkg, .{
try self.modules.put(self.arena, self.comp_module.main_mod, .{
.name = rootName,
.main = main_type_index,
.table = .{},
});
try self.modules.entries.items(.value)[0].table.put(
self.arena,
self.comp_module.main_pkg,
self.comp_module.main_mod,
.{
.name = rootName,
.value = 0,
@ -412,7 +411,7 @@ fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
while (files_iterator.next()) |entry| {
const sub_file_path = entry.key_ptr.*.sub_file_path;
const file_module = entry.key_ptr.*.pkg;
const file_module = entry.key_ptr.*.mod;
const module_name = (self.modules.get(file_module) orelse continue).name;
const file_path = std.fs.path.dirname(sub_file_path) orelse "";
@ -986,12 +985,12 @@ fn walkInstruction(
// importFile cannot error out since all files
// are already loaded at this point
if (file.pkg.table.get(path)) |other_module| {
if (file.mod.deps.get(path)) |other_module| {
const result = try self.modules.getOrPut(self.arena, other_module);
// Immediately add this module to the import table of our
// current module, regardless of whether it's new or not.
if (self.modules.getPtr(file.pkg)) |current_module| {
if (self.modules.getPtr(file.mod)) |current_module| {
// TODO: apparently, in the stdlib a file gets analyzed before
// its module gets added. I guess we're importing a file
// that belongs to another module through its file path?
@ -1025,12 +1024,12 @@ fn walkInstruction(
// TODO: Add this module as a dependency to the current module
// TODO: this seems something that could be done in bulk
// at the beginning or the end, or something.
const root_src_dir = other_module.root_src_directory;
const root_src_path = other_module.root_src_path;
const joined_src_path = try root_src_dir.join(self.arena, &.{root_src_path});
defer self.arena.free(joined_src_path);
const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{ ".", joined_src_path });
const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{
".",
other_module.root.root_dir.path orelse ".",
other_module.root.sub_path,
other_module.root_src_path,
});
defer self.arena.free(abs_root_src_path);
const new_file = self.comp_module.import_table.get(abs_root_src_path).?;
@ -5683,7 +5682,7 @@ fn writeFileTableToJson(
while (it.next()) |entry| {
try jsw.beginArray();
try jsw.write(entry.key_ptr.*.sub_file_path);
try jsw.write(mods.getIndex(entry.key_ptr.*.pkg) orelse 0);
try jsw.write(mods.getIndex(entry.key_ptr.*.mod) orelse 0);
try jsw.endArray();
}
try jsw.endArray();
@ -5840,7 +5839,7 @@ fn addGuide(self: *Autodoc, file: *File, guide_path: []const u8, section: *Secti
file.sub_file_path, "..", guide_path,
});
var guide_file = try file.pkg.root_src_directory.handle.openFile(resolved_path, .{});
var guide_file = try file.mod.root.openFile(resolved_path, .{});
defer guide_file.close();
const guide = guide_file.reader().readAllAlloc(self.arena, 1 * 1024 * 1024) catch |err| switch (err) {

View File

@ -41,8 +41,9 @@ const resinator = @import("resinator.zig");
/// General-purpose allocator. Used for both temporary and long-term storage.
gpa: Allocator,
/// Arena-allocated memory used during initialization. Should be untouched until deinit.
arena_state: std.heap.ArenaAllocator.State,
/// Arena-allocated memory, mostly used during initialization. However, it can be used
/// for other things requiring the same lifetime as the `Compilation`.
arena: std.heap.ArenaAllocator,
bin_file: *link.File,
c_object_table: std.AutoArrayHashMapUnmanaged(*CObject, void) = .{},
win32_resource_table: if (build_options.only_core_functionality) void else std.AutoArrayHashMapUnmanaged(*Win32Resource, void) =
@ -124,7 +125,7 @@ cache_parent: *Cache,
/// Path to own executable for invoking `zig clang`.
self_exe_path: ?[]const u8,
/// null means -fno-emit-bin.
/// This is mutable memory allocated into the Compilation-lifetime arena (`arena_state`)
/// This is mutable memory allocated into the Compilation-lifetime arena (`arena`)
/// of exactly the correct size for "o/[digest]/[basename]".
/// The basename is of the outputted binary file in case we don't know the directory yet.
whole_bin_sub_path: ?[]u8,
@ -273,8 +274,8 @@ const Job = union(enum) {
/// The source file containing the Decl has been updated, and so the
/// Decl may need its line number information updated in the debug info.
update_line_number: Module.Decl.Index,
/// The main source file for the package needs to be analyzed.
analyze_pkg: *Package,
/// The main source file for the module needs to be analyzed.
analyze_mod: *Package.Module,
/// one of the glibc static objects
glibc_crt_file: glibc.CRTFile,
@ -414,7 +415,7 @@ pub const MiscTask = enum {
compiler_rt,
libssp,
zig_libc,
analyze_pkg,
analyze_mod,
@"musl crti.o",
@"musl crtn.o",
@ -544,7 +545,7 @@ pub const InitOptions = struct {
global_cache_directory: Directory,
target: Target,
root_name: []const u8,
main_pkg: ?*Package,
main_mod: ?*Package.Module,
output_mode: std.builtin.OutputMode,
thread_pool: *ThreadPool,
dynamic_linker: ?[]const u8 = null,
@ -736,53 +737,55 @@ pub const InitOptions = struct {
pdb_out_path: ?[]const u8 = null,
};
fn addPackageTableToCacheHash(
fn addModuleTableToCacheHash(
hash: *Cache.HashHelper,
arena: *std.heap.ArenaAllocator,
pkg_table: Package.Table,
seen_table: *std.AutoHashMap(*Package, void),
mod_table: Package.Module.Deps,
seen_table: *std.AutoHashMap(*Package.Module, void),
hash_type: union(enum) { path_bytes, files: *Cache.Manifest },
) (error{OutOfMemory} || std.os.GetCwdError)!void {
const allocator = arena.allocator();
const packages = try allocator.alloc(Package.Table.KV, pkg_table.count());
const modules = try allocator.alloc(Package.Module.Deps.KV, mod_table.count());
{
// Copy over the hashmap entries to our slice
var table_it = pkg_table.iterator();
var table_it = mod_table.iterator();
var idx: usize = 0;
while (table_it.next()) |entry| : (idx += 1) {
packages[idx] = .{
modules[idx] = .{
.key = entry.key_ptr.*,
.value = entry.value_ptr.*,
};
}
}
// Sort the slice by package name
mem.sort(Package.Table.KV, packages, {}, struct {
fn lessThan(_: void, lhs: Package.Table.KV, rhs: Package.Table.KV) bool {
mem.sortUnstable(Package.Module.Deps.KV, modules, {}, struct {
fn lessThan(_: void, lhs: Package.Module.Deps.KV, rhs: Package.Module.Deps.KV) bool {
return std.mem.lessThan(u8, lhs.key, rhs.key);
}
}.lessThan);
for (packages) |pkg| {
if ((try seen_table.getOrPut(pkg.value)).found_existing) continue;
for (modules) |mod| {
if ((try seen_table.getOrPut(mod.value)).found_existing) continue;
// Finally insert the package name and path to the cache hash.
hash.addBytes(pkg.key);
hash.addBytes(mod.key);
switch (hash_type) {
.path_bytes => {
hash.addBytes(pkg.value.root_src_path);
hash.addOptionalBytes(pkg.value.root_src_directory.path);
hash.addBytes(mod.value.root_src_path);
hash.addOptionalBytes(mod.value.root.root_dir.path);
hash.addBytes(mod.value.root.sub_path);
},
.files => |man| {
const pkg_zig_file = try pkg.value.root_src_directory.join(allocator, &[_][]const u8{
pkg.value.root_src_path,
});
const pkg_zig_file = try mod.value.root.joinString(
allocator,
mod.value.root_src_path,
);
_ = try man.addFile(pkg_zig_file, null);
},
}
// Recurse to handle the package's dependencies
try addPackageTableToCacheHash(hash, arena, pkg.value.table, seen_table, hash_type);
// Recurse to handle the module's dependencies
try addModuleTableToCacheHash(hash, arena, mod.value.deps, seen_table, hash_type);
}
}
@ -839,7 +842,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
break :blk true;
// If we have no zig code to compile, no need for LLVM.
if (options.main_pkg == null)
if (options.main_mod == null)
break :blk false;
// If LLVM does not support the target, then we can't use it.
@ -869,7 +872,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
// compiler state, the second clause here can be removed so that incremental
// cache mode is used for LLVM backend too. We need some fuzz testing before
// that can be enabled.
const cache_mode = if ((use_llvm or options.main_pkg == null) and !options.disable_lld_caching)
const cache_mode = if ((use_llvm or options.main_mod == null) and !options.disable_lld_caching)
CacheMode.whole
else
options.cache_mode;
@ -925,7 +928,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
if (use_llvm) {
// If stage1 generates an object file, self-hosted linker is not
// yet sophisticated enough to handle that.
break :blk options.main_pkg != null;
break :blk options.main_mod != null;
}
break :blk false;
@ -1210,7 +1213,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
if (options.target.os.tag == .wasi) cache.hash.add(wasi_exec_model);
// TODO audit this and make sure everything is in it
const module: ?*Module = if (options.main_pkg) |main_pkg| blk: {
const module: ?*Module = if (options.main_mod) |main_mod| blk: {
// Options that are specific to zig source files, that cannot be
// modified between incremental updates.
var hash = cache.hash;
@ -1223,11 +1226,12 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
// do want to namespace different source file names because they are
// likely different compilations and therefore this would be likely to
// cause cache hits.
hash.addBytes(main_pkg.root_src_path);
hash.addOptionalBytes(main_pkg.root_src_directory.path);
hash.addBytes(main_mod.root_src_path);
hash.addOptionalBytes(main_mod.root.root_dir.path);
hash.addBytes(main_mod.root.sub_path);
{
var seen_table = std.AutoHashMap(*Package, void).init(arena);
try addPackageTableToCacheHash(&hash, &arena_allocator, main_pkg.table, &seen_table, .path_bytes);
var seen_table = std.AutoHashMap(*Package.Module, void).init(arena);
try addModuleTableToCacheHash(&hash, &arena_allocator, main_mod.deps, &seen_table, .path_bytes);
}
},
.whole => {
@ -1283,81 +1287,83 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.path = try options.local_cache_directory.join(arena, &[_][]const u8{artifact_sub_dir}),
};
const builtin_pkg = try Package.createWithDir(
gpa,
zig_cache_artifact_directory,
null,
"builtin.zig",
);
errdefer builtin_pkg.destroy(gpa);
const builtin_mod = try Package.Module.create(arena, .{
.root = .{ .root_dir = zig_cache_artifact_directory },
.root_src_path = "builtin.zig",
.fully_qualified_name = "builtin",
});
// When you're testing std, the main module is std. In that case, we'll just set the std
// module to the main one, since avoiding the errors caused by duplicating it is more
// effort than it's worth.
const main_pkg_is_std = m: {
// When you're testing std, the main module is std. In that case,
// we'll just set the std module to the main one, since avoiding
// the errors caused by duplicating it is more effort than it's
// worth.
const main_mod_is_std = m: {
const std_path = try std.fs.path.resolve(arena, &[_][]const u8{
options.zig_lib_directory.path orelse ".",
"std",
"std.zig",
});
defer arena.free(std_path);
const main_path = try std.fs.path.resolve(arena, &[_][]const u8{
main_pkg.root_src_directory.path orelse ".",
main_pkg.root_src_path,
main_mod.root.root_dir.path orelse ".",
main_mod.root.sub_path,
main_mod.root_src_path,
});
defer arena.free(main_path);
break :m mem.eql(u8, main_path, std_path);
};
const std_pkg = if (main_pkg_is_std)
main_pkg
const std_mod = if (main_mod_is_std)
main_mod
else
try Package.createWithDir(
gpa,
options.zig_lib_directory,
"std",
"std.zig",
);
try Package.Module.create(arena, .{
.root = .{
.root_dir = options.zig_lib_directory,
.sub_path = "std",
},
.root_src_path = "std.zig",
.fully_qualified_name = "std",
});
errdefer if (!main_pkg_is_std) std_pkg.destroy(gpa);
const root_mod = if (options.is_test) root_mod: {
const test_mod = if (options.test_runner_path) |test_runner| test_mod: {
const pkg = try Package.Module.create(arena, .{
.root = .{
.root_dir = Directory.cwd(),
.sub_path = std.fs.path.dirname(test_runner) orelse "",
},
.root_src_path = std.fs.path.basename(test_runner),
.fully_qualified_name = "root",
});
const root_pkg = if (options.is_test) root_pkg: {
const test_pkg = if (options.test_runner_path) |test_runner| test_pkg: {
const test_dir = std.fs.path.dirname(test_runner);
const basename = std.fs.path.basename(test_runner);
const pkg = try Package.create(gpa, test_dir, basename);
pkg.deps = try main_mod.deps.clone(arena);
break :test_mod pkg;
} else try Package.Module.create(arena, .{
.root = .{
.root_dir = options.zig_lib_directory,
},
.root_src_path = "test_runner.zig",
.fully_qualified_name = "root",
});
// copy package table from main_pkg to root_pkg
pkg.table = try main_pkg.table.clone(gpa);
break :test_pkg pkg;
} else try Package.createWithDir(
gpa,
options.zig_lib_directory,
null,
"test_runner.zig",
);
errdefer test_pkg.destroy(gpa);
break :root_mod test_mod;
} else main_mod;
break :root_pkg test_pkg;
} else main_pkg;
errdefer if (options.is_test) root_pkg.destroy(gpa);
const compiler_rt_pkg = if (include_compiler_rt and options.output_mode == .Obj) compiler_rt_pkg: {
break :compiler_rt_pkg try Package.createWithDir(
gpa,
options.zig_lib_directory,
null,
"compiler_rt.zig",
);
const compiler_rt_mod = if (include_compiler_rt and options.output_mode == .Obj) compiler_rt_mod: {
break :compiler_rt_mod try Package.Module.create(arena, .{
.root = .{
.root_dir = options.zig_lib_directory,
},
.root_src_path = "compiler_rt.zig",
.fully_qualified_name = "compiler_rt",
});
} else null;
errdefer if (compiler_rt_pkg) |p| p.destroy(gpa);
try main_pkg.add(gpa, "builtin", builtin_pkg);
try main_pkg.add(gpa, "root", root_pkg);
try main_pkg.add(gpa, "std", std_pkg);
if (compiler_rt_pkg) |p| {
try main_pkg.add(gpa, "compiler_rt", p);
{
try main_mod.deps.ensureUnusedCapacity(arena, 4);
main_mod.deps.putAssumeCapacity("builtin", builtin_mod);
main_mod.deps.putAssumeCapacity("root", root_mod);
main_mod.deps.putAssumeCapacity("std", std_mod);
if (compiler_rt_mod) |m|
main_mod.deps.putAssumeCapacity("compiler_rt", m);
}
// Pre-open the directory handles for cached ZIR code so that it does not need
@ -1395,8 +1401,8 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
module.* = .{
.gpa = gpa,
.comp = comp,
.main_pkg = main_pkg,
.root_pkg = root_pkg,
.main_mod = main_mod,
.root_mod = root_mod,
.zig_cache_artifact_directory = zig_cache_artifact_directory,
.global_zir_cache = global_zir_cache,
.local_zir_cache = local_zir_cache,
@ -1664,7 +1670,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
errdefer bin_file.destroy();
comp.* = .{
.gpa = gpa,
.arena_state = arena_allocator.state,
.arena = arena_allocator,
.zig_lib_directory = options.zig_lib_directory,
.local_cache_directory = options.local_cache_directory,
.global_cache_directory = options.global_cache_directory,
@ -1982,7 +1988,8 @@ pub fn destroy(self: *Compilation) void {
if (self.owned_link_dir) |*dir| dir.close();
// This destroys `self`.
self.arena_state.promote(gpa).deinit();
var arena_instance = self.arena;
arena_instance.deinit();
}
pub fn clearMiscFailures(comp: *Compilation) void {
@ -2005,8 +2012,8 @@ fn restorePrevZigCacheArtifactDirectory(comp: *Compilation, directory: *Director
// This is only for cleanup purposes; Module.deinit calls close
// on the handle of zig_cache_artifact_directory.
if (comp.bin_file.options.module) |module| {
const builtin_pkg = module.main_pkg.table.get("builtin").?;
module.zig_cache_artifact_directory = builtin_pkg.root_src_directory;
const builtin_mod = module.main_mod.deps.get("builtin").?;
module.zig_cache_artifact_directory = builtin_mod.root.root_dir;
}
}
@ -2148,8 +2155,8 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
// Make sure std.zig is inside the import_table. We unconditionally need
// it for start.zig.
const std_pkg = module.main_pkg.table.get("std").?;
_ = try module.importPkg(std_pkg);
const std_mod = module.main_mod.deps.get("std").?;
_ = try module.importPkg(std_mod);
// Normally we rely on importing std to in turn import the root source file
// in the start code, but when using the stage1 backend that won't happen,
@ -2158,11 +2165,11 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
// Likewise, in the case of `zig test`, the test runner is the root source file,
// and so there is nothing to import the main file.
if (comp.bin_file.options.is_test) {
_ = try module.importPkg(module.main_pkg);
_ = try module.importPkg(module.main_mod);
}
if (module.main_pkg.table.get("compiler_rt")) |compiler_rt_pkg| {
_ = try module.importPkg(compiler_rt_pkg);
if (module.main_mod.deps.get("compiler_rt")) |compiler_rt_mod| {
_ = try module.importPkg(compiler_rt_mod);
}
// Put a work item in for every known source file to detect if
@ -2185,13 +2192,13 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
}
}
try comp.work_queue.writeItem(.{ .analyze_pkg = std_pkg });
try comp.work_queue.writeItem(.{ .analyze_mod = std_mod });
if (comp.bin_file.options.is_test) {
try comp.work_queue.writeItem(.{ .analyze_pkg = module.main_pkg });
try comp.work_queue.writeItem(.{ .analyze_mod = module.main_mod });
}
if (module.main_pkg.table.get("compiler_rt")) |compiler_rt_pkg| {
try comp.work_queue.writeItem(.{ .analyze_pkg = compiler_rt_pkg });
if (module.main_mod.deps.get("compiler_rt")) |compiler_rt_mod| {
try comp.work_queue.writeItem(.{ .analyze_mod = compiler_rt_mod });
}
}
@ -2420,19 +2427,17 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
comptime assert(link_hash_implementation_version == 10);
if (comp.bin_file.options.module) |mod| {
const main_zig_file = try mod.main_pkg.root_src_directory.join(arena, &[_][]const u8{
mod.main_pkg.root_src_path,
});
const main_zig_file = try mod.main_mod.root.joinString(arena, mod.main_mod.root_src_path);
_ = try man.addFile(main_zig_file, null);
{
var seen_table = std.AutoHashMap(*Package, void).init(arena);
var seen_table = std.AutoHashMap(*Package.Module, void).init(arena);
// Skip builtin.zig; it is useless as an input, and we don't want to have to
// write it before checking for a cache hit.
const builtin_pkg = mod.main_pkg.table.get("builtin").?;
try seen_table.put(builtin_pkg, {});
const builtin_mod = mod.main_mod.deps.get("builtin").?;
try seen_table.put(builtin_mod, {});
try addPackageTableToCacheHash(&man.hash, &arena_allocator, mod.main_pkg.table, &seen_table, .{ .files = man });
try addModuleTableToCacheHash(&man.hash, &arena_allocator, mod.main_mod.deps, &seen_table, .{ .files = man });
}
// Synchronize with other matching comments: ZigOnlyHashStuff
@ -2616,23 +2621,19 @@ fn reportMultiModuleErrors(mod: *Module) !void {
errdefer for (notes[0..i]) |*n| n.deinit(mod.gpa);
note.* = switch (ref) {
.import => |loc| blk: {
const name = try loc.file_scope.pkg.getName(mod.gpa, mod.*);
defer mod.gpa.free(name);
break :blk try Module.ErrorMsg.init(
mod.gpa,
loc,
"imported from module {s}",
.{name},
.{loc.file_scope.mod.fully_qualified_name},
);
},
.root => |pkg| blk: {
const name = try pkg.getName(mod.gpa, mod.*);
defer mod.gpa.free(name);
break :blk try Module.ErrorMsg.init(
mod.gpa,
.{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
"root of module {s}",
.{name},
.{pkg.fully_qualified_name},
);
},
};
@ -3564,8 +3565,8 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
decl.analysis = .codegen_failure_retryable;
};
},
.analyze_pkg => |pkg| {
const named_frame = tracy.namedFrame("analyze_pkg");
.analyze_mod => |pkg| {
const named_frame = tracy.namedFrame("analyze_mod");
defer named_frame.end();
const module = comp.bin_file.options.module.?;
@ -3904,17 +3905,12 @@ pub fn obtainWin32ResourceCacheManifest(comp: *const Compilation) Cache.Manifest
return man;
}
test "cImport" {
_ = cImport;
}
pub const CImportResult = struct {
out_zig_path: []u8,
cache_hit: bool,
errors: std.zig.ErrorBundle,
pub fn deinit(result: *CImportResult, gpa: std.mem.Allocator) void {
gpa.free(result.out_zig_path);
result.errors.deinit(gpa);
}
};
@ -4059,7 +4055,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
};
}
const out_zig_path = try comp.local_cache_directory.join(comp.gpa, &[_][]const u8{
const out_zig_path = try comp.local_cache_directory.join(comp.arena.allocator(), &.{
"o", &digest, cimport_zig_basename,
});
if (comp.verbose_cimport) {
@ -4214,17 +4210,9 @@ fn reportRetryableAstGenError(
},
};
const err_msg = if (file.pkg.root_src_directory.path) |dir_path|
try Module.ErrorMsg.create(
gpa,
src_loc,
"unable to load '{s}" ++ std.fs.path.sep_str ++ "{s}': {s}",
.{ dir_path, file.sub_file_path, @errorName(err) },
)
else
try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{s}': {s}", .{
file.sub_file_path, @errorName(err),
});
const err_msg = try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{}{s}': {s}", .{
file.mod.root, file.sub_file_path, @errorName(err),
});
errdefer err_msg.destroy(gpa);
{
@ -4244,17 +4232,10 @@ fn reportRetryableEmbedFileError(
const src_loc: Module.SrcLoc = mod.declPtr(embed_file.owner_decl).srcLoc(mod);
const err_msg = if (embed_file.pkg.root_src_directory.path) |dir_path|
try Module.ErrorMsg.create(
gpa,
src_loc,
"unable to load '{s}" ++ std.fs.path.sep_str ++ "{s}': {s}",
.{ dir_path, embed_file.sub_file_path, @errorName(err) },
)
else
try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{s}': {s}", .{
embed_file.sub_file_path, @errorName(err),
});
const err_msg = try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{}{s}': {s}", .{
embed_file.mod.root, embed_file.sub_file_path, @errorName(err),
});
errdefer err_msg.destroy(gpa);
{
@ -6377,13 +6358,13 @@ fn buildOutputFromZig(
const tracy_trace = trace(@src());
defer tracy_trace.end();
std.debug.assert(output_mode != .Exe);
assert(output_mode != .Exe);
var main_pkg: Package = .{
.root_src_directory = comp.zig_lib_directory,
var main_mod: Package.Module = .{
.root = .{ .root_dir = comp.zig_lib_directory },
.root_src_path = src_basename,
.fully_qualified_name = "root",
};
defer main_pkg.deinitTable(comp.gpa);
const root_name = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len];
const target = comp.getTarget();
const bin_basename = try std.zig.binNameAlloc(comp.gpa, .{
@ -6404,7 +6385,7 @@ fn buildOutputFromZig(
.cache_mode = .whole,
.target = target,
.root_name = root_name,
.main_pkg = &main_pkg,
.main_mod = &main_mod,
.output_mode = output_mode,
.thread_pool = comp.thread_pool,
.libc_installation = comp.bin_file.options.libc_installation,
@ -6481,7 +6462,7 @@ pub fn build_crt_file(
.cache_mode = .whole,
.target = target,
.root_name = root_name,
.main_pkg = null,
.main_mod = null,
.output_mode = output_mode,
.thread_pool = comp.thread_pool,
.libc_installation = comp.bin_file.options.libc_installation,

View File

@ -55,10 +55,10 @@ comp: *Compilation,
/// Where build artifacts and incremental compilation metadata serialization go.
zig_cache_artifact_directory: Compilation.Directory,
/// Pointer to externally managed resource.
root_pkg: *Package,
/// Normally, `main_pkg` and `root_pkg` are the same. The exception is `zig test`, in which
/// `root_pkg` is the test runner, and `main_pkg` is the user's source file which has the tests.
main_pkg: *Package,
root_mod: *Package.Module,
/// Normally, `main_mod` and `root_mod` are the same. The exception is `zig test`, in which
/// `root_mod` is the test runner, and `main_mod` is the user's source file which has the tests.
main_mod: *Package.Module,
sema_prog_node: std.Progress.Node = undefined,
/// Used by AstGen worker to load and store ZIR cache.
@ -973,8 +973,8 @@ pub const File = struct {
tree: Ast,
/// Whether this is populated or not depends on `zir_loaded`.
zir: Zir,
/// Package that this file is a part of, managed externally.
pkg: *Package,
/// Module that this file is a part of, managed externally.
mod: *Package.Module,
/// Whether this file is a part of multiple packages. This is an error condition which will be reported after AstGen.
multi_pkg: bool = false,
/// List of references to this file, used for multi-package errors.
@ -998,8 +998,8 @@ pub const File = struct {
pub const Reference = union(enum) {
/// The file is imported directly (i.e. not as a package) with @import.
import: SrcLoc,
/// The file is the root of a package.
root: *Package,
/// The file is the root of a module.
root: *Package.Module,
};
pub fn unload(file: *File, gpa: Allocator) void {
@ -1058,14 +1058,9 @@ pub const File = struct {
.stat = file.stat,
};
const root_dir_path = file.pkg.root_src_directory.path orelse ".";
log.debug("File.getSource, not cached. pkgdir={s} sub_file_path={s}", .{
root_dir_path, file.sub_file_path,
});
// Keep track of inode, file size, mtime, hash so we can detect which files
// have been modified when an incremental update is requested.
var f = try file.pkg.root_src_directory.handle.openFile(file.sub_file_path, .{});
var f = try file.mod.root.openFile(file.sub_file_path, .{});
defer f.close();
const stat = try f.stat();
@ -1134,14 +1129,12 @@ pub const File = struct {
return ip.getOrPutTrailingString(mod.gpa, ip.string_bytes.items.len - start);
}
/// Returns the full path to this file relative to its package.
pub fn fullPath(file: File, ally: Allocator) ![]u8 {
return file.pkg.root_src_directory.join(ally, &[_][]const u8{file.sub_file_path});
return file.mod.root.joinString(ally, file.sub_file_path);
}
/// Returns the full path to this file relative to its package.
pub fn fullPathZ(file: File, ally: Allocator) ![:0]u8 {
return file.pkg.root_src_directory.joinZ(ally, &[_][]const u8{file.sub_file_path});
return file.mod.root.joinStringZ(ally, file.sub_file_path);
}
pub fn dumpSrc(file: *File, src: LazySrcLoc) void {
@ -1181,10 +1174,10 @@ pub const File = struct {
}
const pkg = switch (ref) {
.import => |loc| loc.file_scope.pkg,
.import => |loc| loc.file_scope.mod,
.root => |pkg| pkg,
};
if (pkg != file.pkg) file.multi_pkg = true;
if (pkg != file.mod) file.multi_pkg = true;
}
/// Mark this file and every file referenced by it as multi_pkg and report an
@ -1226,7 +1219,7 @@ pub const EmbedFile = struct {
bytes: [:0]const u8,
stat: Cache.File.Stat,
/// Package that this file is a part of, managed externally.
pkg: *Package,
mod: *Package.Module,
/// The Decl that was created from the `@embedFile` to own this resource.
/// This is how zig knows what other Decl objects to invalidate if the file
/// changes on disk.
@ -2542,28 +2535,6 @@ pub fn deinit(mod: *Module) void {
}
mod.deletion_set.deinit(gpa);
// The callsite of `Compilation.create` owns the `main_pkg`, however
// Module owns the builtin and std packages that it adds.
if (mod.main_pkg.table.fetchRemove("builtin")) |kv| {
gpa.free(kv.key);
kv.value.destroy(gpa);
}
if (mod.main_pkg.table.fetchRemove("std")) |kv| {
gpa.free(kv.key);
// It's possible for main_pkg to be std when running 'zig test'! In this case, we must not
// destroy it, since it would lead to a double-free.
if (kv.value != mod.main_pkg) {
kv.value.destroy(gpa);
}
}
if (mod.main_pkg.table.fetchRemove("root")) |kv| {
gpa.free(kv.key);
}
if (mod.root_pkg != mod.main_pkg) {
mod.root_pkg.destroy(gpa);
}
mod.compile_log_text.deinit(gpa);
mod.zig_cache_artifact_directory.handle.close();
@ -2710,18 +2681,19 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
const gpa = mod.gpa;
// In any case we need to examine the stat of the file to determine the course of action.
var source_file = try file.pkg.root_src_directory.handle.openFile(file.sub_file_path, .{});
var source_file = try file.mod.root.openFile(file.sub_file_path, .{});
defer source_file.close();
const stat = try source_file.stat();
const want_local_cache = file.pkg == mod.main_pkg;
const want_local_cache = file.mod == mod.main_mod;
const digest = hash: {
var path_hash: Cache.HashHelper = .{};
path_hash.addBytes(build_options.version);
path_hash.add(builtin.zig_backend);
if (!want_local_cache) {
path_hash.addOptionalBytes(file.pkg.root_src_directory.path);
path_hash.addOptionalBytes(file.mod.root.root_dir.path);
path_hash.addBytes(file.mod.root.sub_path);
}
path_hash.addBytes(file.sub_file_path);
break :hash path_hash.final();
@ -2946,10 +2918,8 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
},
};
cache_file.writevAll(&iovecs) catch |err| {
const pkg_path = file.pkg.root_src_directory.path orelse ".";
const cache_path = cache_directory.path orelse ".";
log.warn("unable to write cached ZIR code for {s}/{s} to {s}/{s}: {s}", .{
pkg_path, file.sub_file_path, cache_path, &digest, @errorName(err),
log.warn("unable to write cached ZIR code for {}{s} to {}{s}: {s}", .{
file.mod.root, file.sub_file_path, cache_directory, &digest, @errorName(err),
});
};
@ -3154,37 +3124,27 @@ pub fn populateBuiltinFile(mod: *Module) !void {
defer tracy.end();
const comp = mod.comp;
const pkg_and_file = blk: {
const builtin_mod, const file = blk: {
comp.mutex.lock();
defer comp.mutex.unlock();
const builtin_pkg = mod.main_pkg.table.get("builtin").?;
const result = try mod.importPkg(builtin_pkg);
break :blk .{
.file = result.file,
.pkg = builtin_pkg,
};
const builtin_mod = mod.main_mod.deps.get("builtin").?;
const result = try mod.importPkg(builtin_mod);
break :blk .{ builtin_mod, result.file };
};
const file = pkg_and_file.file;
const builtin_pkg = pkg_and_file.pkg;
const gpa = mod.gpa;
file.source = try comp.generateBuiltinZigSource(gpa);
file.source_loaded = true;
if (builtin_pkg.root_src_directory.handle.statFile(builtin_pkg.root_src_path)) |stat| {
if (builtin_mod.root.statFile(builtin_mod.root_src_path)) |stat| {
if (stat.size != file.source.len) {
const full_path = try builtin_pkg.root_src_directory.join(gpa, &.{
builtin_pkg.root_src_path,
});
defer gpa.free(full_path);
log.warn(
"the cached file '{s}' had the wrong size. Expected {d}, found {d}. " ++
"the cached file '{}{s}' had the wrong size. Expected {d}, found {d}. " ++
"Overwriting with correct file contents now",
.{ full_path, file.source.len, stat.size },
.{ builtin_mod.root, builtin_mod.root_src_path, file.source.len, stat.size },
);
try writeBuiltinFile(file, builtin_pkg);
try writeBuiltinFile(file, builtin_mod);
} else {
file.stat = .{
.size = stat.size,
@ -3198,7 +3158,7 @@ pub fn populateBuiltinFile(mod: *Module) !void {
error.PipeBusy => unreachable, // it's not a pipe
error.WouldBlock => unreachable, // not asking for non-blocking I/O
error.FileNotFound => try writeBuiltinFile(file, builtin_pkg),
error.FileNotFound => try writeBuiltinFile(file, builtin_mod),
else => |e| return e,
}
@ -3212,8 +3172,8 @@ pub fn populateBuiltinFile(mod: *Module) !void {
file.status = .success_zir;
}
fn writeBuiltinFile(file: *File, builtin_pkg: *Package) !void {
var af = try builtin_pkg.root_src_directory.handle.atomicFile(builtin_pkg.root_src_path, .{});
fn writeBuiltinFile(file: *File, builtin_mod: *Package.Module) !void {
var af = try builtin_mod.root.atomicFile(builtin_mod.root_src_path, .{});
defer af.deinit();
try af.file.writeAll(file.source);
try af.finish();
@ -3609,7 +3569,8 @@ pub fn updateEmbedFile(mod: *Module, embed_file: *EmbedFile) SemaError!void {
}
}
pub fn semaPkg(mod: *Module, pkg: *Package) !void {
/// https://github.com/ziglang/zig/issues/14307
pub fn semaPkg(mod: *Module, pkg: *Package.Module) !void {
const file = (try mod.importPkg(pkg)).file;
return mod.semaFile(file);
}
@ -3711,13 +3672,11 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
return error.AnalysisFail;
};
const resolved_path = std.fs.path.resolve(
gpa,
if (file.pkg.root_src_directory.path) |pkg_path|
&[_][]const u8{ pkg_path, file.sub_file_path }
else
&[_][]const u8{file.sub_file_path},
) catch |err| {
const resolved_path = std.fs.path.resolve(gpa, &.{
file.mod.root.root_dir.path orelse ".",
file.mod.root.sub_path,
file.sub_file_path,
}) catch |err| {
try reportRetryableFileError(mod, file, "unable to resolve path: {s}", .{@errorName(err)});
return error.AnalysisFail;
};
@ -3748,8 +3707,8 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
// TODO: figure out how this works under incremental changes to builtin.zig!
const builtin_type_target_index: InternPool.Index = blk: {
const std_mod = mod.main_pkg.table.get("std").?;
if (decl.getFileScope(mod).pkg != std_mod) break :blk .none;
const std_mod = mod.main_mod.deps.get("std").?;
if (decl.getFileScope(mod).mod != std_mod) break :blk .none;
// We're in the std module.
const std_file = (try mod.importPkg(std_mod)).file;
const std_decl = mod.declPtr(std_file.root_decl.unwrap().?);
@ -4042,14 +4001,17 @@ pub const ImportFileResult = struct {
is_pkg: bool,
};
pub fn importPkg(mod: *Module, pkg: *Package) !ImportFileResult {
/// https://github.com/ziglang/zig/issues/14307
pub fn importPkg(mod: *Module, pkg: *Package.Module) !ImportFileResult {
const gpa = mod.gpa;
// The resolved path is used as the key in the import table, to detect if
// an import refers to the same as another, despite different relative paths
// or differently mapped package names.
const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
pkg.root_src_directory.path orelse ".", pkg.root_src_path,
const resolved_path = try std.fs.path.resolve(gpa, &.{
pkg.root.root_dir.path orelse ".",
pkg.root.sub_path,
pkg.root_src_path,
});
var keep_resolved_path = false;
defer if (!keep_resolved_path) gpa.free(resolved_path);
@ -4083,7 +4045,7 @@ pub fn importPkg(mod: *Module, pkg: *Package) !ImportFileResult {
.tree = undefined,
.zir = undefined,
.status = .never_loaded,
.pkg = pkg,
.mod = pkg,
.root_decl = .none,
};
try new_file.addReference(mod.*, .{ .root = pkg });
@ -4100,29 +4062,33 @@ pub fn importFile(
import_string: []const u8,
) !ImportFileResult {
if (std.mem.eql(u8, import_string, "std")) {
return mod.importPkg(mod.main_pkg.table.get("std").?);
return mod.importPkg(mod.main_mod.deps.get("std").?);
}
if (std.mem.eql(u8, import_string, "builtin")) {
return mod.importPkg(mod.main_pkg.table.get("builtin").?);
return mod.importPkg(mod.main_mod.deps.get("builtin").?);
}
if (std.mem.eql(u8, import_string, "root")) {
return mod.importPkg(mod.root_pkg);
return mod.importPkg(mod.root_mod);
}
if (cur_file.pkg.table.get(import_string)) |pkg| {
if (cur_file.mod.deps.get(import_string)) |pkg| {
return mod.importPkg(pkg);
}
if (!mem.endsWith(u8, import_string, ".zig")) {
return error.PackageNotFound;
return error.ModuleNotFound;
}
const gpa = mod.gpa;
// The resolved path is used as the key in the import table, to detect if
// an import refers to the same as another, despite different relative paths
// or differently mapped package names.
const cur_pkg_dir_path = cur_file.pkg.root_src_directory.path orelse ".";
const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
cur_pkg_dir_path, cur_file.sub_file_path, "..", import_string,
const resolved_path = try std.fs.path.resolve(gpa, &.{
cur_file.mod.root.root_dir.path orelse ".",
cur_file.mod.root.sub_path,
cur_file.sub_file_path,
"..",
import_string,
});
var keep_resolved_path = false;
defer if (!keep_resolved_path) gpa.free(resolved_path);
@ -4137,7 +4103,10 @@ pub fn importFile(
const new_file = try gpa.create(File);
errdefer gpa.destroy(new_file);
const resolved_root_path = try std.fs.path.resolve(gpa, &[_][]const u8{cur_pkg_dir_path});
const resolved_root_path = try std.fs.path.resolve(gpa, &.{
cur_file.mod.root.root_dir.path orelse ".",
cur_file.mod.root.sub_path,
});
defer gpa.free(resolved_root_path);
const sub_file_path = p: {
@ -4151,7 +4120,7 @@ pub fn importFile(
{
break :p try gpa.dupe(u8, resolved_path);
}
return error.ImportOutsidePkgPath;
return error.ImportOutsideModulePath;
};
errdefer gpa.free(sub_file_path);
@ -4171,7 +4140,7 @@ pub fn importFile(
.tree = undefined,
.zir = undefined,
.status = .never_loaded,
.pkg = cur_file.pkg,
.mod = cur_file.mod,
.root_decl = .none,
};
return ImportFileResult{
@ -4184,9 +4153,11 @@ pub fn importFile(
pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*EmbedFile {
const gpa = mod.gpa;
if (cur_file.pkg.table.get(import_string)) |pkg| {
const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
pkg.root_src_directory.path orelse ".", pkg.root_src_path,
if (cur_file.mod.deps.get(import_string)) |pkg| {
const resolved_path = try std.fs.path.resolve(gpa, &.{
pkg.root.root_dir.path orelse ".",
pkg.root.sub_path,
pkg.root_src_path,
});
var keep_resolved_path = false;
defer if (!keep_resolved_path) gpa.free(resolved_path);
@ -4203,10 +4174,14 @@ pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*Emb
// The resolved path is used as the key in the table, to detect if a file
// refers to the same as another, despite different relative paths.
const cur_pkg_dir_path = cur_file.pkg.root_src_directory.path orelse ".";
const resolved_path = try std.fs.path.resolve(gpa, &[_][]const u8{
cur_pkg_dir_path, cur_file.sub_file_path, "..", import_string,
const resolved_path = try std.fs.path.resolve(gpa, &.{
cur_file.mod.root.root_dir.path orelse ".",
cur_file.mod.root.sub_path,
cur_file.sub_file_path,
"..",
import_string,
});
var keep_resolved_path = false;
defer if (!keep_resolved_path) gpa.free(resolved_path);
@ -4214,7 +4189,10 @@ pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*Emb
errdefer assert(mod.embed_table.remove(resolved_path));
if (gop.found_existing) return gop.value_ptr.*;
const resolved_root_path = try std.fs.path.resolve(gpa, &[_][]const u8{cur_pkg_dir_path});
const resolved_root_path = try std.fs.path.resolve(gpa, &.{
cur_file.mod.root.root_dir.path orelse ".",
cur_file.mod.root.sub_path,
});
defer gpa.free(resolved_root_path);
const sub_file_path = p: {
@ -4228,16 +4206,17 @@ pub fn embedFile(mod: *Module, cur_file: *File, import_string: []const u8) !*Emb
{
break :p try gpa.dupe(u8, resolved_path);
}
return error.ImportOutsidePkgPath;
return error.ImportOutsideModulePath;
};
errdefer gpa.free(sub_file_path);
return newEmbedFile(mod, cur_file.pkg, sub_file_path, resolved_path, &keep_resolved_path, gop);
return newEmbedFile(mod, cur_file.mod, sub_file_path, resolved_path, &keep_resolved_path, gop);
}
/// https://github.com/ziglang/zig/issues/14307
fn newEmbedFile(
mod: *Module,
pkg: *Package,
pkg: *Package.Module,
sub_file_path: []const u8,
resolved_path: []const u8,
keep_resolved_path: *bool,
@ -4248,7 +4227,7 @@ fn newEmbedFile(
const new_file = try gpa.create(EmbedFile);
errdefer gpa.destroy(new_file);
var file = try pkg.root_src_directory.handle.openFile(sub_file_path, .{});
var file = try pkg.root.openFile(sub_file_path, .{});
defer file.close();
const actual_stat = try file.stat();
@ -4275,14 +4254,14 @@ fn newEmbedFile(
.sub_file_path = sub_file_path,
.bytes = bytes,
.stat = stat,
.pkg = pkg,
.mod = pkg,
.owner_decl = undefined, // Set by Sema immediately after this function returns.
};
return new_file;
}
pub fn detectEmbedFileUpdate(mod: *Module, embed_file: *EmbedFile) !void {
var file = try embed_file.pkg.root_src_directory.handle.openFile(embed_file.sub_file_path, .{});
var file = try embed_file.mod.root.openFile(embed_file.sub_file_path, .{});
defer file.close();
const stat = try file.stat();
@ -4455,21 +4434,21 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
gop.key_ptr.* = new_decl_index;
// Exported decls, comptime decls, usingnamespace decls, and
// test decls if in test mode, get analyzed.
const decl_pkg = namespace.file_scope.pkg;
const decl_mod = namespace.file_scope.mod;
const want_analysis = is_exported or switch (decl_name_index) {
0 => true, // comptime or usingnamespace decl
1 => blk: {
// test decl with no name. Skip the part where we check against
// the test name filter.
if (!comp.bin_file.options.is_test) break :blk false;
if (decl_pkg != mod.main_pkg) break :blk false;
if (decl_mod != mod.main_mod) break :blk false;
try mod.test_functions.put(gpa, new_decl_index, {});
break :blk true;
},
else => blk: {
if (!is_named_test) break :blk false;
if (!comp.bin_file.options.is_test) break :blk false;
if (decl_pkg != mod.main_pkg) break :blk false;
if (decl_mod != mod.main_mod) break :blk false;
if (comp.test_filter) |test_filter| {
if (mem.indexOf(u8, ip.stringToSlice(decl_name), test_filter) == null) {
break :blk false;
@ -5596,8 +5575,8 @@ pub fn populateTestFunctions(
) !void {
const gpa = mod.gpa;
const ip = &mod.intern_pool;
const builtin_pkg = mod.main_pkg.table.get("builtin").?;
const builtin_file = (mod.importPkg(builtin_pkg) catch unreachable).file;
const builtin_mod = mod.main_mod.deps.get("builtin").?;
const builtin_file = (mod.importPkg(builtin_mod) catch unreachable).file;
const root_decl = mod.declPtr(builtin_file.root_decl.unwrap().?);
const builtin_namespace = mod.namespacePtr(root_decl.src_namespace);
const test_functions_str = try ip.getOrPutString(gpa, "test_functions");

File diff suppressed because it is too large Load Diff

1557
src/Package/Fetch.zig Normal file

File diff suppressed because it is too large Load Diff

View File

@ -11,8 +11,6 @@ const Allocator = mem.Allocator;
const Sha1 = std.crypto.hash.Sha1;
const assert = std.debug.assert;
const ProgressReader = @import("Package.zig").ProgressReader;
pub const oid_length = Sha1.digest_length;
pub const fmt_oid_length = 2 * oid_length;
/// The ID of a Git object (an SHA-1 hash).

View File

@ -1,14 +1,21 @@
pub const max_bytes = 10 * 1024 * 1024;
pub const basename = "build.zig.zon";
pub const Hash = std.crypto.hash.sha2.Sha256;
pub const Digest = [Hash.digest_length]u8;
pub const multihash_len = 1 + 1 + Hash.digest_length;
pub const multihash_hex_digest_len = 2 * multihash_len;
pub const MultiHashHexDigest = [multihash_hex_digest_len]u8;
pub const Dependency = struct {
location: union(enum) {
url: []const u8,
path: []const u8,
},
location: Location,
location_tok: Ast.TokenIndex,
hash: ?[]const u8,
hash_tok: Ast.TokenIndex,
pub const Location = union(enum) {
url: []const u8,
path: []const u8,
};
};
pub const ErrorMessage = struct {
@ -45,18 +52,22 @@ comptime {
assert(@intFromEnum(multihash_function) < 127);
assert(Hash.digest_length < 127);
}
pub const multihash_len = 1 + 1 + Hash.digest_length;
name: []const u8,
version: std.SemanticVersion,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),
paths: std.StringArrayHashMapUnmanaged(void),
errors: []ErrorMessage,
arena_state: std.heap.ArenaAllocator.State,
pub const ParseOptions = struct {
allow_missing_paths_field: bool = false,
};
pub const Error = Allocator.Error;
pub fn parse(gpa: Allocator, ast: std.zig.Ast) Error!Manifest {
pub fn parse(gpa: Allocator, ast: std.zig.Ast, options: ParseOptions) Error!Manifest {
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
assert(node_tags[0] == .root);
@ -74,11 +85,14 @@ pub fn parse(gpa: Allocator, ast: std.zig.Ast) Error!Manifest {
.name = undefined,
.version = undefined,
.dependencies = .{},
.paths = .{},
.allow_missing_paths_field = options.allow_missing_paths_field,
.buf = .{},
};
defer p.buf.deinit(gpa);
defer p.errors.deinit(gpa);
defer p.dependencies.deinit(gpa);
defer p.paths.deinit(gpa);
p.parseRoot(main_node_index) catch |err| switch (err) {
error.ParseFailure => assert(p.errors.items.len > 0),
@ -89,6 +103,7 @@ pub fn parse(gpa: Allocator, ast: std.zig.Ast) Error!Manifest {
.name = p.name,
.version = p.version,
.dependencies = try p.dependencies.clone(p.arena),
.paths = try p.paths.clone(p.arena),
.errors = try p.arena.dupe(ErrorMessage, p.errors.items),
.arena_state = arena_instance.state,
};
@ -117,8 +132,8 @@ test hex64 {
try std.testing.expectEqualStrings("[00efcdab78563412]", s);
}
pub fn hexDigest(digest: [Hash.digest_length]u8) [multihash_len * 2]u8 {
var result: [multihash_len * 2]u8 = undefined;
pub fn hexDigest(digest: Digest) MultiHashHexDigest {
var result: MultiHashHexDigest = undefined;
result[0] = hex_charset[@intFromEnum(multihash_function) >> 4];
result[1] = hex_charset[@intFromEnum(multihash_function) & 15];
@ -143,6 +158,8 @@ const Parse = struct {
name: []const u8,
version: std.SemanticVersion,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),
paths: std.StringArrayHashMapUnmanaged(void),
allow_missing_paths_field: bool,
const InnerError = error{ ParseFailure, OutOfMemory };
@ -158,6 +175,7 @@ const Parse = struct {
var have_name = false;
var have_version = false;
var have_included_paths = false;
for (struct_init.ast.fields) |field_init| {
const name_token = ast.firstToken(field_init) - 2;
@ -167,6 +185,9 @@ const Parse = struct {
// that is desirable on a per-field basis.
if (mem.eql(u8, field_name, "dependencies")) {
try parseDependencies(p, field_init);
} else if (mem.eql(u8, field_name, "paths")) {
have_included_paths = true;
try parseIncludedPaths(p, field_init);
} else if (mem.eql(u8, field_name, "name")) {
p.name = try parseString(p, field_init);
have_name = true;
@ -190,6 +211,14 @@ const Parse = struct {
if (!have_version) {
try appendError(p, main_token, "missing top-level 'version' field", .{});
}
if (!have_included_paths) {
if (p.allow_missing_paths_field) {
try p.paths.put(p.gpa, "", {});
} else {
try appendError(p, main_token, "missing top-level 'paths' field", .{});
}
}
}
fn parseDependencies(p: *Parse, node: Ast.Node.Index) !void {
@ -222,9 +251,9 @@ const Parse = struct {
var dep: Dependency = .{
.location = undefined,
.location_tok = undefined,
.location_tok = 0,
.hash = null,
.hash_tok = undefined,
.hash_tok = 0,
};
var has_location = false;
@ -277,6 +306,25 @@ const Parse = struct {
return dep;
}
/// Parses the top-level `paths` field of the manifest: a list literal of
/// strings naming the files/directories included in the package.
/// Each entry is normalized before being stored in `p.paths` so it can
/// later be compared against file system paths.
fn parseIncludedPaths(p: *Parse, node: Ast.Node.Index) !void {
    const ast = p.ast;
    const main_tokens = ast.nodes.items(.main_token);

    var node_buf: [2]Ast.Node.Index = undefined;
    const list = ast.fullArrayInit(&node_buf, node) orelse
        return fail(p, main_tokens[node], "expected paths expression to be a struct", .{});

    for (list.ast.elements) |element| {
        const raw_path = try parseString(p, element);
        // Normalized so that it can be used in string comparisons against
        // file system paths.
        const resolved = try std.fs.path.resolve(p.arena, &.{raw_path});
        try p.paths.put(p.gpa, resolved, {});
    }
}
fn parseString(p: *Parse, node: Ast.Node.Index) ![]const u8 {
const ast = p.ast;
const node_tags = ast.nodes.items(.tag);
@ -309,10 +357,9 @@ const Parse = struct {
}
}
const hex_multihash_len = 2 * Manifest.multihash_len;
if (h.len != hex_multihash_len) {
if (h.len != multihash_hex_digest_len) {
return fail(p, tok, "wrong hash size. expected: {d}, found: {d}", .{
hex_multihash_len, h.len,
multihash_hex_digest_len, h.len,
});
}

34
src/Package/Module.zig Normal file
View File

@ -0,0 +1,34 @@
//! Corresponds to something that Zig source code can `@import`.
//! Not to be confused with src/Module.zig which should be renamed
//! to something else. https://github.com/ziglang/zig/issues/14307

/// Only files inside this directory can be imported.
root: Package.Path,
/// Relative to `root`. May contain path separators.
root_src_path: []const u8,
/// Name used in compile errors. Looks like "root.foo.bar".
fully_qualified_name: []const u8,
/// The dependency table of this module. Shared dependencies such as 'std',
/// 'builtin', and 'root' are not specified in every dependency table, but
/// instead only in the table of `main_mod`. `Module.importFile` is
/// responsible for detecting these names and using the correct package.
deps: Deps = .{},

/// String-keyed table mapping a dependency name to the module it resolves to.
pub const Deps = std.StringHashMapUnmanaged(*Module);

pub const Tree = struct {
    /// Each `Package` exposes a `Module` with build.zig as its root source file.
    build_module_table: std.AutoArrayHashMapUnmanaged(MultiHashHexDigest, *Module),
};

/// Heap-allocates a `Module` with `allocator` and copies `m` into it.
/// Caller owns the returned pointer; freeing it is the caller's concern.
pub fn create(allocator: Allocator, m: Module) Allocator.Error!*Module {
    const new = try allocator.create(Module);
    new.* = m;
    return new;
}

const Module = @This();
const Package = @import("../Package.zig");
const std = @import("std");
const Allocator = std.mem.Allocator;
const MultiHashHexDigest = Package.Manifest.MultiHashHexDigest;

View File

@ -1,153 +0,0 @@
const builtin = @import("builtin");
const std = @import("std");
const fs = std.fs;
const ThreadPool = std.Thread.Pool;
const WaitGroup = std.Thread.WaitGroup;
const Allocator = std.mem.Allocator;
const Hash = @import("../Manifest.zig").Hash;
/// Computes a single digest over the entire contents of `pkg_dir`.
/// Every regular file and symlink is hashed independently (fanned out on
/// `thread_pool`), the per-file digests are sorted by OS-normalized path,
/// and the final result is a hash over those per-file digests.
/// Directories contribute no bytes of their own; any other file kind yields
/// `error.IllegalFileTypeInPackage`. Per-file failures are logged and
/// reported collectively as `error.PackageHashUnavailable`.
pub fn compute(thread_pool: *ThreadPool, pkg_dir: fs.IterableDir) ![Hash.digest_length]u8 {
    const gpa = thread_pool.allocator;

    // We'll use an arena allocator for the path name strings since they all
    // need to be in memory for sorting.
    var arena_instance = std.heap.ArenaAllocator.init(gpa);
    defer arena_instance.deinit();
    const arena = arena_instance.allocator();

    // TODO: delete files not included in the package prior to computing the package hash.
    // for example, if the ini file has directives to include/not include certain files,
    // apply those rules directly to the filesystem right here. This ensures that files
    // not protected by the hash are not present on the file system.

    // Collect all files, recursively, then sort.
    var all_files = std.ArrayList(*HashedFile).init(gpa);
    defer all_files.deinit();

    var walker = try pkg_dir.walk(gpa);
    defer walker.deinit();

    {
        // The final hash will be a hash of each file hashed independently. This
        // allows hashing in parallel.
        var wait_group: WaitGroup = .{};
        // Block scope exit until every spawned worker has called finish().
        defer wait_group.wait();

        while (try walker.next()) |entry| {
            const kind: HashedFile.Kind = switch (entry.kind) {
                .directory => continue,
                .file => .file,
                .sym_link => .sym_link,
                else => return error.IllegalFileTypeInPackage,
            };
            const hashed_file = try arena.create(HashedFile);
            // entry.path is only valid until the next walker.next() call,
            // so it must be duplicated into the arena.
            const fs_path = try arena.dupe(u8, entry.path);
            hashed_file.* = .{
                .fs_path = fs_path,
                .normalized_path = try normalizePath(arena, fs_path),
                .kind = kind,
                .hash = undefined, // to be populated by the worker
                .failure = undefined, // to be populated by the worker
            };
            wait_group.start();
            try thread_pool.spawn(workerHashFile, .{ pkg_dir.dir, hashed_file, &wait_group });
            try all_files.append(hashed_file);
        }
    }

    // Sort by normalized path so the final digest does not depend on
    // directory-walk order.
    std.mem.sortUnstable(*HashedFile, all_files.items, {}, HashedFile.lessThan);

    var hasher = Hash.init(.{});
    var any_failures = false;
    for (all_files.items) |hashed_file| {
        hashed_file.failure catch |err| {
            any_failures = true;
            std.log.err("unable to hash '{s}': {s}", .{ hashed_file.fs_path, @errorName(err) });
        };
        hasher.update(&hashed_file.hash);
    }
    if (any_failures) return error.PackageHashUnavailable;
    return hasher.finalResult();
}
/// One file's contribution to the package hash; hash and failure are
/// filled in by a worker thread.
const HashedFile = struct {
    /// Path used to open the file, relative to the package directory.
    fs_path: []const u8,
    /// `fs_path` with the OS path separator canonicalized to '/', used for
    /// sorting and as part of the hashed bytes.
    normalized_path: []const u8,
    /// Per-file digest; populated by the worker.
    hash: [Hash.digest_length]u8,
    /// Result of hashing this file; checked after all workers finish.
    failure: Error!void,
    kind: Kind,

    const Error =
        fs.File.OpenError ||
        fs.File.ReadError ||
        fs.File.StatError ||
        fs.Dir.ReadLinkError;

    const Kind = enum { file, sym_link };

    /// Orders entries by normalized path so the final package digest is
    /// independent of directory-walk order.
    fn lessThan(context: void, lhs: *const HashedFile, rhs: *const HashedFile) bool {
        _ = context;
        return std.mem.lessThan(u8, lhs.normalized_path, rhs.normalized_path);
    }
};
/// Canonicalizes a file system path so it is identical independently of
/// operating system path inconsistencies: on platforms whose native
/// separator is not '/', returns an arena-allocated copy with every
/// separator byte rewritten to '/'. On '/'-native platforms the input
/// slice is returned unchanged (no allocation).
fn normalizePath(arena: Allocator, fs_path: []const u8) ![]const u8 {
    const canonical_sep = '/';

    // Nothing to rewrite when the native separator is already canonical.
    if (fs.path.sep == canonical_sep) return fs_path;

    const copy = try arena.dupe(u8, fs_path);
    for (copy) |*byte| {
        if (byte.* == fs.path.sep) byte.* = canonical_sep;
    }
    return copy;
}
/// Thread-pool worker: hashes a single file and records the outcome
/// (success or error) in `hashed_file.failure`, then signals `wg`.
/// Errors never propagate out of the worker; they are inspected later
/// by `compute`.
fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile, wg: *WaitGroup) void {
    hashed_file.failure = hashFileFallible(dir, hashed_file);
    wg.finish();
}
/// Computes `hashed_file.hash`. The digest covers, in order: the normalized
/// path; then for regular files a 0 byte plus an is-executable flag byte
/// followed by the file contents; or for symlinks, the link target.
/// This byte order is part of the hash format — do not reorder the
/// `hasher.update` calls.
fn hashFileFallible(dir: fs.Dir, hashed_file: *HashedFile) HashedFile.Error!void {
    var buf: [8000]u8 = undefined;
    var hasher = Hash.init(.{});
    hasher.update(hashed_file.normalized_path);
    switch (hashed_file.kind) {
        .file => {
            var file = try dir.openFile(hashed_file.fs_path, .{});
            defer file.close();
            // Separator byte plus executable-bit flag, so this piece of file
            // metadata participates in the hash.
            hasher.update(&.{ 0, @intFromBool(try isExecutable(file)) });
            while (true) {
                const bytes_read = try file.read(&buf);
                if (bytes_read == 0) break;
                hasher.update(buf[0..bytes_read]);
            }
        },
        .sym_link => {
            const link_name = try dir.readLink(hashed_file.fs_path, &buf);
            hasher.update(link_name);
        },
    }
    hasher.final(&hashed_file.hash);
}
/// Reports whether `file` has its owner-execute permission bit set.
/// On Windows this is currently always false.
fn isExecutable(file: fs.File) !bool {
    if (builtin.os.tag == .windows) {
        // TODO check the ACL on Windows.
        // Until this is implemented, this could be a false negative on
        // Windows, which is why we do not yet set executable_bit_only above
        // when unpacking the tarball.
        return false;
    }
    const mode = (try file.stat()).mode;
    return mode & std.os.S.IXUSR != 0;
}

View File

@ -5732,6 +5732,9 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
const tracy = trace(@src());
defer tracy.end();
const mod = sema.mod;
const comp = mod.comp;
const gpa = sema.gpa;
const pl_node = sema.code.instructions.items(.data)[inst].pl_node;
const src = pl_node.src();
const extra = sema.code.extraData(Zir.Inst.Block, pl_node.payload_index);
@ -5741,7 +5744,7 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
if (!@import("build_options").have_llvm)
return sema.fail(parent_block, src, "C import unavailable; Zig compiler built without LLVM extensions", .{});
var c_import_buf = std.ArrayList(u8).init(sema.gpa);
var c_import_buf = std.ArrayList(u8).init(gpa);
defer c_import_buf.deinit();
var comptime_reason: Block.ComptimeReason = .{ .c_import = .{
@ -5763,25 +5766,24 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
.runtime_loop = parent_block.runtime_loop,
.runtime_index = parent_block.runtime_index,
};
defer child_block.instructions.deinit(sema.gpa);
defer child_block.instructions.deinit(gpa);
// Ignore the result, all the relevant operations have written to c_import_buf already.
_ = try sema.analyzeBodyBreak(&child_block, body);
const mod = sema.mod;
var c_import_res = mod.comp.cImport(c_import_buf.items) catch |err|
var c_import_res = comp.cImport(c_import_buf.items) catch |err|
return sema.fail(&child_block, src, "C import failed: {s}", .{@errorName(err)});
defer c_import_res.deinit(mod.comp.gpa);
defer c_import_res.deinit(gpa);
if (c_import_res.errors.errorMessageCount() != 0) {
const msg = msg: {
const msg = try sema.errMsg(&child_block, src, "C import failed", .{});
errdefer msg.destroy(sema.gpa);
errdefer msg.destroy(gpa);
if (!mod.comp.bin_file.options.link_libc)
if (!comp.bin_file.options.link_libc)
try sema.errNote(&child_block, src, msg, "libc headers not available; compilation does not link against libc", .{});
const gop = try mod.cimport_errors.getOrPut(sema.gpa, sema.owner_decl_index);
const gop = try mod.cimport_errors.getOrPut(gpa, sema.owner_decl_index);
if (!gop.found_existing) {
gop.value_ptr.* = c_import_res.errors;
c_import_res.errors = std.zig.ErrorBundle.empty;
@ -5790,16 +5792,16 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
};
return sema.failWithOwnedErrorMsg(&child_block, msg);
}
const c_import_pkg = Package.create(
sema.gpa,
null,
c_import_res.out_zig_path,
) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => unreachable, // we pass null for root_src_dir_path
};
const c_import_mod = try Package.Module.create(comp.arena.allocator(), .{
.root = .{
.root_dir = Compilation.Directory.cwd(),
.sub_path = std.fs.path.dirname(c_import_res.out_zig_path) orelse "",
},
.root_src_path = std.fs.path.basename(c_import_res.out_zig_path),
.fully_qualified_name = c_import_res.out_zig_path,
});
const result = mod.importPkg(c_import_pkg) catch |err|
const result = mod.importPkg(c_import_mod) catch |err|
return sema.fail(&child_block, src, "C import failed: {s}", .{@errorName(err)});
mod.astGenFile(result.file) catch |err|
@ -13071,13 +13073,13 @@ fn zirImport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
const operand = inst_data.get(sema.code);
const result = mod.importFile(block.getFileScope(mod), operand) catch |err| switch (err) {
error.ImportOutsidePkgPath => {
return sema.fail(block, operand_src, "import of file outside package path: '{s}'", .{operand});
error.ImportOutsideModulePath => {
return sema.fail(block, operand_src, "import of file outside module path: '{s}'", .{operand});
},
error.PackageNotFound => {
const name = try block.getFileScope(mod).pkg.getName(sema.gpa, mod.*);
defer sema.gpa.free(name);
return sema.fail(block, operand_src, "no package named '{s}' available within package '{s}'", .{ operand, name });
error.ModuleNotFound => {
return sema.fail(block, operand_src, "no module named '{s}' available within module {s}", .{
operand, block.getFileScope(mod).mod.fully_qualified_name,
});
},
else => {
// TODO: these errors are file system errors; make sure an update() will
@ -13106,7 +13108,7 @@ fn zirEmbedFile(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
}
const embed_file = mod.embedFile(block.getFileScope(mod), name) catch |err| switch (err) {
error.ImportOutsidePkgPath => {
error.ImportOutsideModulePath => {
return sema.fail(block, operand_src, "embed of file outside package path: '{s}'", .{name});
},
else => {
@ -36415,8 +36417,8 @@ fn getBuiltinDecl(sema: *Sema, block: *Block, name: []const u8) CompileError!Mod
const mod = sema.mod;
const ip = &mod.intern_pool;
const std_pkg = mod.main_pkg.table.get("std").?;
const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
const std_mod = mod.main_mod.deps.get("std").?;
const std_file = (mod.importPkg(std_mod) catch unreachable).file;
const opt_builtin_inst = (try sema.namespaceLookupRef(
block,
src,

View File

@ -892,21 +892,24 @@ pub const Object = struct {
build_options.semver.patch,
});
// We fully resolve all paths at this point to avoid lack of source line info in stack
// traces or lack of debugging information which, if relative paths were used, would
// be very location dependent.
// We fully resolve all paths at this point to avoid lack of
// source line info in stack traces or lack of debugging
// information which, if relative paths were used, would be
// very location dependent.
// TODO: the only concern I have with this is WASI as either host or target, should
// we leave the paths as relative then?
var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const compile_unit_dir = blk: {
const path = d: {
const mod = options.module orelse break :d ".";
break :d mod.root_pkg.root_src_directory.path orelse ".";
};
if (std.fs.path.isAbsolute(path)) break :blk path;
break :blk std.os.realpath(path, &buf) catch path; // If realpath fails, fallback to whatever path was
const compile_unit_dir_z = blk: {
if (options.module) |mod| {
const d = try mod.root_mod.root.joinStringZ(builder.gpa, "");
if (std.fs.path.isAbsolute(d)) break :blk d;
const abs = std.fs.realpath(d, &buf) catch break :blk d;
builder.gpa.free(d);
break :blk try builder.gpa.dupeZ(u8, abs);
}
const cwd = try std.process.getCwd(&buf);
break :blk try builder.gpa.dupeZ(u8, cwd);
};
const compile_unit_dir_z = try builder.gpa.dupeZ(u8, compile_unit_dir);
defer builder.gpa.free(compile_unit_dir_z);
builder.llvm.di_compile_unit = builder.llvm.di_builder.?.createCompileUnit(
@ -1833,14 +1836,11 @@ pub const Object = struct {
}
const dir_path_z = d: {
var buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const dir_path = file.pkg.root_src_directory.path orelse ".";
const resolved_dir_path = if (std.fs.path.isAbsolute(dir_path))
dir_path
else
std.os.realpath(dir_path, &buffer) catch dir_path; // If realpath fails, fallback to whatever dir_path was
break :d try std.fs.path.joinZ(gpa, &.{
resolved_dir_path, std.fs.path.dirname(file.sub_file_path) orelse "",
});
const sub_path = std.fs.path.dirname(file.sub_file_path) orelse "";
const dir_path = try file.mod.root.joinStringZ(gpa, sub_path);
if (std.fs.path.isAbsolute(dir_path)) break :d dir_path;
const abs = std.fs.realpath(dir_path, &buffer) catch break :d dir_path;
break :d try std.fs.path.joinZ(gpa, &.{ abs, sub_path });
};
defer gpa.free(dir_path_z);
const sub_file_path_z = try gpa.dupeZ(u8, std.fs.path.basename(file.sub_file_path));
@ -2828,8 +2828,8 @@ pub const Object = struct {
fn getStackTraceType(o: *Object) Allocator.Error!Type {
const mod = o.module;
const std_pkg = mod.main_pkg.table.get("std").?;
const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
const std_mod = mod.main_mod.deps.get("std").?;
const std_file = (mod.importPkg(std_mod) catch unreachable).file;
const builtin_str = try mod.intern_pool.getOrPutString(mod.gpa, "builtin");
const std_namespace = mod.namespacePtr(mod.declPtr(std_file.root_decl.unwrap().?).src_namespace);

View File

@ -139,18 +139,22 @@ fn dumpStatusReport() !void {
var crash_heap: [16 * 4096]u8 = undefined;
fn writeFilePath(file: *Module.File, stream: anytype) !void {
if (file.pkg.root_src_directory.path) |path| {
try stream.writeAll(path);
try stream.writeAll(std.fs.path.sep_str);
/// Writes the path of `file` to `writer`: the module root directory path
/// (when it has one), then the module root sub-path (when non-empty), then
/// the file's own sub-path, each joined with the platform path separator.
fn writeFilePath(file: *Module.File, writer: anytype) !void {
    if (file.mod.root.root_dir.path) |path| {
        try writer.writeAll(path);
        try writer.writeAll(std.fs.path.sep_str);
    }
    if (file.mod.root.sub_path.len > 0) {
        try writer.writeAll(file.mod.root.sub_path);
        try writer.writeAll(std.fs.path.sep_str);
    }
    try writer.writeAll(file.sub_file_path);
}
fn writeFullyQualifiedDeclWithFile(mod: *Module, decl: *Decl, stream: anytype) !void {
try writeFilePath(decl.getFileScope(mod), stream);
try stream.writeAll(": ");
try decl.renderFullyQualifiedDebugName(mod, stream);
/// Writes "<file path>: <fully qualified decl debug name>" to `writer`,
/// used when formatting crash/status reports.
fn writeFullyQualifiedDeclWithFile(mod: *Module, decl: *Decl, writer: anytype) !void {
    try writeFilePath(decl.getFileScope(mod), writer);
    try writer.writeAll(": ");
    try decl.renderFullyQualifiedDebugName(mod, writer);
}
pub fn compilerPanic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace, maybe_ret_addr: ?usize) noreturn {

View File

@ -1074,7 +1074,7 @@ fn buildSharedLib(
.cache_mode = .whole,
.target = comp.getTarget(),
.root_name = lib.name,
.main_pkg = null,
.main_mod = null,
.output_mode = .Lib,
.link_mode = .Dynamic,
.thread_pool = comp.thread_pool,

View File

@ -233,7 +233,7 @@ pub fn buildLibCXX(comp: *Compilation, prog_node: *std.Progress.Node) !void {
.cache_mode = .whole,
.target = target,
.root_name = root_name,
.main_pkg = null,
.main_mod = null,
.output_mode = output_mode,
.thread_pool = comp.thread_pool,
.libc_installation = comp.bin_file.options.libc_installation,
@ -396,7 +396,7 @@ pub fn buildLibCXXABI(comp: *Compilation, prog_node: *std.Progress.Node) !void {
.cache_mode = .whole,
.target = target,
.root_name = root_name,
.main_pkg = null,
.main_mod = null,
.output_mode = output_mode,
.thread_pool = comp.thread_pool,
.libc_installation = comp.bin_file.options.libc_installation,

View File

@ -202,7 +202,7 @@ pub fn buildTsan(comp: *Compilation, prog_node: *std.Progress.Node) !void {
.cache_mode = .whole,
.target = target,
.root_name = root_name,
.main_pkg = null,
.main_mod = null,
.output_mode = output_mode,
.thread_pool = comp.thread_pool,
.libc_installation = comp.bin_file.options.libc_installation,

View File

@ -89,7 +89,7 @@ pub fn buildStaticLib(comp: *Compilation, prog_node: *std.Progress.Node) !void {
.cache_mode = .whole,
.target = target,
.root_name = root_name,
.main_pkg = null,
.main_mod = null,
.output_mode = output_mode,
.thread_pool = comp.thread_pool,
.libc_installation = comp.bin_file.options.libc_installation,

View File

@ -1880,7 +1880,7 @@ pub fn writeDbgInfoHeader(self: *Dwarf, module: *Module, low_pc: u64, high_pc: u
},
}
// Write the form for the compile unit, which must match the abbrev table above.
const name_strp = try self.strtab.insert(self.allocator, module.root_pkg.root_src_path);
const name_strp = try self.strtab.insert(self.allocator, module.root_mod.root_src_path);
var compile_unit_dir_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const compile_unit_dir = resolveCompilationDir(module, &compile_unit_dir_buffer);
const comp_dir_strp = try self.strtab.insert(self.allocator, compile_unit_dir);
@ -1940,9 +1940,17 @@ fn resolveCompilationDir(module: *Module, buffer: *[std.fs.MAX_PATH_BYTES]u8) []
// be very location dependent.
// TODO: the only concern I have with this is WASI as either host or target, should
// we leave the paths as relative then?
const comp_dir_path = module.root_pkg.root_src_directory.path orelse ".";
if (std.fs.path.isAbsolute(comp_dir_path)) return comp_dir_path;
return std.os.realpath(comp_dir_path, buffer) catch comp_dir_path; // If realpath fails, fallback to whatever comp_dir_path was
const root_dir_path = module.root_mod.root.root_dir.path orelse ".";
const sub_path = module.root_mod.root.sub_path;
const realpath = if (std.fs.path.isAbsolute(root_dir_path)) r: {
@memcpy(buffer[0..root_dir_path.len], root_dir_path);
break :r root_dir_path;
} else std.fs.realpath(root_dir_path, buffer) catch return root_dir_path;
const len = realpath.len + 1 + sub_path.len;
if (buffer.len < len) return root_dir_path;
buffer[realpath.len] = '/';
@memcpy(buffer[realpath.len + 1 ..][0..sub_path.len], sub_path);
return buffer[0..len];
}
fn writeAddrAssumeCapacity(self: *Dwarf, buf: *std.ArrayList(u8), addr: u64) void {
@ -2664,7 +2672,7 @@ fn genIncludeDirsAndFileNames(self: *Dwarf, arena: Allocator) !struct {
for (self.di_files.keys()) |dif| {
const dir_path = d: {
var buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const dir_path = dif.pkg.root_src_directory.path orelse ".";
const dir_path = try dif.mod.root.joinString(arena, dif.mod.root.sub_path);
const abs_dir_path = if (std.fs.path.isAbsolute(dir_path))
dir_path
else

View File

@ -929,15 +929,15 @@ pub fn populateMissingMetadata(self: *Elf) !void {
if (self.base.options.module) |module| {
if (self.zig_module_index == null and !self.base.options.use_llvm) {
const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
const index: File.Index = @intCast(try self.files.addOne(gpa));
self.files.set(index, .{ .zig_module = .{
.index = index,
.path = module.main_pkg.root_src_path,
.path = module.main_mod.root_src_path,
} });
self.zig_module_index = index;
const zig_module = self.file(index).?.zig_module;
const name_off = try self.strtab.insert(gpa, std.fs.path.stem(module.main_pkg.root_src_path));
const name_off = try self.strtab.insert(gpa, std.fs.path.stem(module.main_mod.root_src_path));
const symbol_index = try self.addSymbol();
try zig_module.local_symbols.append(gpa, symbol_index);
const symbol_ptr = self.symbol(symbol_index);

View File

@ -352,9 +352,12 @@ fn putFn(self: *Plan9, decl_index: Module.Decl.Index, out: FnDeclOutput) !void {
// getting the full file path
var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const dir = file.pkg.root_src_directory.path orelse try std.os.getcwd(&buf);
const sub_path = try std.fs.path.join(arena, &.{ dir, file.sub_file_path });
try self.addPathComponents(sub_path, &a);
const full_path = try std.fs.path.join(arena, &.{
file.mod.root.root_dir.path orelse try std.os.getcwd(&buf),
file.mod.root.sub_path,
file.sub_file_path,
});
try self.addPathComponents(full_path, &a);
// null terminate
try a.append(0);

View File

@ -416,7 +416,7 @@ const usage_build_generic =
\\ dep: [[import=]name]
\\ --deps [dep],[dep],... Set dependency names for the root package
\\ dep: [[import=]name]
\\ --main-pkg-path Set the directory of the root package
\\ --main-mod-path Set the directory of the root module
\\ -fPIC Force-enable Position Independent Code
\\ -fno-PIC Force-disable Position Independent Code
\\ -fPIE Force-enable Position Independent Executable
@ -765,17 +765,11 @@ const Framework = struct {
};
const CliModule = struct {
mod: *Package,
mod: *Package.Module,
/// still in CLI arg format
deps_str: []const u8,
};
fn cleanupModules(modules: *std.StringArrayHashMap(CliModule)) void {
var it = modules.iterator();
while (it.next()) |kv| kv.value_ptr.mod.destroy(modules.allocator);
modules.deinit();
}
fn buildOutputType(
gpa: Allocator,
arena: Allocator,
@ -903,7 +897,7 @@ fn buildOutputType(
var override_local_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_LOCAL_CACHE_DIR");
var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR");
var override_lib_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_LIB_DIR");
var main_pkg_path: ?[]const u8 = null;
var main_mod_path: ?[]const u8 = null;
var clang_preprocessor_mode: Compilation.ClangPreprocessorMode = .no;
var subsystem: ?std.Target.SubSystem = null;
var major_subsystem_version: ?u32 = null;
@ -950,8 +944,7 @@ fn buildOutputType(
// Contains every module specified via --mod. The dependencies are added
// after argument parsing is completed. We use a StringArrayHashMap to make
// error output consistent.
var modules = std.StringArrayHashMap(CliModule).init(gpa);
defer cleanupModules(&modules);
var modules = std.StringArrayHashMap(CliModule).init(arena);
// The dependency string for the root package
var root_deps_str: ?[]const u8 = null;
@ -1023,33 +1016,36 @@ fn buildOutputType(
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
if (mem.eql(u8, mod_name, name)) {
fatal("unable to add module '{s}' -> '{s}': conflicts with builtin module", .{ mod_name, root_src });
fatal("unable to add module '{s}' -> '{s}': conflicts with builtin module", .{
mod_name, root_src,
});
}
}
var mod_it = modules.iterator();
while (mod_it.next()) |kv| {
if (std.mem.eql(u8, mod_name, kv.key_ptr.*)) {
fatal("unable to add module '{s}' -> '{s}': already exists as '{s}'", .{ mod_name, root_src, kv.value_ptr.mod.root_src_path });
}
if (modules.get(mod_name)) |value| {
fatal("unable to add module '{s}' -> '{s}': already exists as '{s}'", .{
mod_name, root_src, value.mod.root_src_path,
});
}
try modules.ensureUnusedCapacity(1);
modules.put(mod_name, .{
.mod = try Package.create(
gpa,
fs.path.dirname(root_src),
fs.path.basename(root_src),
),
try modules.put(mod_name, .{
.mod = try Package.Module.create(arena, .{
.root = .{
.root_dir = Cache.Directory.cwd(),
.sub_path = fs.path.dirname(root_src) orelse "",
},
.root_src_path = fs.path.basename(root_src),
.fully_qualified_name = mod_name,
}),
.deps_str = deps_str,
}) catch unreachable;
});
} else if (mem.eql(u8, arg, "--deps")) {
if (root_deps_str != null) {
fatal("only one --deps argument is allowed", .{});
}
root_deps_str = args_iter.nextOrFatal();
} else if (mem.eql(u8, arg, "--main-pkg-path")) {
main_pkg_path = args_iter.nextOrFatal();
} else if (mem.eql(u8, arg, "--main-mod-path")) {
main_mod_path = args_iter.nextOrFatal();
} else if (mem.eql(u8, arg, "-cflags")) {
extra_cflags.shrinkRetainingCapacity(0);
while (true) {
@ -2461,19 +2457,26 @@ fn buildOutputType(
var deps_it = ModuleDepIterator.init(deps_str);
while (deps_it.next()) |dep| {
if (dep.expose.len == 0) {
fatal("module '{s}' depends on '{s}' with a blank name", .{ kv.key_ptr.*, dep.name });
fatal("module '{s}' depends on '{s}' with a blank name", .{
kv.key_ptr.*, dep.name,
});
}
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
if (mem.eql(u8, dep.expose, name)) {
fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{ dep.name, dep.expose });
fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{
dep.name, dep.expose,
});
}
}
const dep_mod = modules.get(dep.name) orelse
fatal("module '{s}' depends on module '{s}' which does not exist", .{ kv.key_ptr.*, dep.name });
const dep_mod = modules.get(dep.name) orelse {
fatal("module '{s}' depends on module '{s}' which does not exist", .{
kv.key_ptr.*, dep.name,
});
};
try kv.value_ptr.mod.add(gpa, dep.expose, dep_mod.mod);
try kv.value_ptr.mod.deps.put(arena, dep.expose, dep_mod.mod);
}
}
}
@ -3229,31 +3232,35 @@ fn buildOutputType(
};
defer emit_implib_resolved.deinit();
const main_pkg: ?*Package = if (root_src_file) |unresolved_src_path| blk: {
const main_mod: ?*Package.Module = if (root_src_file) |unresolved_src_path| blk: {
const src_path = try introspect.resolvePath(arena, unresolved_src_path);
if (main_pkg_path) |unresolved_main_pkg_path| {
const p = try introspect.resolvePath(arena, unresolved_main_pkg_path);
if (p.len == 0) {
break :blk try Package.create(gpa, null, src_path);
} else {
const rel_src_path = try fs.path.relative(arena, p, src_path);
break :blk try Package.create(gpa, p, rel_src_path);
}
if (main_mod_path) |unresolved_main_mod_path| {
const p = try introspect.resolvePath(arena, unresolved_main_mod_path);
break :blk try Package.Module.create(arena, .{
.root = .{
.root_dir = Cache.Directory.cwd(),
.sub_path = p,
},
.root_src_path = if (p.len == 0)
src_path
else
try fs.path.relative(arena, p, src_path),
.fully_qualified_name = "root",
});
} else {
const root_src_dir_path = fs.path.dirname(src_path);
break :blk Package.create(gpa, root_src_dir_path, fs.path.basename(src_path)) catch |err| {
if (root_src_dir_path) |p| {
fatal("unable to open '{s}': {s}", .{ p, @errorName(err) });
} else {
return err;
}
};
break :blk try Package.Module.create(arena, .{
.root = .{
.root_dir = Cache.Directory.cwd(),
.sub_path = fs.path.dirname(src_path) orelse "",
},
.root_src_path = fs.path.basename(src_path),
.fully_qualified_name = "root",
});
}
} else null;
defer if (main_pkg) |p| p.destroy(gpa);
// Transfer packages added with --deps to the root package
if (main_pkg) |mod| {
if (main_mod) |mod| {
var it = ModuleDepIterator.init(root_deps_str orelse "");
while (it.next()) |dep| {
if (dep.expose.len == 0) {
@ -3269,7 +3276,7 @@ fn buildOutputType(
const dep_mod = modules.get(dep.name) orelse
fatal("root module depends on module '{s}' which does not exist", .{dep.name});
try mod.add(gpa, dep.expose, dep_mod.mod);
try mod.deps.put(arena, dep.expose, dep_mod.mod);
}
}
@ -3310,17 +3317,18 @@ fn buildOutputType(
if (arg_mode == .run) {
break :l global_cache_directory;
}
if (main_pkg) |pkg| {
if (main_mod != null) {
// search upwards from cwd until we find directory with build.zig
const cwd_path = try process.getCwdAlloc(arena);
const build_zig = "build.zig";
const zig_cache = "zig-cache";
var dirname: []const u8 = cwd_path;
while (true) {
const joined_path = try fs.path.join(arena, &[_][]const u8{ dirname, build_zig });
const joined_path = try fs.path.join(arena, &.{
dirname, Package.build_zig_basename,
});
if (fs.cwd().access(joined_path, .{})) |_| {
const cache_dir_path = try fs.path.join(arena, &[_][]const u8{ dirname, zig_cache });
const dir = try pkg.root_src_directory.handle.makeOpenPath(cache_dir_path, .{});
const cache_dir_path = try fs.path.join(arena, &.{ dirname, zig_cache });
const dir = try fs.cwd().makeOpenPath(cache_dir_path, .{});
cleanup_local_cache_dir = dir;
break :l .{ .handle = dir, .path = cache_dir_path };
} else |err| switch (err) {
@ -3389,7 +3397,7 @@ fn buildOutputType(
.dynamic_linker = target_info.dynamic_linker.get(),
.sysroot = sysroot,
.output_mode = output_mode,
.main_pkg = main_pkg,
.main_mod = main_mod,
.emit_bin = emit_bin_loc,
.emit_h = emit_h_resolved.data,
.emit_asm = emit_asm_resolved.data,
@ -4613,11 +4621,14 @@ pub const usage_build =
\\ --global-cache-dir [path] Override path to global Zig cache directory
\\ --zig-lib-dir [arg] Override path to Zig lib directory
\\ --build-runner [file] Override path to build runner
\\ --fetch Exit after fetching dependency tree
\\ -h, --help Print this help and exit
\\
;
pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
const work_around_btrfs_bug = builtin.os.tag == .linux and
std.process.hasEnvVarConstant("ZIG_BTRFS_WORKAROUND");
var color: Color = .auto;
// We want to release all the locks before executing the child process, so we make a nice
@ -4633,6 +4644,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
var child_argv = std.ArrayList([]const u8).init(arena);
var reference_trace: ?u32 = null;
var debug_compile_errors = false;
var fetch_only = false;
const argv_index_exe = child_argv.items.len;
_ = try child_argv.addOne();
@ -4682,6 +4694,8 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
} else if (mem.eql(u8, arg, "-freference-trace")) {
try child_argv.append(arg);
reference_trace = 256;
} else if (mem.eql(u8, arg, "--fetch")) {
fetch_only = true;
} else if (mem.startsWith(u8, arg, "-freference-trace=")) {
try child_argv.append(arg);
const num = arg["-freference-trace=".len..];
@ -4714,8 +4728,8 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
defer if (cleanup_build_dir) |*dir| dir.close();
const cwd_path = try process.getCwdAlloc(arena);
const build_zig_basename = if (build_file) |bf| fs.path.basename(bf) else "build.zig";
const build_directory: Compilation.Directory = blk: {
const build_zig_basename = if (build_file) |bf| fs.path.basename(bf) else Package.build_zig_basename;
const build_root: Compilation.Directory = blk: {
if (build_file) |bf| {
if (fs.path.dirname(bf)) |dirname| {
const dir = fs.cwd().openDir(dirname, .{}) catch |err| {
@ -4751,7 +4765,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
}
}
};
child_argv.items[argv_index_build_file] = build_directory.path orelse cwd_path;
child_argv.items[argv_index_build_file] = build_root.path orelse cwd_path;
var global_cache_directory: Compilation.Directory = l: {
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
@ -4771,9 +4785,9 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
.path = local_cache_dir_path,
};
}
const cache_dir_path = try build_directory.join(arena, &[_][]const u8{"zig-cache"});
const cache_dir_path = try build_root.join(arena, &[_][]const u8{"zig-cache"});
break :l .{
.handle = try build_directory.handle.makeOpenPath("zig-cache", .{}),
.handle = try build_root.handle.makeOpenPath("zig-cache", .{}),
.path = cache_dir_path,
};
};
@ -4799,97 +4813,150 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
try thread_pool.init(.{ .allocator = gpa });
defer thread_pool.deinit();
var cleanup_build_runner_dir: ?fs.Dir = null;
defer if (cleanup_build_runner_dir) |*dir| dir.close();
var main_pkg: Package = if (override_build_runner) |build_runner_path|
var main_mod: Package.Module = if (override_build_runner) |build_runner_path|
.{
.root_src_directory = blk: {
if (std.fs.path.dirname(build_runner_path)) |dirname| {
const dir = fs.cwd().openDir(dirname, .{}) catch |err| {
fatal("unable to open directory to build runner from argument 'build-runner', '{s}': {s}", .{ dirname, @errorName(err) });
};
cleanup_build_runner_dir = dir;
break :blk .{ .path = dirname, .handle = dir };
}
break :blk .{ .path = null, .handle = fs.cwd() };
.root = .{
.root_dir = Cache.Directory.cwd(),
.sub_path = fs.path.dirname(build_runner_path) orelse "",
},
.root_src_path = std.fs.path.basename(build_runner_path),
.root_src_path = fs.path.basename(build_runner_path),
.fully_qualified_name = "root",
}
else
.{
.root_src_directory = zig_lib_directory,
.root = .{ .root_dir = zig_lib_directory },
.root_src_path = "build_runner.zig",
.fully_qualified_name = "root",
};
var build_pkg: Package = .{
.root_src_directory = build_directory,
var build_mod: Package.Module = .{
.root = .{ .root_dir = build_root },
.root_src_path = build_zig_basename,
.fully_qualified_name = "root.@build",
};
if (build_options.only_core_functionality) {
const deps_pkg = try Package.createFilePkg(gpa, local_cache_directory, "dependencies.zig",
\\pub const packages = struct {};
\\pub const root_deps: []const struct { []const u8, []const u8 } = &.{};
\\
);
try main_pkg.add(gpa, "@dependencies", deps_pkg);
try createEmptyDependenciesModule(arena, &main_mod, local_cache_directory);
} else {
var http_client: std.http.Client = .{ .allocator = gpa };
defer http_client.deinit();
// Here we provide an import to the build runner that allows using reflection to find
// all of the dependencies. Without this, there would be no way to use `@import` to
// access dependencies by name, since `@import` requires string literals.
var dependencies_source = std.ArrayList(u8).init(gpa);
defer dependencies_source.deinit();
var all_modules: Package.AllModules = .{};
defer all_modules.deinit(gpa);
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
var progress: std.Progress = .{ .dont_print_on_dumb = true };
const root_prog_node = progress.start("Fetch Packages", 0);
defer root_prog_node.end();
// Here we borrow main package's table and will replace it with a fresh
// one after this process completes.
const fetch_result = build_pkg.fetchAndAddDependencies(
&main_pkg,
arena,
&thread_pool,
&http_client,
build_directory,
global_cache_directory,
local_cache_directory,
&dependencies_source,
&wip_errors,
&all_modules,
root_prog_node,
null,
var job_queue: Package.Fetch.JobQueue = .{
.http_client = &http_client,
.thread_pool = &thread_pool,
.global_cache = global_cache_directory,
.recursive = true,
.work_around_btrfs_bug = work_around_btrfs_bug,
};
defer job_queue.deinit();
try job_queue.all_fetches.ensureUnusedCapacity(gpa, 1);
try job_queue.table.ensureUnusedCapacity(gpa, 1);
var fetch: Package.Fetch = .{
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .relative_path = build_mod.root },
.location_tok = 0,
.hash_tok = 0,
.parent_package_root = build_mod.root,
.parent_manifest_ast = null,
.prog_node = root_prog_node,
.job_queue = &job_queue,
.omit_missing_hash_error = true,
.allow_missing_paths_field = false,
.package_root = undefined,
.error_bundle = undefined,
.manifest = null,
.manifest_ast = undefined,
.actual_hash = undefined,
.has_build_zig = true,
.oom_flag = false,
.module = &build_mod,
};
job_queue.all_fetches.appendAssumeCapacity(&fetch);
job_queue.table.putAssumeCapacityNoClobber(
Package.Fetch.relativePathDigest(build_mod.root, global_cache_directory),
&fetch,
);
if (wip_errors.root_list.items.len > 0) {
var errors = try wip_errors.toOwnedBundle("");
defer errors.deinit(gpa);
job_queue.wait_group.start();
try job_queue.thread_pool.spawn(Package.Fetch.workerRun, .{ &fetch, "root" });
job_queue.wait_group.wait();
try job_queue.consolidateErrors();
if (fetch.error_bundle.root_list.items.len > 0) {
var errors = try fetch.error_bundle.toOwnedBundle("");
errors.renderToStdErr(renderOptions(color));
process.exit(1);
}
try fetch_result;
const deps_pkg = try Package.createFilePkg(
gpa,
if (fetch_only) return cleanExit();
var source_buf = std.ArrayList(u8).init(gpa);
defer source_buf.deinit();
try job_queue.createDependenciesSource(&source_buf);
const deps_mod = try createDependenciesModule(
arena,
source_buf.items,
&main_mod,
local_cache_directory,
"dependencies.zig",
dependencies_source.items,
);
mem.swap(Package.Table, &main_pkg.table, &deps_pkg.table);
try main_pkg.add(gpa, "@dependencies", deps_pkg);
{
// We need a Module for each package's build.zig.
const hashes = job_queue.table.keys();
const fetches = job_queue.table.values();
try deps_mod.deps.ensureUnusedCapacity(arena, @intCast(hashes.len));
for (hashes, fetches) |hash, f| {
if (f == &fetch) {
// The first one is a dummy package for the current project.
continue;
}
if (!f.has_build_zig)
continue;
const m = try Package.Module.create(arena, .{
.root = try f.package_root.clone(arena),
.root_src_path = Package.build_zig_basename,
.fully_qualified_name = try std.fmt.allocPrint(
arena,
"root.@dependencies.{s}",
.{&hash},
),
});
const hash_cloned = try arena.dupe(u8, &hash);
deps_mod.deps.putAssumeCapacityNoClobber(hash_cloned, m);
f.module = m;
}
// Each build.zig module needs access to each of its
// dependencies' build.zig modules by name.
for (fetches) |f| {
const mod = f.module orelse continue;
const man = f.manifest orelse continue;
const dep_names = man.dependencies.keys();
try mod.deps.ensureUnusedCapacity(arena, @intCast(dep_names.len));
for (dep_names, man.dependencies.values()) |name, dep| {
const dep_digest = Package.Fetch.depDigest(
f.package_root,
global_cache_directory,
dep,
) orelse continue;
const dep_mod = job_queue.table.get(dep_digest).?.module orelse continue;
const name_cloned = try arena.dupe(u8, name);
mod.deps.putAssumeCapacityNoClobber(name_cloned, dep_mod);
}
}
}
}
try main_pkg.add(gpa, "@build", &build_pkg);
try main_mod.deps.put(arena, "@build", &build_mod);
const comp = Compilation.create(gpa, .{
.zig_lib_directory = zig_lib_directory,
@ -4901,7 +4968,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
.is_native_abi = cross_target.isNativeAbi(),
.dynamic_linker = target_info.dynamic_linker.get(),
.output_mode = .Exe,
.main_pkg = &main_pkg,
.main_mod = &main_mod,
.emit_bin = emit_bin,
.emit_h = null,
.optimize_mode = .Debug,
@ -5115,12 +5182,15 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
.tree = tree,
.tree_loaded = true,
.zir = undefined,
.pkg = undefined,
.mod = undefined,
.root_decl = .none,
};
file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.mod = try Package.Module.create(arena, .{
.root = Package.Path.cwd(),
.root_src_path = file.sub_file_path,
.fully_qualified_name = "root",
});
file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
@ -5321,12 +5391,15 @@ fn fmtPathFile(
.tree = tree,
.tree_loaded = true,
.zir = undefined,
.pkg = undefined,
.mod = undefined,
.root_decl = .none,
};
file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.mod = try Package.Module.create(fmt.arena, .{
.root = Package.Path.cwd(),
.root_src_path = file.sub_file_path,
.fully_qualified_name = "root",
});
if (stat.size > max_src_size)
return error.FileTooBig;
@ -5387,7 +5460,7 @@ pub fn putAstErrorsIntoBundle(
tree: Ast,
path: []const u8,
wip_errors: *std.zig.ErrorBundle.Wip,
) !void {
) Allocator.Error!void {
var file: Module.File = .{
.status = .never_loaded,
.source_loaded = true,
@ -5402,12 +5475,16 @@ pub fn putAstErrorsIntoBundle(
.tree = tree,
.tree_loaded = true,
.zir = undefined,
.pkg = undefined,
.mod = undefined,
.root_decl = .none,
};
file.pkg = try Package.create(gpa, null, path);
defer file.pkg.destroy(gpa);
file.mod = try Package.Module.create(gpa, .{
.root = Package.Path.cwd(),
.root_src_path = file.sub_file_path,
.fully_qualified_name = "root",
});
defer gpa.destroy(file.mod);
file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
@ -5933,7 +6010,7 @@ pub fn cmdAstCheck(
.stat = undefined,
.tree = undefined,
.zir = undefined,
.pkg = undefined,
.mod = undefined,
.root_decl = .none,
};
if (zig_source_file) |file_name| {
@ -5971,8 +6048,11 @@ pub fn cmdAstCheck(
file.stat.size = source.len;
}
file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.mod = try Package.Module.create(arena, .{
.root = Package.Path.cwd(),
.root_src_path = file.sub_file_path,
.fully_qualified_name = "root",
});
file.tree = try Ast.parse(gpa, file.source, .zig);
file.tree_loaded = true;
@ -6067,7 +6147,7 @@ pub fn cmdDumpZir(
.stat = undefined,
.tree = undefined,
.zir = try Module.loadZirCache(gpa, f),
.pkg = undefined,
.mod = undefined,
.root_decl = .none,
};
@ -6136,12 +6216,15 @@ pub fn cmdChangelist(
},
.tree = undefined,
.zir = undefined,
.pkg = undefined,
.mod = undefined,
.root_decl = .none,
};
file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.mod = try Package.Module.create(arena, .{
.root = Package.Path.cwd(),
.root_src_path = file.sub_file_path,
.fully_qualified_name = "root",
});
const source = try arena.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0);
const amt = try f.readAll(source);
@ -6623,7 +6706,9 @@ fn cmdFetch(
args: []const []const u8,
) !void {
const color: Color = .auto;
var opt_url: ?[]const u8 = null;
const work_around_btrfs_bug = builtin.os.tag == .linux and
std.process.hasEnvVarConstant("ZIG_BTRFS_WORKAROUND");
var opt_path_or_url: ?[]const u8 = null;
var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR");
{
@ -6643,15 +6728,15 @@ fn cmdFetch(
} else {
fatal("unrecognized parameter: '{s}'", .{arg});
}
} else if (opt_url != null) {
} else if (opt_path_or_url != null) {
fatal("unexpected extra parameter: '{s}'", .{arg});
} else {
opt_url = arg;
opt_path_or_url = arg;
}
}
}
const url = opt_url orelse fatal("missing url or path parameter", .{});
const path_or_url = opt_path_or_url orelse fatal("missing url or path parameter", .{});
var thread_pool: ThreadPool = undefined;
try thread_pool.init(.{ .allocator = gpa });
@ -6664,19 +6749,6 @@ fn cmdFetch(
const root_prog_node = progress.start("Fetch", 0);
defer root_prog_node.end();
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
var report: Package.Report = .{
.ast = null,
.directory = .{
.handle = fs.cwd(),
.path = null,
},
.error_bundle = &wip_errors,
};
var global_cache_directory: Compilation.Directory = l: {
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
break :l .{
@ -6686,56 +6758,51 @@ fn cmdFetch(
};
defer global_cache_directory.handle.close();
var readable_resource: Package.ReadableResource = rr: {
if (fs.cwd().openIterableDir(url, .{})) |dir| {
break :rr .{
.path = try gpa.dupe(u8, url),
.resource = .{ .dir = dir },
};
} else |dir_err| {
const file_err = if (dir_err == error.NotDir) e: {
if (fs.cwd().openFile(url, .{})) |f| {
break :rr .{
.path = try gpa.dupe(u8, url),
.resource = .{ .file = f },
};
} else |err| break :e err;
} else dir_err;
const uri = std.Uri.parse(url) catch |uri_err| {
fatal("'{s}' could not be recognized as a file path ({s}) or an URL ({s})", .{
url, @errorName(file_err), @errorName(uri_err),
});
};
const fetch_location = try Package.FetchLocation.initUri(uri, 0, report);
const cwd: Cache.Directory = .{
.handle = fs.cwd(),
.path = null,
};
break :rr try fetch_location.fetch(gpa, cwd, &http_client, 0, report);
}
var job_queue: Package.Fetch.JobQueue = .{
.http_client = &http_client,
.thread_pool = &thread_pool,
.global_cache = global_cache_directory,
.recursive = false,
.work_around_btrfs_bug = work_around_btrfs_bug,
};
defer readable_resource.deinit(gpa);
defer job_queue.deinit();
var package_location = readable_resource.unpack(
gpa,
&thread_pool,
global_cache_directory,
0,
report,
root_prog_node,
) catch |err| {
if (wip_errors.root_list.items.len > 0) {
var errors = try wip_errors.toOwnedBundle("");
defer errors.deinit(gpa);
errors.renderToStdErr(renderOptions(color));
process.exit(1);
}
fatal("unable to unpack '{s}': {s}", .{ url, @errorName(err) });
var fetch: Package.Fetch = .{
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .path_or_url = path_or_url },
.location_tok = 0,
.hash_tok = 0,
.parent_package_root = undefined,
.parent_manifest_ast = null,
.prog_node = root_prog_node,
.job_queue = &job_queue,
.omit_missing_hash_error = true,
.allow_missing_paths_field = false,
.package_root = undefined,
.error_bundle = undefined,
.manifest = null,
.manifest_ast = undefined,
.actual_hash = undefined,
.has_build_zig = false,
.oom_flag = false,
.module = null,
};
defer package_location.deinit(gpa);
defer fetch.deinit();
const hex_digest = Package.Manifest.hexDigest(package_location.hash);
fetch.run() catch |err| switch (err) {
error.OutOfMemory => fatal("out of memory", .{}),
error.FetchFailed => {}, // error bundle checked below
};
if (fetch.error_bundle.root_list.items.len > 0) {
var errors = try fetch.error_bundle.toOwnedBundle("");
errors.renderToStdErr(renderOptions(color));
process.exit(1);
}
const hex_digest = Package.Manifest.hexDigest(fetch.actual_hash);
progress.done = true;
progress.refresh();
@ -6744,3 +6811,56 @@ fn cmdFetch(
return cleanExit();
}
/// Convenience wrapper used when the compiler is built with
/// `only_core_functionality` (no package fetching): generates an empty
/// `@dependencies` source (no packages, no root deps) and registers the
/// resulting module on `main_mod` via `createDependenciesModule`.
/// The returned module pointer is deliberately discarded — only the side
/// effect of making `@import("@dependencies")` resolvable is needed.
fn createEmptyDependenciesModule(
arena: Allocator,
main_mod: *Package.Module,
local_cache_directory: Cache.Directory,
) !void {
// `source` is arena-allocated and never explicitly freed here; the arena
// owns it for the remainder of the command.
var source = std.ArrayList(u8).init(arena);
try Package.Fetch.JobQueue.createEmptyDependenciesSource(&source);
_ = try createDependenciesModule(arena, source.items, main_mod, local_cache_directory);
}
/// Creates the dependencies.zig file and corresponding `Package.Module` for the
/// build runner to obtain via `@import("@dependencies")`.
/// The file is first written into a uniquely-named tmp directory, then renamed
/// into a content-addressed "o/<digest>" directory in the local cache
/// (see `Package.Fetch.renameTmpIntoCache`), so identical inputs converge on
/// the same cache path. On success the module is registered on `main_mod`
/// under the name "@dependencies" and returned.
/// All allocations come from `arena`; nothing is freed here.
fn createDependenciesModule(
arena: Allocator,
source: []const u8,
main_mod: *Package.Module,
local_cache_directory: Cache.Directory,
) !*Package.Module {
// Atomically create the file in a directory named after the hash of its contents.
const basename = "dependencies.zig";
// Random suffix keeps concurrent `zig build` invocations from colliding on
// the same tmp directory before the rename into the cache below.
const rand_int = std.crypto.random.int(u64);
const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++
Package.Manifest.hex64(rand_int);
{
var tmp_dir = try local_cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{});
defer tmp_dir.close();
try tmp_dir.writeFile(basename, source);
}
// Final directory name is derived from compiler version + generated source,
// so a rebuild with unchanged dependencies reuses the same cache entry.
var hh: Cache.HashHelper = .{};
hh.addBytes(build_options.version);
hh.addBytes(source);
const hex_digest = hh.final();
const o_dir_sub_path = try arena.dupe(u8, "o" ++ fs.path.sep_str ++ hex_digest);
try Package.Fetch.renameTmpIntoCache(
local_cache_directory.handle,
tmp_dir_sub_path,
o_dir_sub_path,
);
const deps_mod = try Package.Module.create(arena, .{
.root = .{
.root_dir = local_cache_directory,
.sub_path = o_dir_sub_path,
},
.root_src_path = basename,
// NOTE(review): name matches the "root.@dependencies.<hash>" children
// created for each dependency's build.zig module in cmdBuild.
.fully_qualified_name = "root.@dependencies",
});
try main_mod.deps.put(arena, "@dependencies", deps_mod);
return deps_mod;
}

View File

@ -206,7 +206,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile, prog_node: *std.Progr
.zig_lib_directory = comp.zig_lib_directory,
.target = target,
.root_name = "c",
.main_pkg = null,
.main_mod = null,
.output_mode = .Lib,
.link_mode = .Dynamic,
.thread_pool = comp.thread_pool,

View File

@ -7,4 +7,4 @@ comptime {
// backend=stage2
// target=native
//
// :1:21: error: no package named 'foo' available within package 'root'
// :1:21: error: no module named 'foo' available within module root

View File

@ -5,4 +5,4 @@ export fn a() usize {
// error
// target=native
//
// :2:20: error: import of file outside package path: '../../above.zig'
// :2:20: error: import of file outside module path: '../../above.zig'

View File

@ -6,4 +6,4 @@ comptime {
// backend=stage2
// target=native
//
// :2:17: error: import of file outside package path: '../a.zig'
// :2:17: error: import of file outside module path: '../a.zig'

View File

@ -129,7 +129,7 @@ pub fn addCases(ctx: *Cases) !void {
\\}
, &[_][]const u8{
":1:1: error: file exists in multiple modules",
":1:1: note: root of module root.foo",
":1:1: note: root of module foo",
":3:17: note: imported from module root",
});
case.addSourceFile("foo.zig",