Merge pull request #14664 from mlugg/feat/new-module-cli

New module CLI
This commit is contained in:
Andrew Kelley 2023-02-21 11:43:31 -05:00 committed by GitHub
commit 7f691b3fe2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
34 changed files with 783 additions and 247 deletions

View File

@ -748,7 +748,8 @@ set(BUILD_ZIG2_ARGS
build-exe src/main.zig -ofmt=c -lc
-OReleaseSmall
--name zig2 -femit-bin="${ZIG2_C_SOURCE}"
--pkg-begin build_options "${ZIG_CONFIG_ZIG_OUT}" --pkg-end
--mod "build_options::${ZIG_CONFIG_ZIG_OUT}"
--deps build_options
-target "${HOST_TARGET_TRIPLE}"
)
@ -765,7 +766,8 @@ set(BUILD_COMPILER_RT_ARGS
build-obj lib/compiler_rt.zig -ofmt=c
-OReleaseSmall
--name compiler_rt -femit-bin="${ZIG_COMPILER_RT_C_SOURCE}"
--pkg-begin build_options "${ZIG_CONFIG_ZIG_OUT}" --pkg-end
--mod "build_options::${ZIG_CONFIG_ZIG_OUT}"
--deps build_options
-target "${HOST_TARGET_TRIPLE}"
)

View File

@ -87,7 +87,8 @@ CheckLastExitCode
-OReleaseSmall `
--name compiler_rt `
-femit-bin="compiler_rt-x86_64-windows-msvc.c" `
--pkg-begin build_options config.zig --pkg-end `
--mod build_options::config.zig `
--deps build_options `
-target x86_64-windows-msvc
CheckLastExitCode

View File

@ -87,7 +87,8 @@ CheckLastExitCode
-OReleaseSmall `
--name compiler_rt `
-femit-bin="compiler_rt-x86_64-windows-msvc.c" `
--pkg-begin build_options config.zig --pkg-end `
--mod build_options::config.zig `
--deps build_options `
-target x86_64-windows-msvc
CheckLastExitCode

View File

@ -955,7 +955,10 @@ pub fn addFrameworkPath(self: *CompileStep, dir_path: []const u8) void {
/// package's module table using `name`.
pub fn addModule(cs: *CompileStep, name: []const u8, module: *Module) void {
cs.modules.put(cs.builder.dupe(name), module) catch @panic("OOM");
cs.addRecursiveBuildDeps(module);
var done = std.AutoHashMap(*Module, void).init(cs.builder.allocator);
defer done.deinit();
cs.addRecursiveBuildDeps(module, &done) catch @panic("OOM");
}
/// Adds a module to be used with `@import` without exposing it in the current
@ -969,10 +972,12 @@ pub fn addOptions(cs: *CompileStep, module_name: []const u8, options: *OptionsSt
addModule(cs, module_name, options.createModule());
}
fn addRecursiveBuildDeps(cs: *CompileStep, module: *Module) void {
fn addRecursiveBuildDeps(cs: *CompileStep, module: *Module, done: *std.AutoHashMap(*Module, void)) !void {
if (done.contains(module)) return;
try done.put(module, {});
module.source_file.addStepDependencies(&cs.step);
for (module.dependencies.values()) |dep| {
cs.addRecursiveBuildDeps(dep);
try cs.addRecursiveBuildDeps(dep, done);
}
}
@ -1031,22 +1036,110 @@ fn linkLibraryOrObject(self: *CompileStep, other: *CompileStep) void {
fn appendModuleArgs(
cs: *CompileStep,
zig_args: *ArrayList([]const u8),
name: []const u8,
module: *Module,
) error{OutOfMemory}!void {
try zig_args.append("--pkg-begin");
try zig_args.append(name);
try zig_args.append(module.builder.pathFromRoot(module.source_file.getPath(module.builder)));
// First, traverse the whole dependency graph and give every module a unique name, ideally one
// named after what it's called somewhere in the graph. It will help here to have both a mapping
// from module to name and a set of all the currently-used names.
var mod_names = std.AutoHashMap(*Module, []const u8).init(cs.builder.allocator);
var names = std.StringHashMap(void).init(cs.builder.allocator);
var to_name = std.ArrayList(struct {
name: []const u8,
mod: *Module,
}).init(cs.builder.allocator);
{
const keys = module.dependencies.keys();
for (module.dependencies.values(), 0..) |sub_module, i| {
const sub_name = keys[i];
try cs.appendModuleArgs(zig_args, sub_name, sub_module);
var it = cs.modules.iterator();
while (it.next()) |kv| {
// While we're traversing the root dependencies, let's make sure that no module names
// have colons in them, since the CLI forbids it. We handle this for transitive
// dependencies further down.
if (std.mem.indexOfScalar(u8, kv.key_ptr.*, ':') != null) {
@panic("Module names cannot contain colons");
}
try to_name.append(.{
.name = kv.key_ptr.*,
.mod = kv.value_ptr.*,
});
}
}
try zig_args.append("--pkg-end");
while (to_name.popOrNull()) |dep| {
if (mod_names.contains(dep.mod)) continue;
// We'll use this buffer to store the name we decide on
var buf = try cs.builder.allocator.alloc(u8, dep.name.len + 32);
// First, try just the exposed dependency name
std.mem.copy(u8, buf, dep.name);
var name = buf[0..dep.name.len];
var n: usize = 0;
while (names.contains(name)) {
// If that failed, append an incrementing number to the end
name = std.fmt.bufPrint(buf, "{s}{}", .{ dep.name, n }) catch unreachable;
n += 1;
}
try mod_names.put(dep.mod, name);
try names.put(name, {});
var it = dep.mod.dependencies.iterator();
while (it.next()) |kv| {
// Same colon-in-name check as above, but for transitive dependencies.
if (std.mem.indexOfScalar(u8, kv.key_ptr.*, ':') != null) {
@panic("Module names cannot contain colons");
}
try to_name.append(.{
.name = kv.key_ptr.*,
.mod = kv.value_ptr.*,
});
}
}
// Since the module names given to the CLI are based off of the exposed names, we already know
// that none of the CLI names have colons in them, so there's no need to check that explicitly.
// Every module in the graph is now named; output their definitions
{
var it = mod_names.iterator();
while (it.next()) |kv| {
const mod = kv.key_ptr.*;
const name = kv.value_ptr.*;
const deps_str = try constructDepString(cs.builder.allocator, mod_names, mod.dependencies);
const src = mod.builder.pathFromRoot(mod.source_file.getPath(mod.builder));
try zig_args.append("--mod");
try zig_args.append(try std.fmt.allocPrint(cs.builder.allocator, "{s}:{s}:{s}", .{ name, deps_str, src }));
}
}
// Lastly, output the root dependencies
const deps_str = try constructDepString(cs.builder.allocator, mod_names, cs.modules);
if (deps_str.len > 0) {
try zig_args.append("--deps");
try zig_args.append(deps_str);
}
}
/// Render the dependency list of `deps` in the comma-separated format expected
/// by the `--mod` and `--deps` CLI arguments. Each entry is either `name`
/// (when the exposed import name equals the module's unique CLI name) or
/// `import=name` when they differ. Returns "" for an empty dependency table.
fn constructDepString(
    allocator: std.mem.Allocator,
    mod_names: std.AutoHashMap(*Module, []const u8),
    deps: std.StringArrayHashMap(*Module),
) ![]const u8 {
    var out = std.ArrayList(u8).init(allocator);
    const writer = out.writer();
    var iter = deps.iterator();
    while (iter.next()) |entry| {
        const import_name = entry.key_ptr.*;
        // Every module was assigned a CLI name before this is called, so the
        // lookup cannot fail.
        const cli_name = mod_names.get(entry.value_ptr.*).?;
        if (std.mem.eql(u8, import_name, cli_name)) {
            try writer.print("{s},", .{cli_name});
        } else {
            try writer.print("{s}={s},", .{ import_name, cli_name });
        }
    }
    // Drop the trailing comma; an empty table yields the empty string.
    return if (out.items.len == 0) "" else out.items[0 .. out.items.len - 1];
}
fn make(step: *Step) !void {
@ -1573,13 +1666,7 @@ fn make(step: *Step) !void {
try zig_args.append("--test-no-exec");
}
{
const keys = self.modules.keys();
for (self.modules.values(), 0..) |module, i| {
const name = keys[i];
try self.appendModuleArgs(&zig_args, name, module);
}
}
try self.appendModuleArgs(&zig_args);
for (self.include_dirs.items) |include_dir| {
switch (include_dir) {

View File

@ -860,17 +860,9 @@ fn walkInstruction(
const str_tok = data[inst_index].str_tok;
var path = str_tok.get(file.zir);
const maybe_other_package: ?*Package = blk: {
if (self.module.main_pkg_is_std and std.mem.eql(u8, path, "std")) {
path = "std";
break :blk self.module.main_pkg;
} else {
break :blk file.pkg.table.get(path);
}
};
// importFile cannot error out since all files
// are already loaded at this point
if (maybe_other_package) |other_package| {
if (file.pkg.table.get(path)) |other_package| {
const result = try self.packages.getOrPut(self.arena, other_package);
// Immediately add this package to the import table of our

View File

@ -1596,36 +1596,53 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
const builtin_pkg = try Package.createWithDir(
gpa,
"builtin",
zig_cache_artifact_directory,
null,
"builtin.zig",
);
errdefer builtin_pkg.destroy(gpa);
const std_pkg = try Package.createWithDir(
gpa,
"std",
options.zig_lib_directory,
"std",
"std.zig",
);
errdefer std_pkg.destroy(gpa);
// When you're testing std, the main module is std. In that case, we'll just set the std
// module to the main one, since avoiding the errors caused by duplicating it is more
// effort than it's worth.
const main_pkg_is_std = m: {
const std_path = try std.fs.path.resolve(arena, &[_][]const u8{
options.zig_lib_directory.path orelse ".",
"std",
"std.zig",
});
defer arena.free(std_path);
const main_path = try std.fs.path.resolve(arena, &[_][]const u8{
main_pkg.root_src_directory.path orelse ".",
main_pkg.root_src_path,
});
defer arena.free(main_path);
break :m mem.eql(u8, main_path, std_path);
};
const std_pkg = if (main_pkg_is_std)
main_pkg
else
try Package.createWithDir(
gpa,
options.zig_lib_directory,
"std",
"std.zig",
);
errdefer if (!main_pkg_is_std) std_pkg.destroy(gpa);
const root_pkg = if (options.is_test) root_pkg: {
// TODO: we currently have two packages named 'root' here, which is weird. This
// should be changed as part of the resolution of #12201
const test_pkg = if (options.test_runner_path) |test_runner| test_pkg: {
const test_dir = std.fs.path.dirname(test_runner);
const basename = std.fs.path.basename(test_runner);
const pkg = try Package.create(gpa, "root", test_dir, basename);
const pkg = try Package.create(gpa, test_dir, basename);
// copy package table from main_pkg to root_pkg
pkg.table = try main_pkg.table.clone(gpa);
break :test_pkg pkg;
} else try Package.createWithDir(
gpa,
"root",
options.zig_lib_directory,
null,
"test_runner.zig",
@ -1639,7 +1656,6 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
const compiler_rt_pkg = if (include_compiler_rt and options.output_mode == .Obj) compiler_rt_pkg: {
break :compiler_rt_pkg try Package.createWithDir(
gpa,
"compiler_rt",
options.zig_lib_directory,
null,
"compiler_rt.zig",
@ -1647,28 +1663,14 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
} else null;
errdefer if (compiler_rt_pkg) |p| p.destroy(gpa);
try main_pkg.addAndAdopt(gpa, builtin_pkg);
try main_pkg.add(gpa, root_pkg);
try main_pkg.addAndAdopt(gpa, std_pkg);
try main_pkg.add(gpa, "builtin", builtin_pkg);
try main_pkg.add(gpa, "root", root_pkg);
try main_pkg.add(gpa, "std", std_pkg);
if (compiler_rt_pkg) |p| {
try main_pkg.addAndAdopt(gpa, p);
try main_pkg.add(gpa, "compiler_rt", p);
}
const main_pkg_is_std = m: {
const std_path = try std.fs.path.resolve(arena, &[_][]const u8{
std_pkg.root_src_directory.path orelse ".",
std_pkg.root_src_path,
});
defer arena.free(std_path);
const main_path = try std.fs.path.resolve(arena, &[_][]const u8{
main_pkg.root_src_directory.path orelse ".",
main_pkg.root_src_path,
});
defer arena.free(main_path);
break :m mem.eql(u8, main_path, std_path);
};
// Pre-open the directory handles for cached ZIR code so that it does not need
// to redundantly happen for each AstGen operation.
const zir_sub_dir = "z";
@ -1705,7 +1707,6 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.gpa = gpa,
.comp = comp,
.main_pkg = main_pkg,
.main_pkg_is_std = main_pkg_is_std,
.root_pkg = root_pkg,
.zig_cache_artifact_directory = zig_cache_artifact_directory,
.global_zir_cache = global_zir_cache,
@ -2772,6 +2773,111 @@ fn emitOthers(comp: *Compilation) void {
}
}
/// Emit a "file exists in multiple modules" error for every file in
/// `mod.import_table` marked `multi_pkg`, attaching "imported from" /
/// "root of" notes for each recorded reference, then recursively mark the
/// files those files import as `multi_pkg` as well. Errors are recorded in
/// `mod.failed_files`; ownership of each ErrorMsg transfers to that map.
fn reportMultiModuleErrors(mod: *Module) !void {
    // Some cases can give you a whole bunch of multi-module errors, which it's not helpful to
    // print all of, so we'll cap the number of these to emit.
    var num_errors: u32 = 0;
    const max_errors = 5;
    // Attach the "some omitted" note to the final error message
    var last_err: ?*Module.ErrorMsg = null;
    for (mod.import_table.values()) |file| {
        if (!file.multi_pkg) continue;
        // Keep counting past the cap so the "N more errors omitted" note below
        // can report an accurate count, but stop allocating error messages.
        num_errors += 1;
        if (num_errors > max_errors) continue;
        const err = err_blk: {
            // Like with errors, let's cap the number of notes to prevent a huge error spew.
            const max_notes = 5;
            // Saturating subtraction: `omitted` is 0 when there are at most `max_notes` references.
            const omitted = file.references.items.len -| max_notes;
            const num_notes = file.references.items.len - omitted;
            // Reserve one extra slot for the "N more references omitted" note when needed.
            const notes = try mod.gpa.alloc(Module.ErrorMsg, if (omitted > 0) num_notes + 1 else num_notes);
            errdefer mod.gpa.free(notes);
            for (notes[0..num_notes], file.references.items[0..num_notes], 0..) |*note, ref, i| {
                // On failure, deinit only the notes initialized so far by this loop.
                errdefer for (notes[0..i]) |*n| n.deinit(mod.gpa);
                note.* = switch (ref) {
                    .import => |loc| blk: {
                        // getName allocates; the message formatter copies it, so free immediately.
                        const name = try loc.file_scope.pkg.getName(mod.gpa, mod.*);
                        defer mod.gpa.free(name);
                        break :blk try Module.ErrorMsg.init(
                            mod.gpa,
                            loc,
                            "imported from module {s}",
                            .{name},
                        );
                    },
                    .root => |pkg| blk: {
                        const name = try pkg.getName(mod.gpa, mod.*);
                        defer mod.gpa.free(name);
                        break :blk try Module.ErrorMsg.init(
                            mod.gpa,
                            // A module root has no meaningful source location; point at the whole file.
                            .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
                            "root of module {s}",
                            .{name},
                        );
                    },
                };
            }
            // From here on, all `num_notes` notes are initialized and must be
            // cleaned up together on any later failure.
            errdefer for (notes[0..num_notes]) |*n| n.deinit(mod.gpa);
            if (omitted > 0) {
                notes[num_notes] = try Module.ErrorMsg.init(
                    mod.gpa,
                    .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
                    "{} more references omitted",
                    .{omitted},
                );
            }
            errdefer if (omitted > 0) notes[num_notes].deinit(mod.gpa);
            const err = try Module.ErrorMsg.create(
                mod.gpa,
                .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
                "file exists in multiple modules",
                .{},
            );
            // The ErrorMsg now owns `notes`; the errdefers above stop applying
            // once we break out of the block successfully.
            err.notes = notes;
            break :err_blk err;
        };
        errdefer err.destroy(mod.gpa);
        try mod.failed_files.putNoClobber(mod.gpa, file, err);
        last_err = err;
    }
    // If we omitted any errors, add a note saying that
    if (num_errors > max_errors) {
        // Safe to unwrap: num_errors > max_errors implies at least one error
        // was recorded, so last_err was set in the loop above.
        const err = last_err.?;
        // There isn't really any meaningful place to put this note, so just attach it to the
        // last failed file
        var note = try Module.ErrorMsg.init(
            mod.gpa,
            err.src_loc,
            "{} more errors omitted",
            .{num_errors - max_errors},
        );
        errdefer note.deinit(mod.gpa);
        // Grow the existing notes array in place to append the summary note.
        const i = err.notes.len;
        err.notes = try mod.gpa.realloc(err.notes, i + 1);
        err.notes[i] = note;
    }
    // Now that we've reported the errors, we need to deal with
    // dependencies. Any file referenced by a multi_pkg file should also be
    // marked multi_pkg and have its status set to astgen_failure, as it's
    // ambiguous which package they should be analyzed as a part of. We need
    // to add this flag after reporting the errors however, as otherwise
    // we'd get an error for every single downstream file, which wouldn't be
    // very useful.
    for (mod.import_table.values()) |file| {
        if (file.multi_pkg) file.recursiveMarkMultiPkg(mod);
    }
}
/// Having the file open for writing is problematic as far as executing the
/// binary is concerned. This will remove the write flag, or close the file,
/// or whatever is needed so that it can be executed.
@ -3098,54 +3204,7 @@ pub fn performAllTheWork(
}
if (comp.bin_file.options.module) |mod| {
for (mod.import_table.values()) |file| {
if (!file.multi_pkg) continue;
const err = err_blk: {
const notes = try mod.gpa.alloc(Module.ErrorMsg, file.references.items.len);
errdefer mod.gpa.free(notes);
for (notes, 0..) |*note, i| {
errdefer for (notes[0..i]) |*n| n.deinit(mod.gpa);
note.* = switch (file.references.items[i]) {
.import => |loc| try Module.ErrorMsg.init(
mod.gpa,
loc,
"imported from package {s}",
.{loc.file_scope.pkg.name},
),
.root => |pkg| try Module.ErrorMsg.init(
mod.gpa,
.{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
"root of package {s}",
.{pkg.name},
),
};
}
errdefer for (notes) |*n| n.deinit(mod.gpa);
const err = try Module.ErrorMsg.create(
mod.gpa,
.{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
"file exists in multiple packages",
.{},
);
err.notes = notes;
break :err_blk err;
};
errdefer err.destroy(mod.gpa);
try mod.failed_files.putNoClobber(mod.gpa, file, err);
}
// Now that we've reported the errors, we need to deal with
// dependencies. Any file referenced by a multi_pkg file should also be
// marked multi_pkg and have its status set to astgen_failure, as it's
// ambiguous which package they should be analyzed as a part of. We need
// to add this flag after reporting the errors however, as otherwise
// we'd get an error for every single downstream file, which wouldn't be
// very useful.
for (mod.import_table.values()) |file| {
if (file.multi_pkg) file.recursiveMarkMultiPkg(mod);
}
try reportMultiModuleErrors(mod);
}
{
@ -5408,7 +5467,6 @@ fn buildOutputFromZig(
var main_pkg: Package = .{
.root_src_directory = comp.zig_lib_directory,
.root_src_path = src_basename,
.name = "root",
};
defer main_pkg.deinitTable(comp.gpa);
const root_name = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len];

View File

@ -144,10 +144,6 @@ stage1_flags: packed struct {
} = .{},
job_queued_update_builtin_zig: bool = true,
/// This makes it so that we can run `zig test` on the standard library.
/// Otherwise, the logic for scanning test decls skips all of them because
/// `main_pkg != std_pkg`.
main_pkg_is_std: bool,
compile_log_text: ArrayListUnmanaged(u8) = .{},
@ -1950,7 +1946,7 @@ pub const File = struct {
prev_zir: ?*Zir = null,
/// A single reference to a file.
const Reference = union(enum) {
pub const Reference = union(enum) {
/// The file is imported directly (i.e. not as a package) with @import.
import: SrcLoc,
/// The file is the root of a package.
@ -2113,7 +2109,27 @@ pub const File = struct {
/// Add a reference to this file during AstGen.
pub fn addReference(file: *File, mod: Module, ref: Reference) !void {
try file.references.append(mod.gpa, ref);
// Don't add the same module root twice. Note that since we always add module roots at the
// front of the references array (see below), this loop is actually O(1) on valid code.
if (ref == .root) {
for (file.references.items) |other| {
switch (other) {
.root => |r| if (ref.root == r) return,
else => break, // reached the end of the "is-root" references
}
}
}
switch (ref) {
// We put root references at the front of the list both to make the above loop fast and
// to make multi-module errors more helpful (since "root-of" notes are generally more
// informative than "imported-from" notes). This path is hit very rarely, so the speed
// of the insert operation doesn't matter too much.
.root => try file.references.insert(mod.gpa, 0, ref),
// Other references we'll just put at the end.
else => try file.references.append(mod.gpa, ref),
}
const pkg = switch (ref) {
.import => |loc| loc.file_scope.pkg,
@ -2128,7 +2144,10 @@ pub const File = struct {
file.multi_pkg = true;
file.status = .astgen_failure;
std.debug.assert(file.zir_loaded);
// We can only mark children as failed if the ZIR is loaded, which may not
// be the case if there were other astgen failures in this file
if (!file.zir_loaded) return;
const imports_index = file.zir.extra[@enumToInt(Zir.ExtraIndex.imports)];
if (imports_index == 0) return;
const extra = file.zir.extraData(Zir.Inst.Imports, imports_index);
@ -3323,10 +3342,19 @@ pub fn deinit(mod: *Module) void {
// The callsite of `Compilation.create` owns the `main_pkg`, however
// Module owns the builtin and std packages that it adds.
if (mod.main_pkg.table.fetchRemove("builtin")) |kv| {
gpa.free(kv.key);
kv.value.destroy(gpa);
}
if (mod.main_pkg.table.fetchRemove("std")) |kv| {
kv.value.destroy(gpa);
gpa.free(kv.key);
// It's possible for main_pkg to be std when running 'zig test'! In this case, we must not
// destroy it, since it would lead to a double-free.
if (kv.value != mod.main_pkg) {
kv.value.destroy(gpa);
}
}
if (mod.main_pkg.table.fetchRemove("root")) |kv| {
gpa.free(kv.key);
}
if (mod.root_pkg != mod.main_pkg) {
mod.root_pkg.destroy(gpa);
@ -4808,11 +4836,14 @@ pub fn importPkg(mod: *Module, pkg: *Package) !ImportFileResult {
const gop = try mod.import_table.getOrPut(gpa, resolved_path);
errdefer _ = mod.import_table.pop();
if (gop.found_existing) return ImportFileResult{
.file = gop.value_ptr.*,
.is_new = false,
.is_pkg = true,
};
if (gop.found_existing) {
try gop.value_ptr.*.addReference(mod.*, .{ .root = pkg });
return ImportFileResult{
.file = gop.value_ptr.*,
.is_new = false,
.is_pkg = true,
};
}
const sub_file_path = try gpa.dupe(u8, pkg.root_src_path);
errdefer gpa.free(sub_file_path);
@ -5208,22 +5239,14 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
// test decl with no name. Skip the part where we check against
// the test name filter.
if (!comp.bin_file.options.is_test) break :blk false;
if (decl_pkg != mod.main_pkg) {
if (!mod.main_pkg_is_std) break :blk false;
const std_pkg = mod.main_pkg.table.get("std").?;
if (std_pkg != decl_pkg) break :blk false;
}
if (decl_pkg != mod.main_pkg) break :blk false;
try mod.test_functions.put(gpa, new_decl_index, {});
break :blk true;
},
else => blk: {
if (!is_named_test) break :blk false;
if (!comp.bin_file.options.is_test) break :blk false;
if (decl_pkg != mod.main_pkg) {
if (!mod.main_pkg_is_std) break :blk false;
const std_pkg = mod.main_pkg.table.get("std").?;
if (std_pkg != decl_pkg) break :blk false;
}
if (decl_pkg != mod.main_pkg) break :blk false;
if (comp.test_filter) |test_filter| {
if (mem.indexOf(u8, decl_name, test_filter) == null) {
break :blk false;

View File

@ -22,17 +22,16 @@ pub const Table = std.StringHashMapUnmanaged(*Package);
root_src_directory: Compilation.Directory,
/// Relative to `root_src_directory`. May contain path separators.
root_src_path: []const u8,
/// The dependency table of this module. Shared dependencies such as 'std', 'builtin', and 'root'
/// are not specified in every dependency table, but instead only in the table of `main_pkg`.
/// `Module.importFile` is responsible for detecting these names and using the correct package.
table: Table = .{},
parent: ?*Package = null,
/// Whether to free `root_src_directory` on `destroy`.
root_src_directory_owned: bool = false,
/// This information can be recovered from 'table', but it's more convenient to store on the package.
name: []const u8,
/// Allocate a Package. No references to the slices passed are kept.
pub fn create(
gpa: Allocator,
name: []const u8,
/// Null indicates the current working directory
root_src_dir_path: ?[]const u8,
/// Relative to root_src_dir_path
@ -47,9 +46,6 @@ pub fn create(
const owned_src_path = try gpa.dupe(u8, root_src_path);
errdefer gpa.free(owned_src_path);
const owned_name = try gpa.dupe(u8, name);
errdefer gpa.free(owned_name);
ptr.* = .{
.root_src_directory = .{
.path = owned_dir_path,
@ -57,7 +53,6 @@ pub fn create(
},
.root_src_path = owned_src_path,
.root_src_directory_owned = true,
.name = owned_name,
};
return ptr;
@ -65,7 +60,6 @@ pub fn create(
pub fn createWithDir(
gpa: Allocator,
name: []const u8,
directory: Compilation.Directory,
/// Relative to `directory`. If null, means `directory` is the root src dir
/// and is owned externally.
@ -79,9 +73,6 @@ pub fn createWithDir(
const owned_src_path = try gpa.dupe(u8, root_src_path);
errdefer gpa.free(owned_src_path);
const owned_name = try gpa.dupe(u8, name);
errdefer gpa.free(owned_name);
if (root_src_dir_path) |p| {
const owned_dir_path = try directory.join(gpa, &[1][]const u8{p});
errdefer gpa.free(owned_dir_path);
@ -93,14 +84,12 @@ pub fn createWithDir(
},
.root_src_directory_owned = true,
.root_src_path = owned_src_path,
.name = owned_name,
};
} else {
ptr.* = .{
.root_src_directory = directory,
.root_src_directory_owned = false,
.root_src_path = owned_src_path,
.name = owned_name,
};
}
return ptr;
@ -110,7 +99,6 @@ pub fn createWithDir(
/// inside its table; the caller is responsible for calling destroy() on them.
pub fn destroy(pkg: *Package, gpa: Allocator) void {
gpa.free(pkg.root_src_path);
gpa.free(pkg.name);
if (pkg.root_src_directory_owned) {
// If root_src_directory.path is null then the handle is the cwd()
@ -130,15 +118,97 @@ pub fn deinitTable(pkg: *Package, gpa: Allocator) void {
pkg.table.deinit(gpa);
}
pub fn add(pkg: *Package, gpa: Allocator, package: *Package) !void {
pub fn add(pkg: *Package, gpa: Allocator, name: []const u8, package: *Package) !void {
try pkg.table.ensureUnusedCapacity(gpa, 1);
pkg.table.putAssumeCapacityNoClobber(package.name, package);
const name_dupe = try gpa.dupe(u8, name);
pkg.table.putAssumeCapacityNoClobber(name_dupe, package);
}
pub fn addAndAdopt(parent: *Package, gpa: Allocator, child: *Package) !void {
assert(child.parent == null); // make up your mind, who is the parent??
child.parent = parent;
return parent.add(gpa, child);
/// Compute a readable name for the package. The returned name should be freed from gpa. This
/// function is very slow, as it traverses the whole package hierarchy to find a path to this
/// package. It should only be used for error output.
///
/// Returns "<unnamed>" if `target` is not reachable from the root or main
/// package (e.g. @cImport packages), and "root" (plus a dotted path of table
/// keys) otherwise.
pub fn getName(target: *const Package, gpa: Allocator, mod: Module) ![]const u8 {
    // we'll do a breadth-first search from the root module to try and find a short name for this
    // module, using a TailQueue of module/parent pairs. note that the "parent" there is just the
    // first-found shortest path - a module may be children of arbitrarily many other modules.
    // also, this path may vary between executions due to hashmap iteration order, but that doesn't
    // matter too much.
    // All queue nodes share one arena so they can be freed in a single call.
    var node_arena = std.heap.ArenaAllocator.init(gpa);
    defer node_arena.deinit();
    // Each queued entry remembers how it was reached so the path can be
    // reconstructed once `target` is found.
    const Parented = struct {
        parent: ?*const @This(),
        mod: *const Package,
    };
    const Queue = std.TailQueue(Parented);
    var to_check: Queue = .{};
    // Seed the search with the root package (and main_pkg when distinct).
    {
        const new = try node_arena.allocator().create(Queue.Node);
        new.* = .{ .data = .{ .parent = null, .mod = mod.root_pkg } };
        to_check.prepend(new);
    }
    if (mod.main_pkg != mod.root_pkg) {
        const new = try node_arena.allocator().create(Queue.Node);
        // TODO: once #12201 is resolved, we may want a way of indicating a different name for this
        new.* = .{ .data = .{ .parent = null, .mod = mod.main_pkg } };
        to_check.prepend(new);
    }
    // set of modules we've already checked to prevent loops
    var checked = std.AutoHashMap(*const Package, void).init(gpa);
    defer checked.deinit();
    // `prepend` + `pop` take from opposite ends of the queue, so packages are
    // visited in the order they were discovered.
    const linked = while (to_check.pop()) |node| {
        const check = &node.data;
        if (checked.contains(check.mod)) continue;
        try checked.put(check.mod, {});
        if (check.mod == target) break check;
        // Enqueue every dependency of this package for later inspection.
        var it = check.mod.table.iterator();
        while (it.next()) |kv| {
            var new = try node_arena.allocator().create(Queue.Node);
            new.* = .{ .data = .{
                .parent = check,
                .mod = kv.value_ptr.*,
            } };
            to_check.prepend(new);
        }
    } else {
        // this can happen for e.g. @cImport packages
        return gpa.dupe(u8, "<unnamed>");
    };
    // we found a path to the module! unfortunately, we can only traverse *up* it, so we have to put
    // all the names into a buffer so we can then print them in order.
    var names = std.ArrayList([]const u8).init(gpa);
    defer names.deinit();
    var cur: *const Parented = linked;
    while (cur.parent) |parent| : (cur = parent) {
        // find cur's name in parent
        var it = parent.mod.table.iterator();
        const name = while (it.next()) |kv| {
            if (kv.value_ptr.* == cur.mod) {
                break kv.key_ptr.*;
            }
        } else unreachable; // parent links always come from an entry in parent's table
        try names.append(name);
    }
    // finally, print the names into a buffer!
    var buf = std.ArrayList(u8).init(gpa);
    defer buf.deinit();
    try buf.writer().writeAll("root");
    // `names` was collected child-to-parent, so emit it in reverse.
    var i: usize = names.items.len;
    while (i > 0) {
        i -= 1;
        try buf.writer().print(".{s}", .{names.items[i]});
    }
    return buf.toOwnedSlice();
}
pub const build_zig_basename = "build.zig";
@ -236,7 +306,7 @@ pub fn fetchAndAddDependencies(
color,
);
try addAndAdopt(pkg, gpa, sub_pkg);
try add(pkg, gpa, fqn, sub_pkg);
try dependencies_source.writer().print(" pub const {s} = @import(\"{}\");\n", .{
std.zig.fmtId(fqn), std.zig.fmtEscapes(fqn),
@ -248,7 +318,6 @@ pub fn fetchAndAddDependencies(
pub fn createFilePkg(
gpa: Allocator,
name: []const u8,
cache_directory: Compilation.Directory,
basename: []const u8,
contents: []const u8,
@ -269,7 +338,7 @@ pub fn createFilePkg(
const o_dir_sub_path = "o" ++ fs.path.sep_str ++ hex_digest;
try renameTmpIntoCache(cache_directory.handle, tmp_dir_sub_path, o_dir_sub_path);
return createWithDir(gpa, name, cache_directory, o_dir_sub_path, basename);
return createWithDir(gpa, cache_directory, o_dir_sub_path, basename);
}
const Report = struct {
@ -363,9 +432,6 @@ fn fetchAndUnpack(
const owned_src_path = try gpa.dupe(u8, build_zig_basename);
errdefer gpa.free(owned_src_path);
const owned_name = try gpa.dupe(u8, fqn);
errdefer gpa.free(owned_name);
const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
errdefer gpa.free(build_root);
@ -380,7 +446,6 @@ fn fetchAndUnpack(
},
.root_src_directory_owned = true,
.root_src_path = owned_src_path,
.name = owned_name,
};
return ptr;
@ -455,7 +520,7 @@ fn fetchAndUnpack(
std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root),
});
return createWithDir(gpa, fqn, global_cache_directory, pkg_dir_sub_path, build_zig_basename);
return createWithDir(gpa, global_cache_directory, pkg_dir_sub_path, build_zig_basename);
}
fn unpackTarball(

View File

@ -5311,7 +5311,6 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
}
const c_import_pkg = Package.create(
sema.gpa,
"c_import", // TODO: should we make this unique?
null,
c_import_res.out_zig_path,
) catch |err| switch (err) {
@ -11793,8 +11792,9 @@ fn zirImport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
return sema.fail(block, operand_src, "import of file outside package path: '{s}'", .{operand});
},
error.PackageNotFound => {
const cur_pkg = block.getFileScope().pkg;
return sema.fail(block, operand_src, "no package named '{s}' available within package '{s}'", .{ operand, cur_pkg.name });
const name = try block.getFileScope().pkg.getName(sema.gpa, mod.*);
defer sema.gpa.free(name);
return sema.fail(block, operand_src, "no package named '{s}' available within package '{s}'", .{ operand, name });
},
else => {
// TODO: these errors are file system errors; make sure an update() will

View File

@ -403,8 +403,11 @@ const usage_build_generic =
\\ ReleaseFast Optimizations on, safety off
\\ ReleaseSafe Optimizations on, safety on
\\ ReleaseSmall Optimize for small binary, safety off
\\ --pkg-begin [name] [path] Make pkg available to import and push current pkg
\\ --pkg-end Pop current pkg
\\ --mod [name]:[deps]:[src] Make a module available for dependency under the given name
\\ deps: [dep],[dep],...
\\ dep: [[import=]name]
\\ --deps [dep],[dep],... Set dependency names for the root package
\\ dep: [[import=]name]
\\ --main-pkg-path Set the directory of the root package
\\ -fPIC Force-enable Position Independent Code
\\ -fno-PIC Force-disable Position Independent Code
@ -858,15 +861,21 @@ fn buildOutputType(
var linker_export_symbol_names = std.ArrayList([]const u8).init(gpa);
defer linker_export_symbol_names.deinit();
// This package only exists to clean up the code parsing --pkg-begin and
// --pkg-end flags. Use dummy values that are safe for the destroy call.
var pkg_tree_root: Package = .{
.root_src_directory = .{ .path = null, .handle = fs.cwd() },
.root_src_path = &[0]u8{},
.name = &[0]u8{},
};
defer freePkgTree(gpa, &pkg_tree_root, false);
var cur_pkg: *Package = &pkg_tree_root;
// Contains every module specified via --mod. The dependencies are added
// after argument parsing is completed. We use a StringArrayHashMap to make
// error output consistent.
var modules = std.StringArrayHashMap(struct {
mod: *Package,
deps_str: []const u8, // still in CLI arg format
}).init(gpa);
defer {
var it = modules.iterator();
while (it.next()) |kv| kv.value_ptr.mod.destroy(gpa);
modules.deinit();
}
// The dependency string for the root package
var root_deps_str: ?[]const u8 = null;
// before arg parsing, check for the NO_COLOR environment variable
// if it exists, default the color setting to .off
@ -943,34 +952,44 @@ fn buildOutputType(
} else {
fatal("unexpected end-of-parameter mark: --", .{});
}
} else if (mem.eql(u8, arg, "--pkg-begin")) {
const opt_pkg_name = args_iter.next();
const opt_pkg_path = args_iter.next();
if (opt_pkg_name == null or opt_pkg_path == null)
fatal("Expected 2 arguments after {s}", .{arg});
} else if (mem.eql(u8, arg, "--mod")) {
const info = args_iter.nextOrFatal();
var info_it = mem.split(u8, info, ":");
const mod_name = info_it.next() orelse fatal("expected non-empty argument after {s}", .{arg});
const deps_str = info_it.next() orelse fatal("expected 'name:deps:path' after {s}", .{arg});
const root_src_orig = info_it.rest();
if (root_src_orig.len == 0) fatal("expected 'name:deps:path' after {s}", .{arg});
if (mod_name.len == 0) fatal("empty name for module at '{s}'", .{root_src_orig});
const pkg_name = opt_pkg_name.?;
const pkg_path = try introspect.resolvePath(arena, opt_pkg_path.?);
const root_src = try introspect.resolvePath(arena, root_src_orig);
const new_cur_pkg = Package.create(
gpa,
pkg_name,
fs.path.dirname(pkg_path),
fs.path.basename(pkg_path),
) catch |err| {
fatal("Failed to add package at path {s}: {s}", .{ pkg_path, @errorName(err) });
};
if (mem.eql(u8, pkg_name, "std") or mem.eql(u8, pkg_name, "root") or mem.eql(u8, pkg_name, "builtin")) {
fatal("unable to add package '{s}' -> '{s}': conflicts with builtin package", .{ pkg_name, pkg_path });
} else if (cur_pkg.table.get(pkg_name)) |prev| {
fatal("unable to add package '{s}' -> '{s}': already exists as '{s}", .{ pkg_name, pkg_path, prev.root_src_path });
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
if (mem.eql(u8, mod_name, name)) {
fatal("unable to add module '{s}' -> '{s}': conflicts with builtin module", .{ mod_name, root_src });
}
}
try cur_pkg.addAndAdopt(gpa, new_cur_pkg);
cur_pkg = new_cur_pkg;
} else if (mem.eql(u8, arg, "--pkg-end")) {
cur_pkg = cur_pkg.parent orelse
fatal("encountered --pkg-end with no matching --pkg-begin", .{});
var mod_it = modules.iterator();
while (mod_it.next()) |kv| {
if (std.mem.eql(u8, mod_name, kv.key_ptr.*)) {
fatal("unable to add module '{s}' -> '{s}': already exists as '{s}'", .{ mod_name, root_src, kv.value_ptr.mod.root_src_path });
}
}
try modules.ensureUnusedCapacity(1);
modules.put(mod_name, .{
.mod = try Package.create(
gpa,
fs.path.dirname(root_src),
fs.path.basename(root_src),
),
.deps_str = deps_str,
}) catch unreachable;
} else if (mem.eql(u8, arg, "--deps")) {
if (root_deps_str != null) {
fatal("only one --deps argument is allowed", .{});
}
root_deps_str = args_iter.nextOrFatal();
} else if (mem.eql(u8, arg, "--main-pkg-path")) {
main_pkg_path = args_iter.nextOrFatal();
} else if (mem.eql(u8, arg, "-cflags")) {
@ -2307,6 +2326,31 @@ fn buildOutputType(
},
}
{
// Resolve module dependencies
var it = modules.iterator();
while (it.next()) |kv| {
const deps_str = kv.value_ptr.deps_str;
var deps_it = ModuleDepIterator.init(deps_str);
while (deps_it.next()) |dep| {
if (dep.expose.len == 0) {
fatal("module '{s}' depends on '{s}' with a blank name", .{ kv.key_ptr.*, dep.name });
}
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
if (mem.eql(u8, dep.expose, name)) {
fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{ dep.name, dep.expose });
}
}
const dep_mod = modules.get(dep.name) orelse
fatal("module '{s}' depends on module '{s}' which does not exist", .{ kv.key_ptr.*, dep.name });
try kv.value_ptr.mod.add(gpa, dep.expose, dep_mod.mod);
}
}
}
if (arg_mode == .build and optimize_mode == .ReleaseSmall and strip == null)
strip = true;
@ -2886,14 +2930,14 @@ fn buildOutputType(
if (main_pkg_path) |unresolved_main_pkg_path| {
const p = try introspect.resolvePath(arena, unresolved_main_pkg_path);
if (p.len == 0) {
break :blk try Package.create(gpa, "root", null, src_path);
break :blk try Package.create(gpa, null, src_path);
} else {
const rel_src_path = try fs.path.relative(arena, p, src_path);
break :blk try Package.create(gpa, "root", p, rel_src_path);
break :blk try Package.create(gpa, p, rel_src_path);
}
} else {
const root_src_dir_path = fs.path.dirname(src_path);
break :blk Package.create(gpa, "root", root_src_dir_path, fs.path.basename(src_path)) catch |err| {
break :blk Package.create(gpa, root_src_dir_path, fs.path.basename(src_path)) catch |err| {
if (root_src_dir_path) |p| {
fatal("unable to open '{s}': {s}", .{ p, @errorName(err) });
} else {
@ -2904,23 +2948,24 @@ fn buildOutputType(
} else null;
defer if (main_pkg) |p| p.destroy(gpa);
// Transfer packages added with --pkg-begin/--pkg-end to the root package
if (main_pkg) |pkg| {
var it = pkg_tree_root.table.valueIterator();
while (it.next()) |p| {
if (p.*.parent == &pkg_tree_root) {
p.*.parent = pkg;
// Transfer packages added with --deps to the root package
if (main_pkg) |mod| {
var it = ModuleDepIterator.init(root_deps_str orelse "");
while (it.next()) |dep| {
if (dep.expose.len == 0) {
fatal("root module depends on '{s}' with a blank name", .{dep.name});
}
}
pkg.table = pkg_tree_root.table;
pkg_tree_root.table = .{};
} else {
// Remove any dangling pointers just in case.
var it = pkg_tree_root.table.valueIterator();
while (it.next()) |p| {
if (p.*.parent == &pkg_tree_root) {
p.*.parent = null;
for ([_][]const u8{ "std", "root", "builtin" }) |name| {
if (mem.eql(u8, dep.expose, name)) {
fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{ dep.name, dep.expose });
}
}
const dep_mod = modules.get(dep.name) orelse
fatal("root module depends on module '{s}' which does not exist", .{dep.name});
try mod.add(gpa, dep.expose, dep_mod.mod);
}
}
@ -3400,6 +3445,32 @@ fn buildOutputType(
return cleanExit();
}
/// Iterates over a `--deps`/`--mod` dependency string: a comma-separated
/// list of `[expose=]name` entries.
const ModuleDepIterator = struct {
    split: mem.SplitIterator(u8),

    fn init(deps_str: []const u8) ModuleDepIterator {
        return .{ .split = mem.split(u8, deps_str, ",") };
    }

    const Dependency = struct {
        /// The name the dependency is imported under (`@import("expose")`).
        expose: []const u8,
        /// The name of the module being depended on.
        name: []const u8,
    };

    /// Returns the next dependency entry, or null when the list is exhausted.
    fn next(it: *ModuleDepIterator) ?Dependency {
        // An empty deps string means "no dependencies"; without this guard the
        // underlying split iterator would yield a single "" entry first.
        if (it.split.buffer.len == 0) return null;
        const entry = it.split.next() orelse return null;
        // "expose=name" splits at the first '='; a bare "name" exposes itself.
        const eq_idx = mem.indexOfScalar(u8, entry, '=') orelse
            return .{ .expose = entry, .name = entry };
        return .{
            .expose = entry[0..eq_idx],
            .name = entry[eq_idx + 1 ..],
        };
    }
};
fn parseCrossTargetOrReportFatalError(
allocator: Allocator,
opts: std.zig.CrossTarget.ParseOptions,
@ -3626,18 +3697,6 @@ fn updateModule(gpa: Allocator, comp: *Compilation, hook: AfterUpdateHook) !void
}
}
/// Recursively destroys every package reachable through `pkg`'s dependency
/// table. `pkg` itself is destroyed only when `free_parent` is true, which
/// allows the caller to pass a stack-allocated root.
fn freePkgTree(gpa: Allocator, pkg: *Package, free_parent: bool) void {
    var it = pkg.table.valueIterator();
    while (it.next()) |child| {
        freePkgTree(gpa, child.*, true);
    }
    if (free_parent) pkg.destroy(gpa);
}
fn cmdTranslateC(comp: *Compilation, arena: Allocator, enable_cache: bool) !void {
if (!build_options.have_llvm)
fatal("cannot translate-c: compiler built without LLVM extensions", .{});
@ -4141,7 +4200,6 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
var main_pkg: Package = .{
.root_src_directory = zig_lib_directory,
.root_src_path = "build_runner.zig",
.name = "root",
};
if (!build_options.omit_pkg_fetching_code) {
@ -4184,22 +4242,20 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
const deps_pkg = try Package.createFilePkg(
gpa,
"@dependencies",
local_cache_directory,
"dependencies.zig",
dependencies_source.items,
);
mem.swap(Package.Table, &main_pkg.table, &deps_pkg.table);
try main_pkg.addAndAdopt(gpa, deps_pkg);
try main_pkg.add(gpa, "@dependencies", deps_pkg);
}
var build_pkg: Package = .{
.root_src_directory = build_directory,
.root_src_path = build_zig_basename,
.name = "@build",
};
try main_pkg.addAndAdopt(gpa, &build_pkg);
try main_pkg.add(gpa, "@build", &build_pkg);
const comp = Compilation.create(gpa, .{
.zig_lib_directory = zig_lib_directory,
@ -4434,7 +4490,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
.root_decl = .none,
};
file.pkg = try Package.create(gpa, "root", null, file.sub_file_path);
file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.zir = try AstGen.generate(gpa, file.tree);
@ -4645,7 +4701,7 @@ fn fmtPathFile(
.root_decl = .none,
};
file.pkg = try Package.create(fmt.gpa, "root", null, file.sub_file_path);
file.pkg = try Package.create(fmt.gpa, null, file.sub_file_path);
defer file.pkg.destroy(fmt.gpa);
if (stat.size > max_src_size)
@ -5357,7 +5413,7 @@ pub fn cmdAstCheck(
file.stat.size = source.len;
}
file.pkg = try Package.create(gpa, "root", null, file.sub_file_path);
file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.tree = try Ast.parse(gpa, file.source, .zig);
@ -5476,7 +5532,7 @@ pub fn cmdChangelist(
.root_decl = .none,
};
file.pkg = try Package.create(gpa, "root", null, file.sub_file_path);
file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
const source = try arena.allocSentinel(u8, @intCast(usize, stat.size), 0);

View File

@ -583,6 +583,11 @@ pub const TestContext = struct {
path: []const u8,
};
/// A module dependency of a test case: the import name the module is exposed
/// under, and the path of the file that is its root source.
pub const DepModule = struct {
    /// Name the module is imported as (`@import(name)`).
    name: []const u8,
    /// Path of the module's root source file.
    path: []const u8,
};
pub const Backend = enum {
stage1,
stage2,
@ -611,6 +616,7 @@ pub const TestContext = struct {
link_libc: bool = false,
files: std.ArrayList(File),
deps: std.ArrayList(DepModule),
result: anyerror!void = {},
@ -618,6 +624,13 @@ pub const TestContext = struct {
case.files.append(.{ .path = name, .src = src }) catch @panic("out of memory");
}
/// Registers a module dependency for this case: the module rooted at `path`
/// becomes importable as `name` when the case is compiled.
pub fn addDepModule(case: *Case, name: []const u8, path: []const u8) void {
    case.deps.append(.{ .name = name, .path = path }) catch @panic("out of memory");
}
/// Adds a subcase in which the module is updated with `src`, and a C
/// header is generated.
pub fn addHeader(self: *Case, src: [:0]const u8, result: [:0]const u8) void {
@ -767,6 +780,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.files = std.ArrayList(File).init(ctx.arena),
.deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@ -787,6 +801,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.files = std.ArrayList(File).init(ctx.arena),
.deps = std.ArrayList(DepModule).init(ctx.arena),
.link_libc = true,
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
@ -801,6 +816,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.files = std.ArrayList(File).init(ctx.arena),
.deps = std.ArrayList(DepModule).init(ctx.arena),
.backend = .llvm,
.link_libc = true,
}) catch @panic("out of memory");
@ -818,6 +834,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Obj,
.files = std.ArrayList(File).init(ctx.arena),
.deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@ -834,6 +851,7 @@ pub const TestContext = struct {
.output_mode = .Exe,
.is_test = true,
.files = std.ArrayList(File).init(ctx.arena),
.deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@ -858,6 +876,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Obj,
.files = std.ArrayList(File).init(ctx.arena),
.deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@ -1145,6 +1164,7 @@ pub const TestContext = struct {
.output_mode = output_mode,
.link_libc = backend == .llvm,
.files = std.ArrayList(TestContext.File).init(ctx.cases.allocator),
.deps = std.ArrayList(DepModule).init(ctx.cases.allocator),
});
try cases.append(next);
}
@ -1497,9 +1517,25 @@ pub const TestContext = struct {
var main_pkg: Package = .{
.root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
.root_src_path = tmp_src_path,
.name = "root",
};
defer main_pkg.table.deinit(allocator);
defer {
var it = main_pkg.table.iterator();
while (it.next()) |kv| {
allocator.free(kv.key_ptr.*);
kv.value_ptr.*.destroy(allocator);
}
main_pkg.table.deinit(allocator);
}
for (case.deps.items) |dep| {
var pkg = try Package.create(
allocator,
tmp_dir_path,
dep.path,
);
errdefer pkg.destroy(allocator);
try main_pkg.add(allocator, dep.name, pkg);
}
const bin_name = try std.zig.binNameAlloc(arena, .{
.root_name = "test_case",

Binary file not shown.

View File

@ -288,4 +288,26 @@ pub fn addCases(ctx: *TestContext) !void {
//, &[_][]const u8{
// "tmp.zig:4:1: error: unable to inline function",
//});
{
    // Regression test: a single file that is reachable both as the root of a
    // named module ("foo") and via a direct file import ("foo.zig") must be
    // rejected, since it would otherwise exist in two modules at once.
    const case = ctx.obj("file in multiple modules", .{});
    case.backend = .stage2;

    case.addSourceFile("foo.zig",
        \\const dummy = 0;
    );
    // Expose foo.zig as the root of module "foo".
    case.addDepModule("foo", "foo.zig");

    case.addError(
        \\comptime {
        \\    _ = @import("foo");
        \\    _ = @import("foo.zig");
        \\}
    , &[_][]const u8{
        ":1:1: error: file exists in multiple modules",
        ":1:1: note: root of module root.foo",
        ":3:17: note: imported from module root",
    });
}
}

View File

@ -97,6 +97,7 @@ pub fn addPtx(
.updates = std.ArrayList(TestContext.Update).init(ctx.cases.allocator),
.output_mode = .Obj,
.files = std.ArrayList(TestContext.File).init(ctx.cases.allocator),
.deps = std.ArrayList(TestContext.DepModule).init(ctx.cases.allocator),
.link_libc = false,
.backend = .llvm,
// Bug in Debug mode

View File

@ -107,4 +107,10 @@ pub fn addCases(cases: *tests.StandaloneContext) void {
cases.addBuildFile("test/standalone/emit_asm_and_bin/build.zig", .{});
cases.addBuildFile("test/standalone/issue_12588/build.zig", .{});
cases.addBuildFile("test/standalone/embed_generated_file/build.zig", .{});
cases.addBuildFile("test/standalone/dep_diamond/build.zig", .{});
cases.addBuildFile("test/standalone/dep_triangle/build.zig", .{});
cases.addBuildFile("test/standalone/dep_recursive/build.zig", .{});
cases.addBuildFile("test/standalone/dep_mutually_recursive/build.zig", .{});
cases.addBuildFile("test/standalone/dep_shared_builtin/build.zig", .{});
}

View File

@ -0,0 +1 @@
// Re-export the shared module so the root test can compare module instances.
pub const shared = @import("shared");

View File

@ -0,0 +1,28 @@
const std = @import("std");

/// Builds the "diamond" module-dependency test: modules "foo" and "bar" both
/// depend on a single "shared" module, and the executable imports both.
pub fn build(b: *std.Build) void {
    const optimize = b.standardOptimizeOption(.{});

    // The one shared module at the bottom of the diamond.
    const shared_mod = b.createModule(.{
        .source_file = .{ .path = "shared.zig" },
    });

    const exe = b.addExecutable(.{
        .name = "test",
        .root_source_file = .{ .path = "test.zig" },
        .optimize = optimize,
    });
    // Both anonymous modules receive the same shared module instance.
    exe.addAnonymousModule("foo", .{
        .source_file = .{ .path = "foo.zig" },
        .dependencies = &.{.{ .name = "shared", .module = shared_mod }},
    });
    exe.addAnonymousModule("bar", .{
        .source_file = .{ .path = "bar.zig" },
        .dependencies = &.{.{ .name = "shared", .module = shared_mod }},
    });

    const run_cmd = exe.run();
    const test_step = b.step("test", "Test it");
    test_step.dependOn(&run_cmd.step);
}

View File

@ -0,0 +1 @@
// Re-export the shared module so the root test can compare module instances.
pub const shared = @import("shared");

View File

@ -0,0 +1 @@
// (empty)

View File

@ -0,0 +1,7 @@
const std = @import("std");
const foo = @import("foo");
const bar = @import("bar");

/// Both paths through the diamond must resolve "shared" to the very same
/// module instance.
pub fn main() void {
    std.debug.assert(foo.shared == bar.shared);
}

View File

@ -0,0 +1,6 @@
const std = @import("std");

// This module and "foo" import each other; following the cycle one step from
// here must land back on this very module.
pub const foo = @import("foo");

comptime {
    std.debug.assert(foo.bar == @This());
}

View File

@ -0,0 +1,26 @@
const std = @import("std");

/// Builds the mutually-recursive module test: "foo" and "bar" each depend on
/// the other, and the executable imports "foo".
pub fn build(b: *std.Build) void {
    const optimize = b.standardOptimizeOption(.{});

    const foo_mod = b.createModule(.{
        .source_file = .{ .path = "foo.zig" },
    });
    const bar_mod = b.createModule(.{
        .source_file = .{ .path = "bar.zig" },
    });
    // Wire up the dependency cycle once both modules exist.
    foo_mod.dependencies.put("bar", bar_mod) catch @panic("OOM");
    bar_mod.dependencies.put("foo", foo_mod) catch @panic("OOM");

    const exe = b.addExecutable(.{
        .name = "test",
        .root_source_file = .{ .path = "test.zig" },
        .optimize = optimize,
    });
    exe.addModule("foo", foo_mod);

    const run_cmd = exe.run();
    const test_step = b.step("test", "Test it");
    test_step.dependOn(&run_cmd.step);
}

View File

@ -0,0 +1,6 @@
const std = @import("std");

// This module and "bar" import each other; following the cycle one step from
// here must land back on this very module.
pub const bar = @import("bar");

comptime {
    std.debug.assert(bar.foo == @This());
}

View File

@ -0,0 +1,7 @@
const std = @import("std");
const foo = @import("foo");

/// Walking the foo -> bar -> foo cycle any number of times must always yield
/// the same module instance.
pub fn main() void {
    std.debug.assert(foo == foo.bar.foo);
    std.debug.assert(foo == foo.bar.foo.bar.foo);
}

View File

@ -0,0 +1,22 @@
const std = @import("std");

/// Builds the self-recursive module test: module "foo" depends on itself.
pub fn build(b: *std.Build) void {
    const optimize = b.standardOptimizeOption(.{});

    const foo_mod = b.createModule(.{
        .source_file = .{ .path = "foo.zig" },
    });
    // A module may list itself among its own dependencies.
    foo_mod.dependencies.put("foo", foo_mod) catch @panic("OOM");

    const exe = b.addExecutable(.{
        .name = "test",
        .root_source_file = .{ .path = "test.zig" },
        .optimize = optimize,
    });
    exe.addModule("foo", foo_mod);

    const run_cmd = exe.run();
    const test_step = b.step("test", "Test it");
    test_step.dependOn(&run_cmd.step);
}

View File

@ -0,0 +1,6 @@
const std = @import("std");

// "foo" here is this module importing itself through its self-dependency.
pub const foo = @import("foo");

comptime {
    std.debug.assert(foo == @This());
}

View File

@ -0,0 +1,8 @@
const foo = @import("foo");
const assert = @import("std").debug.assert;

// NOTE: a leftover `const shared = @import("shared");` was removed here — the
// dep_recursive build script registers only module "foo", so that import was
// dead copy-paste from the shared-module tests (harmless only because
// unreferenced container-level declarations are never analyzed).

/// Following the module's self-dependency any number of times must always
/// resolve to the same module instance.
pub fn main() void {
    assert(foo == foo.foo);
    assert(foo == foo.foo.foo);
}

View File

@ -0,0 +1,19 @@
const std = @import("std");

/// Builds the test verifying that a module with no explicit dependencies
/// still shares the same "std", "builtin", and "root" modules as the root.
pub fn build(b: *std.Build) void {
    const optimize = b.standardOptimizeOption(.{});

    const exe = b.addExecutable(.{
        .name = "test",
        .root_source_file = .{ .path = "test.zig" },
        .optimize = optimize,
    });
    // "foo" declares no dependencies; the builtins are provided implicitly.
    exe.addAnonymousModule("foo", .{
        .source_file = .{ .path = "foo.zig" },
    });

    const run_cmd = exe.run();
    const test_step = b.step("test", "Test it");
    test_step.dependOn(&run_cmd.step);
}

View File

@ -0,0 +1,3 @@
// Re-export the three implicitly-provided builtin modules so the root test
// can compare its own view of them against this module's view.
pub const std = @import("std");
pub const builtin = @import("builtin");
pub const root = @import("root");

View File

@ -0,0 +1,11 @@
const std = @import("std");
const builtin = @import("builtin");
const root = @import("root");
const foo = @import("foo");

const assert = std.debug.assert;

/// The builtin modules seen by the root module and by "foo" must be the
/// exact same module instances.
pub fn main() void {
    assert(root == @This());
    assert(std == foo.std);
    assert(builtin == foo.builtin);
    assert(root == foo.root);
}

View File

@ -0,0 +1,25 @@
const std = @import("std");

/// Builds the "triangle" test: the root module and module "foo" both depend
/// on the same "shared" module.
pub fn build(b: *std.Build) void {
    const optimize = b.standardOptimizeOption(.{});

    const shared_mod = b.createModule(.{
        .source_file = .{ .path = "shared.zig" },
    });

    const exe = b.addExecutable(.{
        .name = "test",
        .root_source_file = .{ .path = "test.zig" },
        .optimize = optimize,
    });
    exe.addAnonymousModule("foo", .{
        .source_file = .{ .path = "foo.zig" },
        .dependencies = &.{.{ .name = "shared", .module = shared_mod }},
    });
    // The root module also sees the very same shared module directly.
    exe.addModule("shared", shared_mod);

    const run_cmd = exe.run();
    const test_step = b.step("test", "Test it");
    test_step.dependOn(&run_cmd.step);
}

View File

@ -0,0 +1 @@
// Re-export the shared module so the root test can compare module instances.
pub const shared = @import("shared");

View File

@ -0,0 +1 @@
// (empty)

View File

@ -0,0 +1,7 @@
const std = @import("std");
const foo = @import("foo");
const shared = @import("shared");

/// "shared" reached through "foo" must be the same module instance the root
/// module imports directly.
pub fn main() void {
    std.debug.assert(foo.shared == shared);
}