Merge pull request #8470 from ziglang/stage2-start

stage2: blaze the trail for std lib integration
This commit is contained in:
Andrew Kelley 2021-04-09 10:15:46 -07:00 committed by GitHub
commit f75cdd1acd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 662 additions and 290 deletions

View File

@ -7,7 +7,7 @@
const root = @import("root");
const std = @import("std.zig");
const builtin = std.builtin;
const builtin = @import("builtin");
const assert = std.debug.assert;
const uefi = std.os.uefi;
const tlcsprng = @import("crypto/tlcsprng.zig");
@ -17,39 +17,101 @@ var argc_argv_ptr: [*]usize = undefined;
const start_sym_name = if (builtin.arch.isMIPS()) "__start" else "_start";
comptime {
if (builtin.output_mode == .Lib and builtin.link_mode == .Dynamic) {
if (builtin.os.tag == .windows and !@hasDecl(root, "_DllMainCRTStartup")) {
@export(_DllMainCRTStartup, .{ .name = "_DllMainCRTStartup" });
// The self-hosted compiler is not fully capable of handling all of this start.zig file.
// Until then, we have simplified logic here for self-hosted. TODO remove this once
// self-hosted is capable enough to handle all of the real start.zig logic.
if (builtin.zig_is_stage2) {
if (builtin.output_mode == .Exe) {
if (builtin.link_libc or builtin.object_format == .c) {
if (!@hasDecl(root, "main")) {
@export(main2, "main");
}
} else {
if (!@hasDecl(root, "_start")) {
@export(_start2, "_start");
}
}
}
} else if (builtin.output_mode == .Exe or @hasDecl(root, "main")) {
if (builtin.link_libc and @hasDecl(root, "main")) {
if (@typeInfo(@TypeOf(root.main)).Fn.calling_convention != .C) {
@export(main, .{ .name = "main", .linkage = .Weak });
} else {
if (builtin.output_mode == .Lib and builtin.link_mode == .Dynamic) {
if (builtin.os.tag == .windows and !@hasDecl(root, "_DllMainCRTStartup")) {
@export(_DllMainCRTStartup, .{ .name = "_DllMainCRTStartup" });
}
} else if (builtin.os.tag == .windows) {
if (!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
{
@export(WinStartup, .{ .name = "wWinMainCRTStartup" });
} else if (@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
{
@compileError("WinMain not supported; declare wWinMain or main instead");
} else if (@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup") and
!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup"))
{
@export(wWinMainCRTStartup, .{ .name = "wWinMainCRTStartup" });
} else if (builtin.output_mode == .Exe or @hasDecl(root, "main")) {
if (builtin.link_libc and @hasDecl(root, "main")) {
if (@typeInfo(@TypeOf(root.main)).Fn.calling_convention != .C) {
@export(main, .{ .name = "main", .linkage = .Weak });
}
} else if (builtin.os.tag == .windows) {
if (!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
{
@export(WinStartup, .{ .name = "wWinMainCRTStartup" });
} else if (@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
{
@compileError("WinMain not supported; declare wWinMain or main instead");
} else if (@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup") and
!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup"))
{
@export(wWinMainCRTStartup, .{ .name = "wWinMainCRTStartup" });
}
} else if (builtin.os.tag == .uefi) {
if (!@hasDecl(root, "EfiMain")) @export(EfiMain, .{ .name = "EfiMain" });
} else if (builtin.arch.isWasm() and builtin.os.tag == .freestanding) {
if (!@hasDecl(root, start_sym_name)) @export(wasm_freestanding_start, .{ .name = start_sym_name });
} else if (builtin.os.tag != .other and builtin.os.tag != .freestanding) {
if (!@hasDecl(root, start_sym_name)) @export(_start, .{ .name = start_sym_name });
}
} else if (builtin.os.tag == .uefi) {
if (!@hasDecl(root, "EfiMain")) @export(EfiMain, .{ .name = "EfiMain" });
} else if (builtin.arch.isWasm() and builtin.os.tag == .freestanding) {
if (!@hasDecl(root, start_sym_name)) @export(wasm_freestanding_start, .{ .name = start_sym_name });
} else if (builtin.os.tag != .other and builtin.os.tag != .freestanding) {
if (!@hasDecl(root, start_sym_name)) @export(_start, .{ .name = start_sym_name });
}
}
}
// Simplified start code for stage2 until it supports more language features ///
// Simplified C-ABI entry point for stage2 (self-hosted) builds that link
// libc or emit C: runs the root source file's `main` and reports success.
// NOTE(review): assumes `root.main` returns void (or a discardable value)
// and cannot fail -- no error handling is performed here.
fn main2() callconv(.C) c_int {
root.main();
return 0;
}
// Simplified freestanding entry point for stage2 builds that do not link
// libc. Declared with the naked calling convention, so no prologue/epilogue
// is generated; it must not return normally -- it terminates the process
// via `exit2` after user code finishes.
fn _start2() callconv(.Naked) noreturn {
root.main();
exit2(0);
}
// Terminates the process with the given exit code by issuing the OS exit
// syscall directly (no libc). Only a few architectures are supported.
// NOTE(review): the syscall numbers used (231, 1, 93) match Linux's
// exit_group/exit numbers for x86_64, arm (EABI), and aarch64 -- this
// assumes a Linux syscall ABI; confirm before using on other OS targets.
fn exit2(code: u8) noreturn {
switch (builtin.arch) {
.x86_64 => {
// rax = syscall number, rdi = first argument (exit code).
// rcx/r11 are clobbered by the `syscall` instruction itself.
asm volatile ("syscall"
:
: [number] "{rax}" (231),
[arg1] "{rdi}" (code)
: "rcx", "r11", "memory"
);
},
.arm => {
// r7 = syscall number, r0 = first argument (exit code).
asm volatile ("svc #0"
:
: [number] "{r7}" (1),
[arg1] "{r0}" (code)
: "memory"
);
},
.aarch64 => {
// x8 = syscall number, x0 = first argument (exit code).
asm volatile ("svc #0"
:
: [number] "{x8}" (93),
[arg1] "{x0}" (code)
: "memory", "cc"
);
},
else => @compileError("TODO"),
}
// The exit syscalls above never return.
unreachable;
}
////////////////////////////////////////////////////////////////////////////////
fn _DllMainCRTStartup(
hinstDLL: std.os.windows.HINSTANCE,
fdwReason: std.os.windows.DWORD,

View File

@ -92,7 +92,7 @@ pub const zig = @import("zig.zig");
pub const start = @import("start.zig");
// This forces the start.zig file to be imported, and the comptime logic inside that
// file decides whether to export any appropriate start symbols.
// file decides whether to export any appropriate start symbols, and call main.
comptime {
_ = start;
}

View File

@ -18,16 +18,19 @@ pub const CrossTarget = @import("zig/cross_target.zig").CrossTarget;
pub const SrcHash = [16]u8;
/// Computes a stable 16-byte Blake3 hash of `src`, used as a content
/// fingerprint (e.g. for incremental-compilation change detection).
pub fn hashSrc(src: []const u8) SrcHash {
    var out: SrcHash = undefined;
    // The previous small-source fast path (copy + zero-pad) was dead code:
    // this unconditional hash overwrote `out` in every case, so it has been
    // removed and the stale doc comment corrected.
    std.crypto.hash.Blake3.hash(src, &out, .{});
    return out;
}
/// Derives the name hash of a child declaration by folding the parent's
/// hash, a separator, and the child's name through one Blake3 hasher.
pub fn hashName(parent_hash: SrcHash, sep: []const u8, name: []const u8) SrcHash {
    var hasher = std.crypto.hash.Blake3.init(.{});
    hasher.update(&parent_hash);
    hasher.update(sep);
    hasher.update(name);
    var result: SrcHash = undefined;
    hasher.final(&result);
    return result;
}

View File

@ -823,7 +823,31 @@ pub fn structInitExpr(
.none, .none_or_ref => return mod.failNode(scope, node, "TODO implement structInitExpr none", .{}),
.ref => unreachable, // struct literal not valid as l-value
.ty => |ty_inst| {
return mod.failNode(scope, node, "TODO implement structInitExpr ty", .{});
const fields_list = try gpa.alloc(zir.Inst.StructInit.Item, struct_init.ast.fields.len);
defer gpa.free(fields_list);
for (struct_init.ast.fields) |field_init, i| {
const name_token = tree.firstToken(field_init) - 2;
const str_index = try gz.identAsString(name_token);
const field_ty_inst = try gz.addPlNode(.field_type, field_init, zir.Inst.FieldType{
.container_type = ty_inst,
.name_start = str_index,
});
fields_list[i] = .{
.field_type = astgen.refToIndex(field_ty_inst).?,
.init = try expr(gz, scope, .{ .ty = field_ty_inst }, field_init),
};
}
const init_inst = try gz.addPlNode(.struct_init, node, zir.Inst.StructInit{
.fields_len = @intCast(u32, fields_list.len),
});
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
fields_list.len * @typeInfo(zir.Inst.StructInit.Item).Struct.fields.len);
for (fields_list) |field| {
_ = gz.astgen.addExtraAssumeCapacity(field);
}
return rvalue(gz, scope, rl, init_inst, node);
},
.ptr => |ptr_inst| {
const field_ptr_list = try gpa.alloc(zir.Inst.Index, struct_init.ast.fields.len);
@ -1245,6 +1269,7 @@ fn blockExprStmts(
.fn_type_var_args,
.fn_type_cc,
.fn_type_cc_var_args,
.has_decl,
.int,
.float,
.float128,
@ -1320,6 +1345,8 @@ fn blockExprStmts(
.switch_capture_else,
.switch_capture_else_ref,
.struct_init_empty,
.struct_init,
.field_type,
.struct_decl,
.struct_decl_packed,
.struct_decl_extern,
@ -1329,6 +1356,7 @@ fn blockExprStmts(
.opaque_decl,
.int_to_enum,
.enum_to_int,
.type_info,
=> break :b false,
// ZIR instructions that are always either `noreturn` or `void`.
@ -1336,6 +1364,7 @@ fn blockExprStmts(
.dbg_stmt_node,
.ensure_result_used,
.ensure_result_non_error,
.@"export",
.set_eval_branch_quota,
.compile_log,
.ensure_err_payload_void,
@ -2347,7 +2376,7 @@ fn arrayAccess(
),
else => return rvalue(gz, scope, rl, try gz.addBin(
.elem_val,
try expr(gz, scope, .none, node_datas[node].lhs),
try expr(gz, scope, .none_or_ref, node_datas[node].lhs),
try expr(gz, scope, .{ .ty = .usize_type }, node_datas[node].rhs),
), node),
}
@ -4146,6 +4175,36 @@ fn builtinCall(
return rvalue(gz, scope, rl, result, node);
},
.@"export" => {
// TODO: @export is supposed to be able to export things other than functions.
// Instead of `comptimeExpr` here we need `decl_ref`.
const fn_to_export = try comptimeExpr(gz, scope, .none, params[0]);
// TODO: the second parameter here is supposed to be
// `std.builtin.ExportOptions`, not a string.
const export_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
_ = try gz.addPlNode(.@"export", node, zir.Inst.Bin{
.lhs = fn_to_export,
.rhs = export_name,
});
return rvalue(gz, scope, rl, .void_value, node);
},
.has_decl => {
const container_type = try typeExpr(gz, scope, params[0]);
const name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
const result = try gz.addPlNode(.has_decl, node, zir.Inst.Bin{
.lhs = container_type,
.rhs = name,
});
return rvalue(gz, scope, rl, result, node);
},
.type_info => {
const operand = try typeExpr(gz, scope, params[0]);
const result = try gz.addUnNode(.type_info, operand, node);
return rvalue(gz, scope, rl, result, node);
},
.add_with_overflow,
.align_cast,
.align_of,
@ -4175,11 +4234,9 @@ fn builtinCall(
.error_name,
.error_return_trace,
.err_set_cast,
.@"export",
.fence,
.field_parent_ptr,
.float_to_int,
.has_decl,
.has_field,
.int_to_float,
.int_to_ptr,
@ -4224,7 +4281,6 @@ fn builtinCall(
.This,
.truncate,
.Type,
.type_info,
.type_name,
.union_init,
=> return mod.failNode(scope, node, "TODO: implement builtin function {s}", .{

View File

@ -932,38 +932,56 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
artifact_sub_dir,
};
// TODO when we implement serialization and deserialization of incremental compilation metadata,
// this is where we would load it. We have an open handle to the directory where
// the output either already is, or will be.
// If we rely on stage1, we must not redundantly add these packages.
const use_stage1 = build_options.is_stage1 and use_llvm;
if (!use_stage1) {
const builtin_pkg = try Package.createWithDir(
gpa,
zig_cache_artifact_directory,
null,
"builtin.zig",
);
errdefer builtin_pkg.destroy(gpa);
const std_pkg = try Package.createWithDir(
gpa,
options.zig_lib_directory,
"std",
"std.zig",
);
errdefer std_pkg.destroy(gpa);
try root_pkg.addAndAdopt(gpa, "builtin", builtin_pkg);
try root_pkg.add(gpa, "root", root_pkg);
try root_pkg.addAndAdopt(gpa, "std", std_pkg);
try std_pkg.add(gpa, "builtin", builtin_pkg);
try std_pkg.add(gpa, "root", root_pkg);
}
// TODO when we implement serialization and deserialization of incremental
// compilation metadata, this is where we would load it. We have an open handle
// to the directory where the output either already is, or will be.
// However we currently do not have serialization of such metadata, so for now
// we set up an empty Module that does the entire compilation fresh.
const root_scope = rs: {
if (mem.endsWith(u8, root_pkg.root_src_path, ".zig")) {
const root_scope = try gpa.create(Module.Scope.File);
const struct_ty = try Type.Tag.empty_struct.create(
gpa,
&root_scope.root_container,
);
root_scope.* = .{
// TODO this is duped so it can be freed in Container.deinit
.sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path),
.source = .{ .unloaded = {} },
.tree = undefined,
.status = .never_loaded,
.pkg = root_pkg,
.root_container = .{
.file_scope = root_scope,
.decls = .{},
.ty = struct_ty,
},
};
break :rs root_scope;
} else if (mem.endsWith(u8, root_pkg.root_src_path, ".zir")) {
return error.ZirFilesUnsupported;
} else {
unreachable;
}
const root_scope = try gpa.create(Module.Scope.File);
errdefer gpa.destroy(root_scope);
const struct_ty = try Type.Tag.empty_struct.create(gpa, &root_scope.root_container);
root_scope.* = .{
// TODO this is duped so it can be freed in Container.deinit
.sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path),
.source = .{ .unloaded = {} },
.tree = undefined,
.status = .never_loaded,
.pkg = root_pkg,
.root_container = .{
.file_scope = root_scope,
.decls = .{},
.ty = struct_ty,
.parent_name_hash = root_pkg.namespace_hash,
},
};
const module = try arena.create(Module);
@ -1365,7 +1383,8 @@ pub fn update(self: *Compilation) !void {
self.c_object_work_queue.writeItemAssumeCapacity(entry.key);
}
const use_stage1 = build_options.omit_stage2 or build_options.is_stage1 and self.bin_file.options.use_llvm;
const use_stage1 = build_options.omit_stage2 or
(build_options.is_stage1 and self.bin_file.options.use_llvm);
if (!use_stage1) {
if (self.bin_file.options.module) |module| {
module.compile_log_text.shrinkAndFree(module.gpa, 0);
@ -2490,7 +2509,7 @@ pub fn addCCArgs(
try argv.append("-fPIC");
}
},
.shared_library, .assembly, .ll, .bc, .unknown, .static_library, .object, .zig, .zir => {},
.shared_library, .assembly, .ll, .bc, .unknown, .static_library, .object, .zig => {},
}
if (out_dep_path) |p| {
try argv.appendSlice(&[_][]const u8{ "-MD", "-MV", "-MF", p });
@ -2564,7 +2583,6 @@ pub const FileExt = enum {
object,
static_library,
zig,
zir,
unknown,
pub fn clangSupportsDepFile(ext: FileExt) bool {
@ -2578,7 +2596,6 @@ pub const FileExt = enum {
.object,
.static_library,
.zig,
.zir,
.unknown,
=> false,
};
@ -2650,8 +2667,6 @@ pub fn classifyFileExt(filename: []const u8) FileExt {
return .h;
} else if (mem.endsWith(u8, filename, ".zig")) {
return .zig;
} else if (mem.endsWith(u8, filename, ".zir")) {
return .zir;
} else if (hasSharedLibraryExt(filename)) {
return .shared_library;
} else if (hasStaticLibraryExt(filename)) {
@ -2672,7 +2687,6 @@ test "classifyFileExt" {
std.testing.expectEqual(FileExt.shared_library, classifyFileExt("foo.so.1.2.3"));
std.testing.expectEqual(FileExt.unknown, classifyFileExt("foo.so.1.2.3~"));
std.testing.expectEqual(FileExt.zig, classifyFileExt("foo.zig"));
std.testing.expectEqual(FileExt.zir, classifyFileExt("foo.zir"));
}
fn haveFramePointer(comp: *const Compilation) bool {
@ -2867,6 +2881,8 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
const target = comp.getTarget();
const generic_arch_name = target.cpu.arch.genericName();
const use_stage1 = build_options.omit_stage2 or
(build_options.is_stage1 and comp.bin_file.options.use_llvm);
@setEvalBranchQuota(4000);
try buffer.writer().print(
@ -2879,6 +2895,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
\\/// Zig version. When writing code that supports multiple versions of Zig, prefer
\\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks.
\\pub const zig_version = try @import("std").SemanticVersion.parse("{s}");
\\pub const zig_is_stage2 = {};
\\
\\pub const output_mode = OutputMode.{};
\\pub const link_mode = LinkMode.{};
@ -2892,6 +2909,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
\\
, .{
build_options.version,
!use_stage1,
std.zig.fmtId(@tagName(comp.bin_file.options.output_mode)),
std.zig.fmtId(@tagName(comp.bin_file.options.link_mode)),
comp.bin_file.options.is_test,
@ -3101,6 +3119,7 @@ fn buildOutputFromZig(
.handle = special_dir,
},
.root_src_path = src_basename,
.namespace_hash = Package.root_namespace_hash,
};
const root_name = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len];
const target = comp.getTarget();

View File

@ -150,9 +150,15 @@ pub const Decl = struct {
/// The direct parent container of the Decl.
/// Reference to externally owned memory.
container: *Scope.Container,
/// The AST Node decl index or ZIR Inst index that contains this declaration.
/// An integer that can be checked against the corresponding incrementing
/// generation field of Module. This is used to determine whether `complete` status
/// represents pre- or post- re-analysis.
generation: u32,
/// The AST Node index or ZIR Inst index that contains this declaration.
/// Must be recomputed when the corresponding source file is modified.
src_index: usize,
src_node: ast.Node.Index,
/// The most recent value of the Decl after a successful semantic analysis.
typed_value: union(enum) {
never_succeeded: void,
@ -198,11 +204,6 @@ pub const Decl = struct {
/// Whether the corresponding AST decl has a `pub` keyword.
is_pub: bool,
/// An integer that can be checked against the corresponding incrementing
/// generation field of Module. This is used to determine whether `complete` status
/// represents pre- or post- re-analysis.
generation: u32,
/// Represents the position of the code in the output file.
/// This is populated regardless of semantic analysis and code generation.
link: link.File.LinkBlock,
@ -249,11 +250,11 @@ pub const Decl = struct {
}
pub fn relativeToNodeIndex(decl: Decl, offset: i32) ast.Node.Index {
return @bitCast(ast.Node.Index, offset + @bitCast(i32, decl.srcNode()));
return @bitCast(ast.Node.Index, offset + @bitCast(i32, decl.src_node));
}
pub fn nodeIndexToRelative(decl: Decl, node_index: ast.Node.Index) i32 {
return @bitCast(i32, node_index) - @bitCast(i32, decl.srcNode());
return @bitCast(i32, node_index) - @bitCast(i32, decl.src_node);
}
pub fn tokSrcLoc(decl: Decl, token_index: ast.TokenIndex) LazySrcLoc {
@ -271,14 +272,9 @@ pub const Decl = struct {
};
}
pub fn srcNode(decl: Decl) u32 {
const tree = &decl.container.file_scope.tree;
return tree.rootDecls()[decl.src_index];
}
pub fn srcToken(decl: Decl) u32 {
const tree = &decl.container.file_scope.tree;
return tree.firstToken(decl.srcNode());
return tree.firstToken(decl.src_node);
}
pub fn srcByteOffset(decl: Decl) u32 {
@ -678,6 +674,7 @@ pub const Scope = struct {
base: Scope = Scope{ .tag = base_tag },
file_scope: *Scope.File,
parent_name_hash: NameHash,
/// Direct children of the file.
decls: std.AutoArrayHashMapUnmanaged(*Decl, void) = .{},
@ -696,8 +693,7 @@ pub const Scope = struct {
}
pub fn fullyQualifiedNameHash(cont: *Container, name: []const u8) NameHash {
// TODO container scope qualified names.
return std.zig.hashSrc(name);
return std.zig.hashName(cont.parent_name_hash, ".", name);
}
pub fn renderFullyQualifiedName(cont: Container, name: []const u8, writer: anytype) !void {
@ -2296,6 +2292,20 @@ pub const InnerError = error{ OutOfMemory, AnalysisFail };
pub fn deinit(mod: *Module) void {
const gpa = mod.gpa;
// The callsite of `Compilation.create` owns the `root_pkg`, however
// Module owns the builtin and std packages that it adds.
if (mod.root_pkg.table.remove("builtin")) |entry| {
gpa.free(entry.key);
entry.value.destroy(gpa);
}
if (mod.root_pkg.table.remove("std")) |entry| {
gpa.free(entry.key);
entry.value.destroy(gpa);
}
if (mod.root_pkg.table.remove("root")) |entry| {
gpa.free(entry.key);
}
mod.compile_log_text.deinit(gpa);
mod.zig_cache_artifact_directory.handle.close();
@ -2458,7 +2468,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
const tree = try mod.getAstTree(decl.container.file_scope);
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
const decl_node = tree.rootDecls()[decl.src_index];
const decl_node = decl.src_node;
switch (node_tags[decl_node]) {
.fn_decl => {
const fn_proto = node_datas[decl_node].lhs;
@ -2513,6 +2523,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
const block_expr = node_datas[decl_node].lhs;
_ = try AstGen.comptimeExpr(&gen_scope, &gen_scope.base, .none, block_expr);
_ = try gen_scope.addBreak(.break_inline, 0, .void_value);
const code = try gen_scope.finish();
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
@ -3294,7 +3305,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
var outdated_decls = std.AutoArrayHashMap(*Decl, void).init(mod.gpa);
defer outdated_decls.deinit();
for (decls) |decl_node, decl_i| switch (node_tags[decl_node]) {
for (decls) |decl_node| switch (node_tags[decl_node]) {
.fn_decl => {
const fn_proto = node_datas[decl_node].lhs;
const body = node_datas[decl_node].rhs;
@ -3306,7 +3317,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
body,
tree.fnProtoSimple(&params, fn_proto),
@ -3317,7 +3327,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
body,
tree.fnProtoMulti(fn_proto),
@ -3329,7 +3338,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
body,
tree.fnProtoOne(&params, fn_proto),
@ -3340,7 +3348,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
body,
tree.fnProto(fn_proto),
@ -3355,7 +3362,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
0,
tree.fnProtoSimple(&params, decl_node),
@ -3366,7 +3372,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
0,
tree.fnProtoMulti(decl_node),
@ -3378,7 +3383,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
0,
tree.fnProtoOne(&params, decl_node),
@ -3389,7 +3393,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
0,
tree.fnProto(decl_node),
@ -3400,7 +3403,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
tree.globalVarDecl(decl_node),
),
@ -3409,7 +3411,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
tree.localVarDecl(decl_node),
),
@ -3418,7 +3419,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
tree.simpleVarDecl(decl_node),
),
@ -3427,7 +3427,6 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
&deleted_decls,
&outdated_decls,
decl_node,
decl_i,
tree.*,
tree.alignedVarDecl(decl_node),
),
@ -3440,38 +3439,21 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
const name_hash = container_scope.fullyQualifiedNameHash(name);
const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_node, name_hash, contents_hash);
container_scope.decls.putAssumeCapacity(new_decl, {});
mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
},
.container_field_init => try mod.semaContainerField(
container_scope,
&deleted_decls,
decl_node,
decl_i,
tree.*,
tree.containerFieldInit(decl_node),
),
.container_field_align => try mod.semaContainerField(
container_scope,
&deleted_decls,
decl_node,
decl_i,
tree.*,
tree.containerFieldAlign(decl_node),
),
.container_field => try mod.semaContainerField(
container_scope,
&deleted_decls,
decl_node,
decl_i,
tree.*,
tree.containerField(decl_node),
),
// Container fields are handled in AstGen.
.container_field_init,
.container_field_align,
.container_field,
=> continue,
.test_decl => {
log.err("TODO: analyze test decl", .{});
if (mod.comp.bin_file.options.is_test) {
log.err("TODO: analyze test decl", .{});
}
},
.@"usingnamespace" => {
log.err("TODO: analyze usingnamespace decl", .{});
@ -3508,7 +3490,6 @@ fn semaContainerFn(
deleted_decls: *std.AutoArrayHashMap(*Decl, void),
outdated_decls: *std.AutoArrayHashMap(*Decl, void),
decl_node: ast.Node.Index,
decl_i: usize,
tree: ast.Tree,
body_node: ast.Node.Index,
fn_proto: ast.full.FnProto,
@ -3517,24 +3498,30 @@ fn semaContainerFn(
defer tracy.end();
// We will create a Decl for it regardless of analysis status.
const name_tok = fn_proto.name_token orelse {
const name_token = fn_proto.name_token orelse {
// This problem will go away with #1717.
@panic("TODO missing function name");
};
const name = tree.tokenSlice(name_tok); // TODO use identifierTokenString
const name = tree.tokenSlice(name_token); // TODO use identifierTokenString
const name_hash = container_scope.fullyQualifiedNameHash(name);
const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
if (mod.decl_table.get(name_hash)) |decl| {
// Update the AST Node index of the decl, even if its contents are unchanged, it may
// have been re-ordered.
decl.src_index = decl_i;
const prev_src_node = decl.src_node;
decl.src_node = decl_node;
if (deleted_decls.swapRemove(decl) == null) {
decl.analysis = .sema_failure;
const msg = try ErrorMsg.create(mod.gpa, .{
.container = .{ .file_scope = container_scope.file_scope },
.lazy = .{ .token_abs = name_tok },
.lazy = .{ .token_abs = name_token },
}, "redefinition of '{s}'", .{decl.name});
errdefer msg.destroy(mod.gpa);
const other_src_loc: SrcLoc = .{
.container = .{ .file_scope = decl.container.file_scope },
.lazy = .{ .node_abs = prev_src_node },
};
try mod.errNoteNonLazy(other_src_loc, msg, "previous definition here", .{});
try mod.failed_decls.putNoClobber(mod.gpa, decl, msg);
} else {
if (!srcHashEql(decl.contents_hash, contents_hash)) {
@ -3558,7 +3545,7 @@ fn semaContainerFn(
}
}
} else {
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_node, name_hash, contents_hash);
container_scope.decls.putAssumeCapacity(new_decl, {});
if (fn_proto.extern_export_token) |maybe_export_token| {
const token_tags = tree.tokens.items(.tag);
@ -3566,6 +3553,7 @@ fn semaContainerFn(
mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
}
}
new_decl.is_pub = fn_proto.visib_token != null;
}
}
@ -3575,7 +3563,6 @@ fn semaContainerVar(
deleted_decls: *std.AutoArrayHashMap(*Decl, void),
outdated_decls: *std.AutoArrayHashMap(*Decl, void),
decl_node: ast.Node.Index,
decl_i: usize,
tree: ast.Tree,
var_decl: ast.full.VarDecl,
) !void {
@ -3589,21 +3576,27 @@ fn semaContainerVar(
if (mod.decl_table.get(name_hash)) |decl| {
// Update the AST Node index of the decl, even if its contents are unchanged, it may
// have been re-ordered.
decl.src_index = decl_i;
const prev_src_node = decl.src_node;
decl.src_node = decl_node;
if (deleted_decls.swapRemove(decl) == null) {
decl.analysis = .sema_failure;
const err_msg = try ErrorMsg.create(mod.gpa, .{
const msg = try ErrorMsg.create(mod.gpa, .{
.container = .{ .file_scope = container_scope.file_scope },
.lazy = .{ .token_abs = name_token },
}, "redefinition of '{s}'", .{decl.name});
errdefer err_msg.destroy(mod.gpa);
try mod.failed_decls.putNoClobber(mod.gpa, decl, err_msg);
errdefer msg.destroy(mod.gpa);
const other_src_loc: SrcLoc = .{
.container = .{ .file_scope = decl.container.file_scope },
.lazy = .{ .node_abs = prev_src_node },
};
try mod.errNoteNonLazy(other_src_loc, msg, "previous definition here", .{});
try mod.failed_decls.putNoClobber(mod.gpa, decl, msg);
} else if (!srcHashEql(decl.contents_hash, contents_hash)) {
try outdated_decls.put(decl, {});
decl.contents_hash = contents_hash;
}
} else {
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_node, name_hash, contents_hash);
container_scope.decls.putAssumeCapacity(new_decl, {});
if (var_decl.extern_export_token) |maybe_export_token| {
const token_tags = tree.tokens.items(.tag);
@ -3614,21 +3607,6 @@ fn semaContainerVar(
}
}
/// Placeholder semantic analysis for a container field declaration.
/// Currently unimplemented: it records a profiling span and logs a TODO
/// error; no Decl is created or updated and no parameter is consumed.
fn semaContainerField(
mod: *Module,
container_scope: *Scope.Container,
deleted_decls: *std.AutoArrayHashMap(*Decl, void),
decl_node: ast.Node.Index,
decl_i: usize,
tree: ast.Tree,
field: ast.full.ContainerField,
) !void {
// Parameters mirror the sibling semaContainerFn/semaContainerVar helpers
// so callers can dispatch uniformly; they are unused until implemented.
const tracy = trace(@src());
defer tracy.end();
log.err("TODO: analyze container field", .{});
}
pub fn deleteDecl(
mod: *Module,
decl: *Decl,
@ -3811,7 +3789,7 @@ fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
fn allocateNewDecl(
mod: *Module,
scope: *Scope,
src_index: usize,
src_node: ast.Node.Index,
contents_hash: std.zig.SrcHash,
) !*Decl {
// If we have emit-h then we must allocate a bigger structure to store the emit-h state.
@ -3827,7 +3805,7 @@ fn allocateNewDecl(
new_decl.* = .{
.name = "",
.container = scope.namespace(),
.src_index = src_index,
.src_node = src_node,
.typed_value = .{ .never_succeeded = {} },
.analysis = .unreferenced,
.deletion_flag = false,
@ -3858,12 +3836,12 @@ fn createNewDecl(
mod: *Module,
scope: *Scope,
decl_name: []const u8,
src_index: usize,
src_node: ast.Node.Index,
name_hash: Scope.NameHash,
contents_hash: std.zig.SrcHash,
) !*Decl {
try mod.decl_table.ensureCapacity(mod.gpa, mod.decl_table.items().len + 1);
const new_decl = try mod.allocateNewDecl(scope, src_index, contents_hash);
const new_decl = try mod.allocateNewDecl(scope, src_node, contents_hash);
errdefer mod.gpa.destroy(new_decl);
new_decl.name = try mem.dupeZ(mod.gpa, u8, decl_name);
mod.decl_table.putAssumeCapacityNoClobber(name_hash, new_decl);
@ -4076,7 +4054,7 @@ pub fn createAnonymousDecl(
defer mod.gpa.free(name);
const name_hash = scope.namespace().fullyQualifiedNameHash(name);
const src_hash: std.zig.SrcHash = undefined;
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_index, name_hash, src_hash);
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_node, name_hash, src_hash);
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
decl_arena_state.* = decl_arena.state;
@ -4112,7 +4090,7 @@ pub fn createContainerDecl(
defer mod.gpa.free(name);
const name_hash = scope.namespace().fullyQualifiedNameHash(name);
const src_hash: std.zig.SrcHash = undefined;
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_index, name_hash, src_hash);
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_node, name_hash, src_hash);
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
decl_arena_state.* = decl_arena.state;

View File

@ -4,18 +4,29 @@ const std = @import("std");
const fs = std.fs;
const mem = std.mem;
const Allocator = mem.Allocator;
const assert = std.debug.assert;
const Compilation = @import("Compilation.zig");
const Module = @import("Module.zig");
pub const Table = std.StringHashMapUnmanaged(*Package);
/// Fixed name hash assigned to the root package's namespace.
/// The specific byte values are arbitrary; they only need to be a stable
/// constant so the root namespace hashes consistently across runs.
pub const root_namespace_hash: Module.Scope.NameHash = .{
0, 0, 6, 6, 6, 0, 0, 0,
6, 9, 0, 0, 0, 4, 2, 0,
};
root_src_directory: Compilation.Directory,
/// Relative to `root_src_directory`. May contain path separators.
root_src_path: []const u8,
table: Table = .{},
parent: ?*Package = null,
namespace_hash: Module.Scope.NameHash,
/// Whether to free `root_src_directory` on `destroy`.
root_src_directory_owned: bool = false,
/// Allocate a Package. No references to the slices passed are kept.
/// Don't forget to set `namespace_hash` later.
pub fn create(
gpa: *Allocator,
/// Null indicates the current working directory
@ -38,27 +49,69 @@ pub fn create(
.handle = if (owned_dir_path) |p| try fs.cwd().openDir(p, .{}) else fs.cwd(),
},
.root_src_path = owned_src_path,
.root_src_directory_owned = true,
.namespace_hash = undefined,
};
return ptr;
}
/// Free all memory associated with this package and recursively call destroy
/// on all packages in its table
pub fn createWithDir(
gpa: *Allocator,
directory: Compilation.Directory,
/// Relative to `directory`. If null, means `directory` is the root src dir
/// and is owned externally.
root_src_dir_path: ?[]const u8,
/// Relative to root_src_dir_path
root_src_path: []const u8,
) !*Package {
const ptr = try gpa.create(Package);
errdefer gpa.destroy(ptr);
const owned_src_path = try gpa.dupe(u8, root_src_path);
errdefer gpa.free(owned_src_path);
if (root_src_dir_path) |p| {
const owned_dir_path = try directory.join(gpa, &[1][]const u8{p});
errdefer gpa.free(owned_dir_path);
ptr.* = .{
.root_src_directory = .{
.path = owned_dir_path,
.handle = try directory.handle.openDir(p, .{}),
},
.root_src_directory_owned = true,
.root_src_path = owned_src_path,
.namespace_hash = undefined,
};
} else {
ptr.* = .{
.root_src_directory = directory,
.root_src_directory_owned = false,
.root_src_path = owned_src_path,
.namespace_hash = undefined,
};
}
return ptr;
}
/// Free all memory associated with this package. It does not destroy any packages
/// inside its table; the caller is responsible for calling destroy() on them.
pub fn destroy(pkg: *Package, gpa: *Allocator) void {
gpa.free(pkg.root_src_path);
// If root_src_directory.path is null then the handle is the cwd()
// which shouldn't be closed.
if (pkg.root_src_directory.path) |p| {
gpa.free(p);
pkg.root_src_directory.handle.close();
if (pkg.root_src_directory_owned) {
// If root_src_directory.path is null then the handle is the cwd()
// which shouldn't be closed.
if (pkg.root_src_directory.path) |p| {
gpa.free(p);
pkg.root_src_directory.handle.close();
}
}
{
var it = pkg.table.iterator();
while (it.next()) |kv| {
kv.value.destroy(gpa);
gpa.free(kv.key);
}
}
@ -72,3 +125,10 @@ pub fn add(pkg: *Package, gpa: *Allocator, name: []const u8, package: *Package)
const name_dupe = try mem.dupe(gpa, u8, name);
pkg.table.putAssumeCapacityNoClobber(name_dupe, package);
}
pub fn addAndAdopt(parent: *Package, gpa: *Allocator, name: []const u8, child: *Package) !void {
assert(child.parent == null); // make up your mind, who is the parent??
child.parent = parent;
child.namespace_hash = std.zig.hashName(parent.namespace_hash, ":", name);
return parent.add(gpa, name, child);
}

View File

@ -199,6 +199,7 @@ pub fn analyzeBody(
.fn_type_cc => try sema.zirFnTypeCc(block, inst, false),
.fn_type_cc_var_args => try sema.zirFnTypeCc(block, inst, true),
.fn_type_var_args => try sema.zirFnType(block, inst, true),
.has_decl => try sema.zirHasDecl(block, inst),
.import => try sema.zirImport(block, inst),
.indexable_ptr_len => try sema.zirIndexablePtrLen(block, inst),
.int => try sema.zirInt(block, inst),
@ -258,11 +259,14 @@ pub fn analyzeBody(
.switch_capture_multi_ref => try sema.zirSwitchCapture(block, inst, true, true),
.switch_capture_else => try sema.zirSwitchCaptureElse(block, inst, false),
.switch_capture_else_ref => try sema.zirSwitchCaptureElse(block, inst, true),
.type_info => try sema.zirTypeInfo(block, inst),
.typeof => try sema.zirTypeof(block, inst),
.typeof_elem => try sema.zirTypeofElem(block, inst),
.typeof_peer => try sema.zirTypeofPeer(block, inst),
.xor => try sema.zirBitwise(block, inst, .xor),
.struct_init_empty => try sema.zirStructInitEmpty(block, inst),
.struct_init => try sema.zirStructInit(block, inst),
.field_type => try sema.zirFieldType(block, inst),
.struct_decl => try sema.zirStructDecl(block, inst, .Auto),
.struct_decl_packed => try sema.zirStructDecl(block, inst, .Packed),
@ -342,6 +346,10 @@ pub fn analyzeBody(
try sema.zirValidateStructInitPtr(block, inst);
continue;
},
.@"export" => {
try sema.zirExport(block, inst);
continue;
},
// Special case instructions to handle comptime control flow.
.repeat_inline => {
@ -593,6 +601,10 @@ fn zirStructDecl(
const struct_obj = try new_decl_arena.allocator.create(Module.Struct);
const struct_ty = try Type.Tag.@"struct".create(&new_decl_arena.allocator, struct_obj);
const struct_val = try Value.Tag.ty.create(&new_decl_arena.allocator, struct_ty);
const new_decl = try sema.mod.createAnonymousDecl(&block.base, &new_decl_arena, .{
.ty = Type.initTag(.type),
.val = struct_val,
});
struct_obj.* = .{
.owner_decl = sema.owner_decl,
.fields = fields_map,
@ -600,12 +612,9 @@ fn zirStructDecl(
.container = .{
.ty = struct_ty,
.file_scope = block.getFileScope(),
.parent_name_hash = new_decl.fullyQualifiedNameHash(),
},
};
const new_decl = try sema.mod.createAnonymousDecl(&block.base, &new_decl_arena, .{
.ty = Type.initTag(.type),
.val = struct_val,
});
return sema.analyzeDeclVal(block, src, new_decl);
}
@ -1333,6 +1342,28 @@ fn analyzeBlockBody(
return &merges.block_inst.base;
}
fn zirExport(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!void {
const tracy = trace(@src());
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const extra = sema.code.extraData(zir.Inst.Bin, inst_data.payload_index).data;
const src = inst_data.src();
const lhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const rhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
// TODO (see corresponding TODO in AstGen) this is supposed to be a `decl_ref`
// instruction, which could reference any decl, which is then supposed to get
// exported, regardless of whether or not it is a function.
const target_fn = try sema.resolveInstConst(block, lhs_src, extra.lhs);
// TODO (see corresponding TODO in AstGen) this is supposed to be
// `std.builtin.ExportOptions`, not a string.
const export_name = try sema.resolveConstString(block, rhs_src, extra.rhs);
const actual_fn = target_fn.val.castTag(.function).?.data;
try sema.mod.analyzeExport(&block.base, src, export_name, actual_fn.owner_decl);
}
fn zirBreakpoint(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!void {
const tracy = trace(@src());
defer tracy.end();
@ -1402,9 +1433,6 @@ fn zirDbgStmtNode(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerE
}
fn zirDeclRef(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const decl = sema.owner_decl.dependencies.entries.items[inst_data.payload_index].key;
@ -1412,9 +1440,6 @@ fn zirDeclRef(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError
}
fn zirDeclVal(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const decl = sema.owner_decl.dependencies.entries.items[inst_data.payload_index].key;
@ -2543,7 +2568,10 @@ fn zirElemVal(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError
const bin_inst = sema.code.instructions.items(.data)[inst].bin;
const array = try sema.resolveInst(bin_inst.lhs);
const array_ptr = try sema.analyzeRef(block, sema.src, array);
const array_ptr = if (array.ty.zigTypeTag() == .Pointer)
array
else
try sema.analyzeRef(block, sema.src, array);
const elem_index = try sema.resolveInst(bin_inst.rhs);
const result_ptr = try sema.elemPtr(block, sema.src, array_ptr, elem_index, sema.src);
return sema.analyzeLoad(block, sema.src, result_ptr, sema.src);
@ -2558,7 +2586,10 @@ fn zirElemValNode(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerE
const elem_index_src: LazySrcLoc = .{ .node_offset_array_access_index = inst_data.src_node };
const extra = sema.code.extraData(zir.Inst.Bin, inst_data.payload_index).data;
const array = try sema.resolveInst(extra.lhs);
const array_ptr = try sema.analyzeRef(block, src, array);
const array_ptr = if (array.ty.zigTypeTag() == .Pointer)
array
else
try sema.analyzeRef(block, src, array);
const elem_index = try sema.resolveInst(extra.rhs);
const result_ptr = try sema.elemPtr(block, src, array_ptr, elem_index, elem_index_src);
return sema.analyzeLoad(block, src, result_ptr, src);
@ -3595,6 +3626,34 @@ fn validateSwitchNoRange(
return sema.mod.failWithOwnedErrorMsg(&block.base, msg);
}
fn zirHasDecl(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const extra = sema.code.extraData(zir.Inst.Bin, inst_data.payload_index).data;
const src = inst_data.src();
const lhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const rhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
const container_type = try sema.resolveType(block, lhs_src, extra.lhs);
const decl_name = try sema.resolveConstString(block, rhs_src, extra.rhs);
const mod = sema.mod;
const arena = sema.arena;
const container_scope = container_type.getContainerScope() orelse return mod.fail(
&block.base,
lhs_src,
"expected struct, enum, union, or opaque, found '{}'",
.{container_type},
);
if (mod.lookupDeclName(&container_scope.base, decl_name)) |decl| {
// TODO if !decl.is_pub and inDifferentFiles() return false
return mod.constBool(arena, src, true);
} else {
return mod.constBool(arena, src, false);
}
}
fn zirImport(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@ -4021,6 +4080,12 @@ fn zirCmp(
return block.addBinOp(src, bool_type, tag, casted_lhs, casted_rhs);
}
fn zirTypeInfo(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();
return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirTypeInfo", .{});
}
fn zirTypeof(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();
@ -4438,6 +4503,18 @@ fn zirStructInitEmpty(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) In
});
}
fn zirStructInit(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
return sema.mod.fail(&block.base, src, "TODO: Sema.zirStructInit", .{});
}
fn zirFieldType(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
return sema.mod.fail(&block.base, src, "TODO: Sema.zirFieldType", .{});
}
fn requireFunctionBlock(sema: *Sema, block: *Scope.Block, src: LazySrcLoc) !void {
if (sema.func == null) {
return sema.mod.fail(&block.base, src, "instruction illegal outside function body", .{});
@ -4632,7 +4709,8 @@ fn namedFieldPtr(
.Struct, .Opaque, .Union => {
if (child_type.getContainerScope()) |container_scope| {
if (mod.lookupDeclName(&container_scope.base, field_name)) |decl| {
// TODO if !decl.is_pub and inDifferentFiles() "{} is private"
if (!decl.is_pub and !(decl.container.file_scope == block.base.namespace().file_scope))
return mod.fail(&block.base, src, "'{s}' is private", .{field_name});
return sema.analyzeDeclRef(block, src, decl);
}
@ -4660,7 +4738,8 @@ fn namedFieldPtr(
.Enum => {
if (child_type.getContainerScope()) |container_scope| {
if (mod.lookupDeclName(&container_scope.base, field_name)) |decl| {
// TODO if !decl.is_pub and inDifferentFiles() "{} is private"
if (!decl.is_pub and !(decl.container.file_scope == block.base.namespace().file_scope))
return mod.fail(&block.base, src, "'{s}' is private", .{field_name});
return sema.analyzeDeclRef(block, src, decl);
}
}
@ -4731,37 +4810,51 @@ fn elemPtr(
elem_index: *Inst,
elem_index_src: LazySrcLoc,
) InnerError!*Inst {
const elem_ty = switch (array_ptr.ty.zigTypeTag()) {
const array_ty = switch (array_ptr.ty.zigTypeTag()) {
.Pointer => array_ptr.ty.elemType(),
else => return sema.mod.fail(&block.base, array_ptr.src, "expected pointer, found '{}'", .{array_ptr.ty}),
};
if (!elem_ty.isIndexable()) {
return sema.mod.fail(&block.base, src, "array access of non-array type '{}'", .{elem_ty});
if (!array_ty.isIndexable()) {
return sema.mod.fail(&block.base, src, "array access of non-array type '{}'", .{array_ty});
}
if (elem_ty.isSinglePointer() and elem_ty.elemType().zigTypeTag() == .Array) {
if (array_ty.isSinglePointer() and array_ty.elemType().zigTypeTag() == .Array) {
// we have to deref the ptr operand to get the actual array pointer
const array_ptr_deref = try sema.analyzeLoad(block, src, array_ptr, array_ptr.src);
if (array_ptr_deref.value()) |array_ptr_val| {
if (elem_index.value()) |index_val| {
// Both array pointer and index are compile-time known.
const index_u64 = index_val.toUnsignedInt();
// @intCast here because it would have been impossible to construct a value that
// required a larger index.
const elem_ptr = try array_ptr_val.elemPtr(sema.arena, @intCast(usize, index_u64));
const pointee_type = elem_ty.elemType().elemType();
return sema.mod.constInst(sema.arena, src, .{
.ty = try Type.Tag.single_const_pointer.create(sema.arena, pointee_type),
.val = elem_ptr,
});
}
}
return sema.elemPtrArray(block, src, array_ptr_deref, elem_index, elem_index_src);
}
if (array_ty.zigTypeTag() == .Array) {
return sema.elemPtrArray(block, src, array_ptr, elem_index, elem_index_src);
}
return sema.mod.fail(&block.base, src, "TODO implement more analyze elemptr", .{});
}
fn elemPtrArray(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
array_ptr: *Inst,
elem_index: *Inst,
elem_index_src: LazySrcLoc,
) InnerError!*Inst {
if (array_ptr.value()) |array_ptr_val| {
if (elem_index.value()) |index_val| {
// Both array pointer and index are compile-time known.
const index_u64 = index_val.toUnsignedInt();
// @intCast here because it would have been impossible to construct a value that
// required a larger index.
const elem_ptr = try array_ptr_val.elemPtr(sema.arena, @intCast(usize, index_u64));
const pointee_type = array_ptr.ty.elemType().elemType();
return sema.mod.constInst(sema.arena, src, .{
.ty = try Type.Tag.single_const_pointer.create(sema.arena, pointee_type),
.val = elem_ptr,
});
}
}
return sema.mod.fail(&block.base, src, "TODO implement more analyze elemptr for arrays", .{});
}
fn coerce(
sema: *Sema,
block: *Scope.Block,
@ -5244,9 +5337,9 @@ fn analyzeImport(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, target_strin
try std.fs.path.resolve(sema.gpa, &[_][]const u8{ cur_pkg_dir_path, target_string });
errdefer sema.gpa.free(resolved_path);
if (sema.mod.import_table.get(resolved_path)) |some| {
if (sema.mod.import_table.get(resolved_path)) |cached_import| {
sema.gpa.free(resolved_path);
return some;
return cached_import;
}
if (found_pkg == null) {
@ -5264,6 +5357,11 @@ fn analyzeImport(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, target_strin
const struct_ty = try Type.Tag.empty_struct.create(sema.gpa, &file_scope.root_container);
errdefer sema.gpa.destroy(struct_ty.castTag(.empty_struct).?);
const container_name_hash: Scope.NameHash = if (found_pkg) |pkg|
pkg.namespace_hash
else
std.zig.hashName(cur_pkg.namespace_hash, "/", resolved_path);
file_scope.* = .{
.sub_file_path = resolved_path,
.source = .{ .unloaded = {} },
@ -5274,6 +5372,7 @@ fn analyzeImport(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, target_strin
.file_scope = file_scope,
.decls = .{},
.ty = struct_ty,
.parent_name_hash = container_name_hash,
},
};
sema.mod.analyzeContainer(&file_scope.root_container) catch |err| switch (err) {

View File

@ -417,7 +417,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
const node_datas = tree.nodes.items(.data);
const token_starts = tree.tokens.items(.start);
const fn_decl = tree.rootDecls()[module_fn.owner_decl.src_index];
const fn_decl = module_fn.owner_decl.src_node;
assert(node_tags[fn_decl] == .fn_decl);
const block = node_datas[fn_decl].rhs;
const lbrace_src = token_starts[tree.firstToken(block)];

View File

@ -2228,10 +2228,9 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
const node_datas = tree.nodes.items(.data);
const token_starts = tree.tokens.items(.start);
const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
const fn_decl = file_ast_decls[decl.src_index];
const fn_decl = decl.src_node;
assert(node_tags[fn_decl] == .fn_decl);
const block = node_datas[fn_decl].rhs;
const lbrace = tree.firstToken(block);
@ -2755,10 +2754,9 @@ pub fn updateDeclLineNumber(self: *Elf, module: *Module, decl: *const Module.Dec
const node_datas = tree.nodes.items(.data);
const token_starts = tree.tokens.items(.start);
const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
const fn_decl = file_ast_decls[decl.src_index];
const fn_decl = decl.src_node;
assert(node_tags[fn_decl] == .fn_decl);
const block = node_datas[fn_decl].rhs;
const lbrace = tree.firstToken(block);

View File

@ -909,10 +909,9 @@ pub fn updateDeclLineNumber(self: *DebugSymbols, module: *Module, decl: *const M
const node_datas = tree.nodes.items(.data);
const token_starts = tree.tokens.items(.start);
const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
const fn_decl = file_ast_decls[decl.src_index];
const fn_decl = decl.src_node;
assert(node_tags[fn_decl] == .fn_decl);
const block = node_datas[fn_decl].rhs;
const lbrace = tree.firstToken(block);
@ -959,10 +958,9 @@ pub fn initDeclDebugBuffers(
const node_datas = tree.nodes.items(.data);
const token_starts = tree.tokens.items(.start);
const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
const fn_decl = file_ast_decls[decl.src_index];
const fn_decl = decl.src_node;
assert(node_tags[fn_decl] == .fn_decl);
const block = node_datas[fn_decl].rhs;
const lbrace = tree.firstToken(block);

View File

@ -505,7 +505,6 @@ fn buildOutputType(
var emit_bin: EmitBin = .yes_default_path;
var emit_asm: Emit = .no;
var emit_llvm_ir: Emit = .no;
var emit_zir: Emit = .no;
var emit_docs: Emit = .no;
var emit_analysis: Emit = .no;
var target_arch_os_abi: []const u8 = "native";
@ -599,15 +598,15 @@ fn buildOutputType(
var test_exec_args = std.ArrayList(?[]const u8).init(gpa);
defer test_exec_args.deinit();
const pkg_tree_root = try gpa.create(Package);
// This package only exists to clean up the code parsing --pkg-begin and
// --pkg-end flags. Use dummy values that are safe for the destroy call.
pkg_tree_root.* = .{
var pkg_tree_root: Package = .{
.root_src_directory = .{ .path = null, .handle = fs.cwd() },
.root_src_path = &[0]u8{},
.namespace_hash = Package.root_namespace_hash,
};
defer pkg_tree_root.destroy(gpa);
var cur_pkg: *Package = pkg_tree_root;
defer freePkgTree(gpa, &pkg_tree_root, false);
var cur_pkg: *Package = &pkg_tree_root;
switch (arg_mode) {
.build, .translate_c, .zig_test, .run => {
@ -658,8 +657,7 @@ fn buildOutputType(
) catch |err| {
fatal("Failed to add package at path {s}: {s}", .{ pkg_path, @errorName(err) });
};
new_cur_pkg.parent = cur_pkg;
try cur_pkg.add(gpa, pkg_name, new_cur_pkg);
try cur_pkg.addAndAdopt(gpa, pkg_name, new_cur_pkg);
cur_pkg = new_cur_pkg;
} else if (mem.eql(u8, arg, "--pkg-end")) {
cur_pkg = cur_pkg.parent orelse
@ -924,12 +922,6 @@ fn buildOutputType(
emit_bin = .{ .yes = arg["-femit-bin=".len..] };
} else if (mem.eql(u8, arg, "-fno-emit-bin")) {
emit_bin = .no;
} else if (mem.eql(u8, arg, "-femit-zir")) {
emit_zir = .yes_default_path;
} else if (mem.startsWith(u8, arg, "-femit-zir=")) {
emit_zir = .{ .yes = arg["-femit-zir=".len..] };
} else if (mem.eql(u8, arg, "-fno-emit-zir")) {
emit_zir = .no;
} else if (mem.eql(u8, arg, "-femit-h")) {
emit_h = .yes_default_path;
} else if (mem.startsWith(u8, arg, "-femit-h=")) {
@ -1026,7 +1018,7 @@ fn buildOutputType(
.extra_flags = try arena.dupe([]const u8, extra_cflags.items),
});
},
.zig, .zir => {
.zig => {
if (root_src_file) |other| {
fatal("found another zig file '{s}' after root source file '{s}'", .{ arg, other });
} else {
@ -1087,7 +1079,7 @@ fn buildOutputType(
.unknown, .shared_library, .object, .static_library => {
try link_objects.append(it.only_arg);
},
.zig, .zir => {
.zig => {
if (root_src_file) |other| {
fatal("found another zig file '{s}' after root source file '{s}'", .{ it.only_arg, other });
} else {
@ -1725,13 +1717,6 @@ fn buildOutputType(
var emit_docs_resolved = try emit_docs.resolve("docs");
defer emit_docs_resolved.deinit();
switch (emit_zir) {
.no => {},
.yes_default_path, .yes => {
fatal("The -femit-zir implementation has been intentionally deleted so that it can be rewritten as a proper backend.", .{});
},
}
const root_pkg: ?*Package = if (root_src_file) |src_path| blk: {
if (main_pkg_path) |p| {
const rel_src_path = try fs.path.relative(gpa, p, src_path);
@ -1747,6 +1732,7 @@ fn buildOutputType(
if (root_pkg) |pkg| {
pkg.table = pkg_tree_root.table;
pkg_tree_root.table = .{};
pkg.namespace_hash = pkg_tree_root.namespace_hash;
}
const self_exe_path = try fs.selfExePathAlloc(arena);
@ -2155,6 +2141,18 @@ fn updateModule(gpa: *Allocator, comp: *Compilation, hook: AfterUpdateHook) !voi
}
}
fn freePkgTree(gpa: *Allocator, pkg: *Package, free_parent: bool) void {
{
var it = pkg.table.iterator();
while (it.next()) |kv| {
freePkgTree(gpa, kv.value, true);
}
}
if (free_parent) {
pkg.destroy(gpa);
}
}
fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !void {
if (!build_options.have_llvm)
fatal("cannot translate-c: compiler built without LLVM extensions", .{});
@ -2509,6 +2507,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
.handle = try zig_lib_directory.handle.openDir(std_special, .{}),
},
.root_src_path = "build_runner.zig",
.namespace_hash = Package.root_namespace_hash,
};
defer root_pkg.root_src_directory.handle.close();
@ -2554,8 +2553,9 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
var build_pkg: Package = .{
.root_src_directory = build_directory,
.root_src_path = build_zig_basename,
.namespace_hash = undefined,
};
try root_pkg.table.put(arena, "@build", &build_pkg);
try root_pkg.addAndAdopt(arena, "@build", &build_pkg);
var global_cache_directory: Compilation.Directory = l: {
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);

View File

@ -9137,6 +9137,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
buf_appendf(contents, "pub const position_independent_executable = %s;\n", bool_to_str(g->have_pie));
buf_appendf(contents, "pub const strip_debug_info = %s;\n", bool_to_str(g->strip_debug_symbols));
buf_appendf(contents, "pub const code_model = CodeModel.default;\n");
buf_appendf(contents, "pub const zig_is_stage2 = false;\n");
{
TargetSubsystem detected_subsystem = detect_subsystem(g);

View File

@ -122,11 +122,6 @@ pub const TestContext = struct {
path: []const u8,
};
pub const Extension = enum {
Zig,
ZIR,
};
/// A `Case` consists of a list of `Update`. The same `Compilation` is used for each
/// update, so each update's source is treated as a single file being
/// updated by the test harness and incrementally compiled.
@ -141,7 +136,6 @@ pub const TestContext = struct {
/// to Executable.
output_mode: std.builtin.OutputMode,
updates: std.ArrayList(Update),
extension: Extension,
object_format: ?std.builtin.ObjectFormat = null,
emit_h: bool = false,
llvm_backend: bool = false,
@ -238,14 +232,12 @@ pub const TestContext = struct {
ctx: *TestContext,
name: []const u8,
target: CrossTarget,
extension: Extension,
) *Case {
ctx.cases.append(Case{
.name = name,
.target = target,
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.extension = extension,
.files = std.ArrayList(File).init(ctx.cases.allocator),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
@ -253,7 +245,7 @@ pub const TestContext = struct {
/// Adds a test case for Zig input, producing an executable
pub fn exe(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
return ctx.addExe(name, target, .Zig);
return ctx.addExe(name, target);
}
/// Adds a test case for ZIR input, producing an executable
@ -269,7 +261,6 @@ pub const TestContext = struct {
.target = target,
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.extension = .Zig,
.object_format = .c,
.files = std.ArrayList(File).init(ctx.cases.allocator),
}) catch @panic("out of memory");
@ -284,7 +275,6 @@ pub const TestContext = struct {
.target = target,
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.extension = .Zig,
.files = std.ArrayList(File).init(ctx.cases.allocator),
.llvm_backend = true,
}) catch @panic("out of memory");
@ -295,14 +285,12 @@ pub const TestContext = struct {
ctx: *TestContext,
name: []const u8,
target: CrossTarget,
extension: Extension,
) *Case {
ctx.cases.append(Case{
.name = name,
.target = target,
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Obj,
.extension = extension,
.files = std.ArrayList(File).init(ctx.cases.allocator),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
@ -310,7 +298,7 @@ pub const TestContext = struct {
/// Adds a test case for Zig input, producing an object file.
pub fn obj(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
return ctx.addObj(name, target, .Zig);
return ctx.addObj(name, target);
}
/// Adds a test case for ZIR input, producing an object file.
@ -319,13 +307,12 @@ pub const TestContext = struct {
}
/// Adds a test case for Zig or ZIR input, producing C code.
pub fn addC(ctx: *TestContext, name: []const u8, target: CrossTarget, ext: Extension) *Case {
pub fn addC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
ctx.cases.append(Case{
.name = name,
.target = target,
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Obj,
.extension = ext,
.object_format = .c,
.files = std.ArrayList(File).init(ctx.cases.allocator),
}) catch @panic("out of memory");
@ -333,21 +320,20 @@ pub const TestContext = struct {
}
pub fn c(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void {
ctx.addC(name, target, .Zig).addCompareObjectFile(src, zig_h ++ out);
ctx.addC(name, target).addCompareObjectFile(src, zig_h ++ out);
}
pub fn h(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void {
ctx.addC(name, target, .Zig).addHeader(src, zig_h ++ out);
ctx.addC(name, target).addHeader(src, zig_h ++ out);
}
pub fn addCompareOutput(
ctx: *TestContext,
name: []const u8,
extension: Extension,
src: [:0]const u8,
expected_stdout: []const u8,
) void {
ctx.addExe(name, .{}, extension).addCompareOutput(src, expected_stdout);
ctx.addExe(name, .{}).addCompareOutput(src, expected_stdout);
}
/// Adds a test case that compiles the Zig source given in `src`, executes
@ -358,7 +344,7 @@ pub const TestContext = struct {
src: [:0]const u8,
expected_stdout: []const u8,
) void {
return ctx.addCompareOutput(name, .Zig, src, expected_stdout);
return ctx.addCompareOutput(name, src, expected_stdout);
}
/// Adds a test case that compiles the ZIR source given in `src`, executes
@ -376,11 +362,10 @@ pub const TestContext = struct {
ctx: *TestContext,
name: []const u8,
target: CrossTarget,
extension: Extension,
src: [:0]const u8,
result: [:0]const u8,
) void {
ctx.addObj(name, target, extension).addTransform(src, result);
ctx.addObj(name, target).addTransform(src, result);
}
/// Adds a test case that compiles the Zig given in `src` to ZIR and tests
@ -392,7 +377,7 @@ pub const TestContext = struct {
src: [:0]const u8,
result: [:0]const u8,
) void {
ctx.addTransform(name, target, .Zig, src, result);
ctx.addTransform(name, target, src, result);
}
/// Adds a test case that cleans up the ZIR source given in `src`, and
@ -411,11 +396,10 @@ pub const TestContext = struct {
ctx: *TestContext,
name: []const u8,
target: CrossTarget,
extension: Extension,
src: [:0]const u8,
expected_errors: []const []const u8,
) void {
ctx.addObj(name, target, extension).addError(src, expected_errors);
ctx.addObj(name, target).addError(src, expected_errors);
}
/// Adds a test case that ensures that the Zig given in `src` fails to
@ -428,7 +412,7 @@ pub const TestContext = struct {
src: [:0]const u8,
expected_errors: []const []const u8,
) void {
ctx.addError(name, target, .Zig, src, expected_errors);
ctx.addError(name, target, src, expected_errors);
}
/// Adds a test case that ensures that the ZIR given in `src` fails to
@ -448,10 +432,9 @@ pub const TestContext = struct {
ctx: *TestContext,
name: []const u8,
target: CrossTarget,
extension: Extension,
src: [:0]const u8,
) void {
ctx.addObj(name, target, extension).compiles(src);
ctx.addObj(name, target).compiles(src);
}
/// Adds a test case that asserts that the Zig given in `src` compiles
@ -462,7 +445,7 @@ pub const TestContext = struct {
target: CrossTarget,
src: [:0]const u8,
) void {
ctx.addCompiles(name, target, .Zig, src);
ctx.addCompiles(name, target, src);
}
/// Adds a test case that asserts that the ZIR given in `src` compiles
@ -489,7 +472,7 @@ pub const TestContext = struct {
expected_errors: []const []const u8,
fixed_src: [:0]const u8,
) void {
var case = ctx.addObj(name, target, .Zig);
var case = ctx.addObj(name, target);
case.addError(src, expected_errors);
case.compiles(fixed_src);
}
@ -614,15 +597,14 @@ pub const TestContext = struct {
.path = try std.fs.path.join(arena, &[_][]const u8{ tmp_dir_path, "zig-cache" }),
};
const tmp_src_path = switch (case.extension) {
.Zig => "test_case.zig",
.ZIR => "test_case.zir",
};
const tmp_src_path = "test_case.zig";
var root_pkg: Package = .{
.root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
.root_src_path = tmp_src_path,
.namespace_hash = Package.root_namespace_hash,
};
defer root_pkg.table.deinit(allocator);
const bin_name = try std.zig.binNameAlloc(arena, .{
.root_name = "test_case",
@ -639,13 +621,10 @@ pub const TestContext = struct {
.directory = emit_directory,
.basename = bin_name,
};
const emit_h: ?Compilation.EmitLoc = if (case.emit_h)
.{
.directory = emit_directory,
.basename = "test_case.h",
}
else
null;
const emit_h: ?Compilation.EmitLoc = if (case.emit_h) .{
.directory = emit_directory,
.basename = "test_case.h",
} else null;
const comp = try Compilation.create(allocator, .{
.local_cache_directory = zig_cache_directory,
.global_cache_directory = global_cache_directory,

View File

@ -328,6 +328,9 @@ pub const Inst = struct {
error_union_type,
/// `error.Foo` syntax. Uses the `str_tok` field of the Data union.
error_value,
/// Implements the `@export` builtin function.
/// Uses the `pl_node` union field. Payload is `Bin`.
@"export",
/// Given a pointer to a struct or object that contains virtual fields, returns a pointer
/// to the named field. The field name is stored in string_bytes. Used by a.b syntax.
/// Uses `pl_node` field. The AST node is the a.b syntax. Payload is Field.
@ -360,6 +363,9 @@ pub const Inst = struct {
fn_type_cc,
/// Same as `fn_type_cc` but the function is variadic.
fn_type_cc_var_args,
/// Implements the `@hasDecl` builtin.
/// Uses the `pl_node` union field. Payload is `Bin`.
has_decl,
/// `@import(operand)`.
/// Uses the `un_node` field.
import,
@ -668,12 +674,21 @@ pub const Inst = struct {
/// A struct literal with a specified type, with no fields.
/// Uses the `un_node` field.
struct_init_empty,
/// Given a struct, union, enum, or opaque and a field name, returns the field type.
/// Uses the `pl_node` field. Payload is `FieldType`.
field_type,
/// Finalizes a typed struct initialization, performs validation, and returns the
/// struct value.
/// Uses the `pl_node` field. Payload is `StructInit`.
struct_init,
/// Converts an integer into an enum value.
/// Uses `pl_node` with payload `Bin`. `lhs` is enum type, `rhs` is operand.
int_to_enum,
/// Converts an enum value into an integer. Resulting type will be the tag type
/// of the enum. Uses `un_node`.
enum_to_int,
/// Implements the `@typeInfo` builtin. Uses `un_node`.
type_info,
/// Returns whether the instruction is one of the control flow "noreturn" types.
/// Function calls do not count.
@ -737,6 +752,7 @@ pub const Inst = struct {
.elem_val_node,
.ensure_result_used,
.ensure_result_non_error,
.@"export",
.floatcast,
.field_ptr,
.field_val,
@ -746,6 +762,7 @@ pub const Inst = struct {
.fn_type_var_args,
.fn_type_cc,
.fn_type_cc_var_args,
.has_decl,
.int,
.float,
.float128,
@ -831,8 +848,11 @@ pub const Inst = struct {
.switch_block_ref_under_multi,
.validate_struct_init_ptr,
.struct_init_empty,
.struct_init,
.field_type,
.int_to_enum,
.enum_to_int,
.type_info,
=> false,
.@"break",
@ -1543,6 +1563,24 @@ pub const Inst = struct {
return @bitCast(f128, int_bits);
}
};
/// Trailing is an item per field.
pub const StructInit = struct {
fields_len: u32,
pub const Item = struct {
/// The `field_type` ZIR instruction for this field init.
field_type: Index,
/// The field init expression to be used as the field value.
init: Ref,
};
};
pub const FieldType = struct {
container_type: Ref,
/// Offset into `string_bytes`, null terminated.
name_start: u32,
};
};
pub const SpecialProng = enum { none, @"else", under };
@ -1617,6 +1655,7 @@ const Writer = struct {
.typeof_elem,
.struct_init_empty,
.enum_to_int,
.type_info,
=> try self.writeUnNode(stream, inst),
.ref,
@ -1657,6 +1696,8 @@ const Writer = struct {
.union_decl,
.enum_decl,
.enum_decl_nonexhaustive,
.struct_init,
.field_type,
=> try self.writePlNode(stream, inst),
.add,
@ -1676,12 +1717,14 @@ const Writer = struct {
.cmp_gt,
.cmp_neq,
.div,
.has_decl,
.mod_rem,
.shl,
.shr,
.xor,
.store_node,
.error_union_type,
.@"export",
.merge_error_sets,
.bit_and,
.bit_or,

View File

@ -282,10 +282,10 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
\\source.zig:10:8: [address] in main (test)
\\ foo();
\\ ^
\\start.zig:342:29: [address] in std.start.posixCallMainAndExit (test)
\\start.zig:404:29: [address] in std.start.posixCallMainAndExit (test)
\\ return root.main();
\\ ^
\\start.zig:163:5: [address] in std.start._start (test)
\\start.zig:225:5: [address] in std.start._start (test)
\\ @call(.{ .modifier = .never_inline }, posixCallMainAndExit, .{});
\\ ^
\\
@ -294,7 +294,7 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
switch (std.Target.current.cpu.arch) {
.aarch64 => "", // TODO disabled; results in segfault
else =>
\\start.zig:163:5: [address] in std.start._start (test)
\\start.zig:225:5: [address] in std.start._start (test)
\\ @call(.{ .modifier = .never_inline }, posixCallMainAndExit, .{});
\\ ^
\\

View File

@ -941,6 +941,32 @@ pub fn addCases(ctx: *TestContext) !void {
"",
);
// Array access to a global array.
case.addCompareOutput(
\\const hello = "hello".*;
\\export fn _start() noreturn {
\\ assert(hello[1] == 'e');
\\
\\ exit();
\\}
\\
\\pub fn assert(ok: bool) void {
\\ if (!ok) unreachable; // assertion failure
\\}
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
// 64bit set stack
case.addCompareOutput(
\\export fn _start() noreturn {
@ -1022,7 +1048,7 @@ pub fn addCases(ctx: *TestContext) !void {
"Hello, World!\n",
);
try case.files.append(.{
.src =
.src =
\\pub fn print() void {
\\ asm volatile ("syscall"
\\ :
@ -1038,11 +1064,61 @@ pub fn addCases(ctx: *TestContext) !void {
.path = "print.zig",
});
}
{
var case = ctx.exe("import private", linux_x64);
case.addError(
\\export fn _start() noreturn {
\\ @import("print.zig").print();
\\ exit();
\\}
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (@as(usize, 0))
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
&.{":2:25: error: 'print' is private"},
);
try case.files.append(.{
.src =
\\fn print() void {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (@as(usize, 1)),
\\ [arg1] "{rdi}" (@as(usize, 1)),
\\ [arg2] "{rsi}" (@ptrToInt("Hello, World!\n")),
\\ [arg3] "{rdx}" (@as(usize, 14))
\\ : "rcx", "r11", "memory"
\\ );
\\ return;
\\}
,
.path = "print.zig",
});
}
ctx.compileError("function redefinition", linux_x64,
\\// dummy comment
\\fn entry() void {}
\\fn entry() void {}
, &[_][]const u8{":2:4: error: redefinition of 'entry'"});
, &[_][]const u8{
":3:4: error: redefinition of 'entry'",
":2:1: note: previous definition here",
});
ctx.compileError("global variable redefinition", linux_x64,
\\// dummy comment
\\var foo = false;
\\var foo = true;
, &[_][]const u8{
":3:5: error: redefinition of 'foo'",
":2:1: note: previous definition here",
});
ctx.compileError("compileError", linux_x64,
\\export fn _start() noreturn {