Mirror of https://github.com/ziglang/zig.git (synced 2026-01-20 22:35:24 +00:00)
stage2: improvements towards zig test
* There is now a `main_pkg` in addition to `root_pkg`. They are usually the
  same; when using `zig test`, `main_pkg` is the user's source file and
  `root_pkg` is the test runner package.
* scanDecl no longer looks for test decls outside the package being
  tested. Honoring `--test-filter` is still TODO.
* The test runner's main function now returns `void` rather than
  `anyerror!void` (see the sketch after this list).
* Sema is improved to generate better AIR for `for` loops on slices.
* Sema: fix incorrect capacity calculation in zirBoolBr
* Sema: add compile errors for trying to use slice fields as lvalues.
* Sema: fix type coercion for error unions
* Sema: fix analyzeVarRef generating garbage AIR
* C codegen: fix renderValue for error unions with 0 bit payload
* C codegen: implement function pointer calls
* CLI: fix usage text
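
To make the test-runner change above concrete, here is a minimal sketch of the new entry point, condensed from the test_runner.zig hunk later in this diff. `main2` and `processArgs` are stubbed in so the snippet stands alone, and `builtin.zig_is_stage2` is the stage-detection flag that existed at the time of this commit.

```zig
const builtin = @import("builtin");

// Stand-ins for the real test-runner helpers so the sketch is self-contained.
fn main2() anyerror!void {}
fn processArgs() void {}

// Stage2 cannot yet lower `anyerror!void` as the return type of main, so the
// entry point is plain `void` and any test-runner error becomes a panic.
pub fn main() void {
    if (builtin.zig_is_stage2) {
        return main2() catch @panic("test failure");
    }
    processArgs();
    // ... the stage1 path continues to iterate over builtin.test_functions.
}
```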
Adds 4 new AIR instructions:
* slice_len, slice_ptr: get the len and ptr fields of a slice.
* slice_elem_val, ptr_slice_elem_val: get the element value of a slice,
  and of a pointer to a slice, respectively (sketched below).
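
For reference, the four new tags and their documentation comments look roughly like this inside `Air.Inst.Tag` (condensed from the src/Air.zig hunk later in this diff; the surrounding enum is elided).

```zig
// Condensed sketch of the Air.Inst.Tag members added by this commit.
const Tag = enum {
    // ... existing tags ...

    /// Given a slice value, return the length.
    /// Result type is always usize. Uses the `ty_op` field.
    slice_len,
    /// Given a slice value, return the pointer. Uses the `ty_op` field.
    slice_ptr,
    /// Given a slice value and an element index, return the element value.
    /// Result type is the slice's element type. Uses the `bin_op` field.
    slice_elem_val,
    /// Given a pointer to a slice and an element index, return the element value.
    /// Uses the `bin_op` field.
    ptr_slice_elem_val,
};
```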
AstGen gains new functionality:
* One of the previously unused flags on struct decls is now used to indicate
  structs that are known to have nonzero size based on the AST alone (see the
  sketch below).
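
A minimal, simplified sketch of the idea follows. The real implementation is the new `nodeImpliesRuntimeBits` function in the src/AstGen.zig hunk later in this diff, which walks actual AST node tags; the enum here is a toy stand-in, not the real node-tag type.

```zig
// Toy model: certain field-type expressions already guarantee a nonzero-size
// type from syntax alone (pointers, optionals, function prototypes), so the
// struct can be flagged `known_has_bits` without waiting for Sema.
const FieldTypeNode = enum { identifier, ptr_type, optional_type, fn_proto, builtin_call };

fn nodeImpliesRuntimeBits(node: FieldTypeNode) bool {
    return switch (node) {
        .ptr_type, .optional_type, .fn_proto => true,
        // Identifiers, calls, etc. could still name a zero-bit type; Sema decides later.
        .identifier, .builtin_call => false,
    };
}
```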
commit 7b8cb881df (parent f9798108f8)
@@ -21,9 +21,9 @@ fn processArgs() void {
         std.testing.zig_exe_path = args[1];
     }
 
-pub fn main() anyerror!void {
+pub fn main() void {
     if (builtin.zig_is_stage2) {
-        return main2();
+        return main2() catch @panic("test failure");
     }
     processArgs();
     const test_fn_list = builtin.test_functions;

src/Air.zig (30 changed lines)
@@ -247,6 +247,21 @@ pub const Inst = struct {
         /// Given a pointer to a struct and a field index, returns a pointer to the field.
         /// Uses the `ty_pl` field, payload is `StructField`.
         struct_field_ptr,
+        /// Given a slice value, return the length.
+        /// Result type is always usize.
+        /// Uses the `ty_op` field.
+        slice_len,
+        /// Given a slice value, return the pointer.
+        /// Uses the `ty_op` field.
+        slice_ptr,
+        /// Given a slice value, and element index, return the element value at that index.
+        /// Result type is the element type of the slice operand.
+        /// Uses the `bin_op` field.
+        slice_elem_val,
+        /// Given a pointer to a slice, and element index, return the element value at that index.
+        /// Result type is the element type of the slice operand (2 element type operations).
+        /// Uses the `bin_op` field.
+        ptr_slice_elem_val,
 
         pub fn fromCmpOp(op: std.math.CompareOperator) Tag {
             return switch (op) {
@@ -450,6 +465,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
         .unwrap_errunion_err_ptr,
         .wrap_errunion_payload,
         .wrap_errunion_err,
+        .slice_ptr,
         => return air.getRefType(datas[inst].ty_op.ty),
 
         .loop,
@@ -465,12 +481,24 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
         .store,
         => return Type.initTag(.void),
 
-        .ptrtoint => return Type.initTag(.usize),
+        .ptrtoint,
+        .slice_len,
+        => return Type.initTag(.usize),
 
         .call => {
             const callee_ty = air.typeOf(datas[inst].pl_op.operand);
             return callee_ty.fnReturnType();
         },
+
+        .slice_elem_val => {
+            const slice_ty = air.typeOf(datas[inst].bin_op.lhs);
+            return slice_ty.elemType();
+        },
+        .ptr_slice_elem_val => {
+            const ptr_slice_ty = air.typeOf(datas[inst].bin_op.lhs);
+            const slice_ty = ptr_slice_ty.elemType();
+            return slice_ty.elemType();
+        },
     }
 }

src/AstGen.zig (190 changed lines)
@ -3470,6 +3470,7 @@ fn structDeclInner(
|
||||
.fields_len = 0,
|
||||
.body_len = 0,
|
||||
.decls_len = 0,
|
||||
.known_has_bits = false,
|
||||
});
|
||||
return indexToRef(decl_inst);
|
||||
}
|
||||
@ -3510,6 +3511,7 @@ fn structDeclInner(
|
||||
var bit_bag = ArrayListUnmanaged(u32){};
|
||||
defer bit_bag.deinit(gpa);
|
||||
|
||||
var known_has_bits = false;
|
||||
var cur_bit_bag: u32 = 0;
|
||||
var field_index: usize = 0;
|
||||
for (container_decl.ast.members) |member_node| {
|
||||
@ -3657,6 +3659,8 @@ fn structDeclInner(
|
||||
try typeExpr(&block_scope, &block_scope.base, member.ast.type_expr);
|
||||
fields_data.appendAssumeCapacity(@enumToInt(field_type));
|
||||
|
||||
known_has_bits = known_has_bits or nodeImpliesRuntimeBits(tree, member.ast.type_expr);
|
||||
|
||||
const have_align = member.ast.align_expr != 0;
|
||||
const have_value = member.ast.value_expr != 0;
|
||||
const is_comptime = member.comptime_token != null;
|
||||
@ -3706,6 +3710,7 @@ fn structDeclInner(
|
||||
.body_len = @intCast(u32, block_scope.instructions.items.len),
|
||||
.fields_len = @intCast(u32, field_index),
|
||||
.decls_len = @intCast(u32, wip_decls.decl_index),
|
||||
.known_has_bits = known_has_bits,
|
||||
});
|
||||
|
||||
try astgen.extra.ensureUnusedCapacity(gpa, bit_bag.items.len +
|
||||
@ -8150,6 +8155,189 @@ fn nodeMayEvalToError(tree: *const ast.Tree, start_node: ast.Node.Index) enum {
|
||||
}
|
||||
}
|
||||
|
||||
fn nodeImpliesRuntimeBits(tree: *const ast.Tree, start_node: ast.Node.Index) bool {
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
|
||||
var node = start_node;
|
||||
while (true) {
|
||||
switch (node_tags[node]) {
|
||||
.root,
|
||||
.@"usingnamespace",
|
||||
.test_decl,
|
||||
.switch_case,
|
||||
.switch_case_one,
|
||||
.container_field_init,
|
||||
.container_field_align,
|
||||
.container_field,
|
||||
.asm_output,
|
||||
.asm_input,
|
||||
.global_var_decl,
|
||||
.local_var_decl,
|
||||
.simple_var_decl,
|
||||
.aligned_var_decl,
|
||||
=> unreachable,
|
||||
|
||||
.@"return",
|
||||
.@"break",
|
||||
.@"continue",
|
||||
.bit_not,
|
||||
.bool_not,
|
||||
.@"defer",
|
||||
.@"errdefer",
|
||||
.address_of,
|
||||
.negation,
|
||||
.negation_wrap,
|
||||
.@"resume",
|
||||
.array_type,
|
||||
.@"suspend",
|
||||
.@"anytype",
|
||||
.fn_decl,
|
||||
.anyframe_literal,
|
||||
.integer_literal,
|
||||
.float_literal,
|
||||
.enum_literal,
|
||||
.string_literal,
|
||||
.multiline_string_literal,
|
||||
.char_literal,
|
||||
.true_literal,
|
||||
.false_literal,
|
||||
.null_literal,
|
||||
.undefined_literal,
|
||||
.unreachable_literal,
|
||||
.identifier,
|
||||
.error_set_decl,
|
||||
.container_decl,
|
||||
.container_decl_trailing,
|
||||
.container_decl_two,
|
||||
.container_decl_two_trailing,
|
||||
.container_decl_arg,
|
||||
.container_decl_arg_trailing,
|
||||
.tagged_union,
|
||||
.tagged_union_trailing,
|
||||
.tagged_union_two,
|
||||
.tagged_union_two_trailing,
|
||||
.tagged_union_enum_tag,
|
||||
.tagged_union_enum_tag_trailing,
|
||||
.@"asm",
|
||||
.asm_simple,
|
||||
.add,
|
||||
.add_wrap,
|
||||
.array_cat,
|
||||
.array_mult,
|
||||
.assign,
|
||||
.assign_bit_and,
|
||||
.assign_bit_or,
|
||||
.assign_bit_shift_left,
|
||||
.assign_bit_shift_right,
|
||||
.assign_bit_xor,
|
||||
.assign_div,
|
||||
.assign_sub,
|
||||
.assign_sub_wrap,
|
||||
.assign_mod,
|
||||
.assign_add,
|
||||
.assign_add_wrap,
|
||||
.assign_mul,
|
||||
.assign_mul_wrap,
|
||||
.bang_equal,
|
||||
.bit_and,
|
||||
.bit_or,
|
||||
.bit_shift_left,
|
||||
.bit_shift_right,
|
||||
.bit_xor,
|
||||
.bool_and,
|
||||
.bool_or,
|
||||
.div,
|
||||
.equal_equal,
|
||||
.error_union,
|
||||
.greater_or_equal,
|
||||
.greater_than,
|
||||
.less_or_equal,
|
||||
.less_than,
|
||||
.merge_error_sets,
|
||||
.mod,
|
||||
.mul,
|
||||
.mul_wrap,
|
||||
.switch_range,
|
||||
.field_access,
|
||||
.sub,
|
||||
.sub_wrap,
|
||||
.slice,
|
||||
.slice_open,
|
||||
.slice_sentinel,
|
||||
.deref,
|
||||
.array_access,
|
||||
.error_value,
|
||||
.while_simple,
|
||||
.while_cont,
|
||||
.for_simple,
|
||||
.if_simple,
|
||||
.@"catch",
|
||||
.@"orelse",
|
||||
.array_init_one,
|
||||
.array_init_one_comma,
|
||||
.array_init_dot_two,
|
||||
.array_init_dot_two_comma,
|
||||
.array_init_dot,
|
||||
.array_init_dot_comma,
|
||||
.array_init,
|
||||
.array_init_comma,
|
||||
.struct_init_one,
|
||||
.struct_init_one_comma,
|
||||
.struct_init_dot_two,
|
||||
.struct_init_dot_two_comma,
|
||||
.struct_init_dot,
|
||||
.struct_init_dot_comma,
|
||||
.struct_init,
|
||||
.struct_init_comma,
|
||||
.@"while",
|
||||
.@"if",
|
||||
.@"for",
|
||||
.@"switch",
|
||||
.switch_comma,
|
||||
.call_one,
|
||||
.call_one_comma,
|
||||
.async_call_one,
|
||||
.async_call_one_comma,
|
||||
.call,
|
||||
.call_comma,
|
||||
.async_call,
|
||||
.async_call_comma,
|
||||
.block_two,
|
||||
.block_two_semicolon,
|
||||
.block,
|
||||
.block_semicolon,
|
||||
.builtin_call,
|
||||
.builtin_call_comma,
|
||||
.builtin_call_two,
|
||||
.builtin_call_two_comma,
|
||||
=> return false,
|
||||
|
||||
// Forward the question to the LHS sub-expression.
|
||||
.grouped_expression,
|
||||
.@"try",
|
||||
.@"await",
|
||||
.@"comptime",
|
||||
.@"nosuspend",
|
||||
.unwrap_optional,
|
||||
=> node = node_datas[node].lhs,
|
||||
|
||||
.fn_proto_simple,
|
||||
.fn_proto_multi,
|
||||
.fn_proto_one,
|
||||
.fn_proto,
|
||||
.ptr_type_aligned,
|
||||
.ptr_type_sentinel,
|
||||
.ptr_type,
|
||||
.ptr_type_bit_range,
|
||||
.optional_type,
|
||||
.anyframe_type,
|
||||
.array_type_sentinel,
|
||||
=> return true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Applies `rl` semantics to `inst`. Expressions which do not do their own handling of
|
||||
/// result locations must call this function on their result.
|
||||
/// As an example, if the `ResultLoc` is `ptr`, it will write the result to the pointer.
|
||||
@ -9556,6 +9744,7 @@ const GenZir = struct {
|
||||
fields_len: u32,
|
||||
decls_len: u32,
|
||||
layout: std.builtin.TypeInfo.ContainerLayout,
|
||||
known_has_bits: bool,
|
||||
}) !void {
|
||||
const astgen = gz.astgen;
|
||||
const gpa = astgen.gpa;
|
||||
@ -9585,6 +9774,7 @@ const GenZir = struct {
|
||||
.has_body_len = args.body_len != 0,
|
||||
.has_fields_len = args.fields_len != 0,
|
||||
.has_decls_len = args.decls_len != 0,
|
||||
.known_has_bits = args.known_has_bits,
|
||||
.name_strategy = gz.anon_name_strategy,
|
||||
.layout = args.layout,
|
||||
}),
|
||||
|
||||
@ -622,7 +622,7 @@ pub const InitOptions = struct {
|
||||
global_cache_directory: Directory,
|
||||
target: Target,
|
||||
root_name: []const u8,
|
||||
root_pkg: ?*Package,
|
||||
main_pkg: ?*Package,
|
||||
output_mode: std.builtin.OutputMode,
|
||||
thread_pool: *ThreadPool,
|
||||
dynamic_linker: ?[]const u8 = null,
|
||||
@ -826,7 +826,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
const ofmt = options.object_format orelse options.target.getObjectFormat();
|
||||
|
||||
const use_stage1 = options.use_stage1 orelse blk: {
|
||||
// Even though we may have no Zig code to compile (depending on `options.root_pkg`),
|
||||
// Even though we may have no Zig code to compile (depending on `options.main_pkg`),
|
||||
// we may need to use stage1 for building compiler-rt and other dependencies.
|
||||
|
||||
if (build_options.omit_stage2)
|
||||
@ -846,7 +846,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
break :blk explicit;
|
||||
|
||||
// If we have no zig code to compile, no need for LLVM.
|
||||
if (options.root_pkg == null)
|
||||
if (options.main_pkg == null)
|
||||
break :blk false;
|
||||
|
||||
// If we are outputting .c code we must use Zig backend.
|
||||
@ -929,7 +929,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
if (use_llvm) {
|
||||
// If stage1 generates an object file, self-hosted linker is not
|
||||
// yet sophisticated enough to handle that.
|
||||
break :blk options.root_pkg != null;
|
||||
break :blk options.main_pkg != null;
|
||||
}
|
||||
|
||||
break :blk false;
|
||||
@ -1159,7 +1159,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
if (options.target.os.tag == .wasi) cache.hash.add(wasi_exec_model);
|
||||
// TODO audit this and make sure everything is in it
|
||||
|
||||
const module: ?*Module = if (options.root_pkg) |root_pkg| blk: {
|
||||
const module: ?*Module = if (options.main_pkg) |main_pkg| blk: {
|
||||
// Options that are specific to zig source files, that cannot be
|
||||
// modified between incremental updates.
|
||||
var hash = cache.hash;
|
||||
@ -1169,13 +1169,13 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
// incremental compilation will handle it, but we do want to namespace different
|
||||
// source file names because they are likely different compilations and therefore this
|
||||
// would be likely to cause cache hits.
|
||||
hash.addBytes(root_pkg.root_src_path);
|
||||
hash.addOptionalBytes(root_pkg.root_src_directory.path);
|
||||
hash.addBytes(main_pkg.root_src_path);
|
||||
hash.addOptionalBytes(main_pkg.root_src_directory.path);
|
||||
{
|
||||
var local_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
defer local_arena.deinit();
|
||||
var seen_table = std.AutoHashMap(*Package, void).init(&local_arena.allocator);
|
||||
try addPackageTableToCacheHash(&hash, &local_arena, root_pkg.table, &seen_table, .path_bytes);
|
||||
try addPackageTableToCacheHash(&hash, &local_arena, main_pkg.table, &seen_table, .path_bytes);
|
||||
}
|
||||
hash.add(valgrind);
|
||||
hash.add(single_threaded);
|
||||
@ -1212,9 +1212,26 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
);
|
||||
errdefer std_pkg.destroy(gpa);
|
||||
|
||||
try root_pkg.addAndAdopt(gpa, "builtin", builtin_pkg);
|
||||
try root_pkg.add(gpa, "root", root_pkg);
|
||||
try root_pkg.addAndAdopt(gpa, "std", std_pkg);
|
||||
const root_pkg = if (options.is_test) root_pkg: {
|
||||
const test_pkg = try Package.createWithDir(
|
||||
gpa,
|
||||
options.zig_lib_directory,
|
||||
"std" ++ std.fs.path.sep_str ++ "special",
|
||||
"test_runner.zig",
|
||||
);
|
||||
errdefer test_pkg.destroy(gpa);
|
||||
|
||||
try test_pkg.add(gpa, "builtin", builtin_pkg);
|
||||
try test_pkg.add(gpa, "root", test_pkg);
|
||||
try test_pkg.add(gpa, "std", std_pkg);
|
||||
|
||||
break :root_pkg test_pkg;
|
||||
} else main_pkg;
|
||||
errdefer if (options.is_test) root_pkg.destroy(gpa);
|
||||
|
||||
try main_pkg.addAndAdopt(gpa, "builtin", builtin_pkg);
|
||||
try main_pkg.add(gpa, "root", root_pkg);
|
||||
try main_pkg.addAndAdopt(gpa, "std", std_pkg);
|
||||
|
||||
try std_pkg.add(gpa, "builtin", builtin_pkg);
|
||||
try std_pkg.add(gpa, "root", root_pkg);
|
||||
@ -1258,6 +1275,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
module.* = .{
|
||||
.gpa = gpa,
|
||||
.comp = comp,
|
||||
.main_pkg = main_pkg,
|
||||
.root_pkg = root_pkg,
|
||||
.zig_cache_artifact_directory = zig_cache_artifact_directory,
|
||||
.global_zir_cache = global_zir_cache,
|
||||
@ -1684,7 +1702,7 @@ pub fn update(self: *Compilation) !void {
|
||||
|
||||
// Make sure std.zig is inside the import_table. We unconditionally need
|
||||
// it for start.zig.
|
||||
const std_pkg = module.root_pkg.table.get("std").?;
|
||||
const std_pkg = module.main_pkg.table.get("std").?;
|
||||
_ = try module.importPkg(std_pkg);
|
||||
|
||||
// Normally we rely on importing std to in turn import the root source file
|
||||
@ -1692,7 +1710,7 @@ pub fn update(self: *Compilation) !void {
|
||||
// so in order to run AstGen on the root source file we put it into the
|
||||
// import_table here.
|
||||
if (use_stage1) {
|
||||
_ = try module.importPkg(module.root_pkg);
|
||||
_ = try module.importPkg(module.main_pkg);
|
||||
}
|
||||
|
||||
// Put a work item in for every known source file to detect if
|
||||
@ -3873,7 +3891,7 @@ fn buildOutputFromZig(
|
||||
var special_dir = try comp.zig_lib_directory.handle.openDir(special_sub, .{});
|
||||
defer special_dir.close();
|
||||
|
||||
var root_pkg: Package = .{
|
||||
var main_pkg: Package = .{
|
||||
.root_src_directory = .{
|
||||
.path = special_path,
|
||||
.handle = special_dir,
|
||||
@ -3899,7 +3917,7 @@ fn buildOutputFromZig(
|
||||
.zig_lib_directory = comp.zig_lib_directory,
|
||||
.target = target,
|
||||
.root_name = root_name,
|
||||
.root_pkg = &root_pkg,
|
||||
.main_pkg = &main_pkg,
|
||||
.output_mode = output_mode,
|
||||
.thread_pool = comp.thread_pool,
|
||||
.libc_installation = comp.bin_file.options.libc_installation,
|
||||
@ -3969,8 +3987,8 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
// Here we use the legacy stage1 C++ compiler to compile Zig code.
|
||||
const mod = comp.bin_file.options.module.?;
|
||||
const directory = mod.zig_cache_artifact_directory; // Just an alias to make it shorter to type.
|
||||
const main_zig_file = try mod.root_pkg.root_src_directory.join(arena, &[_][]const u8{
|
||||
mod.root_pkg.root_src_path,
|
||||
const main_zig_file = try mod.main_pkg.root_src_directory.join(arena, &[_][]const u8{
|
||||
mod.main_pkg.root_src_path,
|
||||
});
|
||||
const zig_lib_dir = comp.zig_lib_directory.path.?;
|
||||
const builtin_zig_path = try directory.join(arena, &[_][]const u8{"builtin.zig"});
|
||||
@ -4002,7 +4020,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
_ = try man.addFile(main_zig_file, null);
|
||||
{
|
||||
var seen_table = std.AutoHashMap(*Package, void).init(&arena_allocator.allocator);
|
||||
try addPackageTableToCacheHash(&man.hash, &arena_allocator, mod.root_pkg.table, &seen_table, .{ .files = &man });
|
||||
try addPackageTableToCacheHash(&man.hash, &arena_allocator, mod.main_pkg.table, &seen_table, .{ .files = &man });
|
||||
}
|
||||
man.hash.add(comp.bin_file.options.valgrind);
|
||||
man.hash.add(comp.bin_file.options.single_threaded);
|
||||
@ -4045,7 +4063,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
&prev_digest_buf,
|
||||
) catch |err| blk: {
|
||||
log.debug("stage1 {s} new_digest={s} error: {s}", .{
|
||||
mod.root_pkg.root_src_path,
|
||||
mod.main_pkg.root_src_path,
|
||||
std.fmt.fmtSliceHexLower(&digest),
|
||||
@errorName(err),
|
||||
});
|
||||
@ -4057,7 +4075,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
break :hit;
|
||||
|
||||
log.debug("stage1 {s} digest={s} match - skipping invocation", .{
|
||||
mod.root_pkg.root_src_path,
|
||||
mod.main_pkg.root_src_path,
|
||||
std.fmt.fmtSliceHexLower(&digest),
|
||||
});
|
||||
var flags_bytes: [1]u8 = undefined;
|
||||
@ -4083,7 +4101,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
return;
|
||||
}
|
||||
log.debug("stage1 {s} prev_digest={s} new_digest={s}", .{
|
||||
mod.root_pkg.root_src_path,
|
||||
mod.main_pkg.root_src_path,
|
||||
std.fmt.fmtSliceHexLower(prev_digest),
|
||||
std.fmt.fmtSliceHexLower(&digest),
|
||||
});
|
||||
@ -4109,7 +4127,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
|
||||
comp.stage1_cache_manifest = &man;
|
||||
|
||||
const main_pkg_path = mod.root_pkg.root_src_directory.path orelse "";
|
||||
const main_pkg_path = mod.main_pkg.root_src_directory.path orelse "";
|
||||
|
||||
const stage1_module = stage1.create(
|
||||
@enumToInt(comp.bin_file.options.optimize_mode),
|
||||
@ -4142,7 +4160,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
const emit_llvm_bc_path = try stage1LocPath(arena, comp.emit_llvm_bc, directory);
|
||||
const emit_analysis_path = try stage1LocPath(arena, comp.emit_analysis, directory);
|
||||
const emit_docs_path = try stage1LocPath(arena, comp.emit_docs, directory);
|
||||
const stage1_pkg = try createStage1Pkg(arena, "root", mod.root_pkg, null);
|
||||
const stage1_pkg = try createStage1Pkg(arena, "root", mod.main_pkg, null);
|
||||
const test_filter = comp.test_filter orelse ""[0..0];
|
||||
const test_name_prefix = comp.test_name_prefix orelse ""[0..0];
|
||||
const subsystem = if (comp.bin_file.options.subsystem) |s|
|
||||
@ -4173,7 +4191,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
.test_name_prefix_ptr = test_name_prefix.ptr,
|
||||
.test_name_prefix_len = test_name_prefix.len,
|
||||
.userdata = @ptrToInt(comp),
|
||||
.root_pkg = stage1_pkg,
|
||||
.main_pkg = stage1_pkg,
|
||||
.code_model = @enumToInt(comp.bin_file.options.machine_code_model),
|
||||
.subsystem = subsystem,
|
||||
.err_color = @enumToInt(comp.color),
|
||||
@ -4239,7 +4257,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
|
||||
// means that the next invocation will have an unnecessary cache miss.
|
||||
const stage1_flags_byte = @bitCast(u8, mod.stage1_flags);
|
||||
log.debug("stage1 {s} final digest={s} flags={x}", .{
|
||||
mod.root_pkg.root_src_path, std.fmt.fmtSliceHexLower(&digest), stage1_flags_byte,
|
||||
mod.main_pkg.root_src_path, std.fmt.fmtSliceHexLower(&digest), stage1_flags_byte,
|
||||
});
|
||||
var digest_plus_flags: [digest.len + 2]u8 = undefined;
|
||||
digest_plus_flags[0..digest.len].* = digest;
|
||||
@ -4333,7 +4351,7 @@ pub fn build_crt_file(
|
||||
.zig_lib_directory = comp.zig_lib_directory,
|
||||
.target = target,
|
||||
.root_name = root_name,
|
||||
.root_pkg = null,
|
||||
.main_pkg = null,
|
||||
.output_mode = output_mode,
|
||||
.thread_pool = comp.thread_pool,
|
||||
.libc_installation = comp.bin_file.options.libc_installation,
|
||||
|
||||
@ -243,6 +243,8 @@ fn analyzeInst(
|
||||
.bool_and,
|
||||
.bool_or,
|
||||
.store,
|
||||
.slice_elem_val,
|
||||
.ptr_slice_elem_val,
|
||||
=> {
|
||||
const o = inst_datas[inst].bin_op;
|
||||
return trackOperands(a, new_set, inst, main_tomb, .{ o.lhs, o.rhs, .none });
|
||||
@ -273,6 +275,8 @@ fn analyzeInst(
|
||||
.unwrap_errunion_err_ptr,
|
||||
.wrap_errunion_payload,
|
||||
.wrap_errunion_err,
|
||||
.slice_ptr,
|
||||
.slice_len,
|
||||
=> {
|
||||
const o = inst_datas[inst].ty_op;
|
||||
return trackOperands(a, new_set, inst, main_tomb, .{ o.operand, .none, .none });
|
||||
|
||||
@@ -35,8 +35,11 @@ comp: *Compilation,
 
 /// Where our incremental compilation metadata serialization will go.
 zig_cache_artifact_directory: Compilation.Directory,
-/// Pointer to externally managed resource. `null` if there is no zig file being compiled.
+/// Pointer to externally managed resource.
 root_pkg: *Package,
+/// Normally, `main_pkg` and `root_pkg` are the same. The exception is `zig test`, in which
+/// `root_pkg` is the test runner, and `main_pkg` is the user's source file which has the tests.
+main_pkg: *Package,
 
 /// Used by AstGen worker to load and store ZIR cache.
 global_zir_cache: Compilation.Directory,
@@ -598,6 +601,9 @@ pub const Struct = struct {
         layout_wip,
         have_layout,
     },
+    /// If true, definitely nonzero size at runtime. If false, resolving the fields
+    /// is necessary to determine whether it has bits at runtime.
+    known_has_bits: bool,
 
     pub const Field = struct {
         /// Uses `noreturn` to indicate `anytype`.

@ -2048,19 +2054,22 @@ pub fn deinit(mod: *Module) void {
|
||||
|
||||
mod.deletion_set.deinit(gpa);
|
||||
|
||||
// The callsite of `Compilation.create` owns the `root_pkg`, however
|
||||
// The callsite of `Compilation.create` owns the `main_pkg`, however
|
||||
// Module owns the builtin and std packages that it adds.
|
||||
if (mod.root_pkg.table.fetchRemove("builtin")) |kv| {
|
||||
if (mod.main_pkg.table.fetchRemove("builtin")) |kv| {
|
||||
gpa.free(kv.key);
|
||||
kv.value.destroy(gpa);
|
||||
}
|
||||
if (mod.root_pkg.table.fetchRemove("std")) |kv| {
|
||||
if (mod.main_pkg.table.fetchRemove("std")) |kv| {
|
||||
gpa.free(kv.key);
|
||||
kv.value.destroy(gpa);
|
||||
}
|
||||
if (mod.root_pkg.table.fetchRemove("root")) |kv| {
|
||||
if (mod.main_pkg.table.fetchRemove("root")) |kv| {
|
||||
gpa.free(kv.key);
|
||||
}
|
||||
if (mod.root_pkg != mod.main_pkg) {
|
||||
mod.root_pkg.destroy(gpa);
|
||||
}
|
||||
|
||||
mod.compile_log_text.deinit(gpa);
|
||||
|
||||
@ -2148,7 +2157,7 @@ pub fn astGenFile(mod: *Module, file: *Scope.File) !void {
|
||||
|
||||
const stat = try source_file.stat();
|
||||
|
||||
const want_local_cache = file.pkg == mod.root_pkg;
|
||||
const want_local_cache = file.pkg == mod.main_pkg;
|
||||
const digest = hash: {
|
||||
var path_hash: Cache.HashHelper = .{};
|
||||
path_hash.addBytes(build_options.version);
|
||||
@ -2792,6 +2801,7 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
|
||||
.zir_index = undefined, // set below
|
||||
.layout = .Auto,
|
||||
.status = .none,
|
||||
.known_has_bits = undefined,
|
||||
.namespace = .{
|
||||
.parent = null,
|
||||
.ty = struct_ty,
|
||||
@@ -3301,10 +3311,23 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) SemaError!voi
     gop.value_ptr.* = new_decl;
     // Exported decls, comptime decls, usingnamespace decls, and
     // test decls if in test mode, get analyzed.
+    const decl_pkg = namespace.file_scope.pkg;
     const want_analysis = is_exported or switch (decl_name_index) {
         0 => true, // comptime decl
-        1 => mod.comp.bin_file.options.is_test, // test decl
-        else => is_named_test and mod.comp.bin_file.options.is_test,
+        1 => blk: {
+            // test decl with no name. Skip the part where we check against
+            // the test name filter.
+            if (!mod.comp.bin_file.options.is_test) break :blk false;
+            if (decl_pkg != mod.main_pkg) break :blk false;
+            break :blk true;
+        },
+        else => blk: {
+            if (!is_named_test) break :blk false;
+            if (!mod.comp.bin_file.options.is_test) break :blk false;
+            if (decl_pkg != mod.main_pkg) break :blk false;
+            // TODO check the name against --test-filter
+            break :blk true;
+        },
     };
     if (want_analysis) {
         mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });

src/Sema.zig (174 changed lines)
@ -768,6 +768,8 @@ pub fn analyzeStructDecl(
|
||||
assert(extended.opcode == .struct_decl);
|
||||
const small = @bitCast(Zir.Inst.StructDecl.Small, extended.small);
|
||||
|
||||
struct_obj.known_has_bits = small.known_has_bits;
|
||||
|
||||
var extra_index: usize = extended.operand;
|
||||
extra_index += @boolToInt(small.has_src_node);
|
||||
extra_index += @boolToInt(small.has_body_len);
|
||||
@ -812,6 +814,7 @@ fn zirStructDecl(
|
||||
.zir_index = inst,
|
||||
.layout = small.layout,
|
||||
.status = .none,
|
||||
.known_has_bits = undefined,
|
||||
.namespace = .{
|
||||
.parent = sema.owner_decl.namespace,
|
||||
.ty = struct_ty,
|
||||
@ -1259,8 +1262,13 @@ fn zirIndexablePtrLen(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Co
|
||||
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
|
||||
const src = inst_data.src();
|
||||
const array_ptr = sema.resolveInst(inst_data.operand);
|
||||
const array_ptr_src = src;
|
||||
|
||||
const elem_ty = sema.typeOf(array_ptr).elemType();
|
||||
if (elem_ty.isSlice()) {
|
||||
const slice_inst = try sema.analyzeLoad(block, src, array_ptr, array_ptr_src);
|
||||
return sema.analyzeSliceLen(block, src, slice_inst);
|
||||
}
|
||||
if (!elem_ty.isIndexable()) {
|
||||
const cond_src: LazySrcLoc = .{ .node_offset_for_cond = inst_data.src_node };
|
||||
const msg = msg: {
|
||||
@ -1283,7 +1291,7 @@ fn zirIndexablePtrLen(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Co
|
||||
return sema.mod.failWithOwnedErrorMsg(&block.base, msg);
|
||||
}
|
||||
const result_ptr = try sema.namedFieldPtr(block, src, array_ptr, "len", src);
|
||||
const result_ptr_src = src;
|
||||
const result_ptr_src = array_ptr_src;
|
||||
return sema.analyzeLoad(block, src, result_ptr, result_ptr_src);
|
||||
}
|
||||
|
||||
@ -2928,17 +2936,15 @@ fn zirErrUnionPayload(
|
||||
return sema.mod.fail(&block.base, src, "caught unexpected error '{s}'", .{name});
|
||||
}
|
||||
const data = val.castTag(.error_union).?.data;
|
||||
return sema.addConstant(
|
||||
operand_ty.castTag(.error_union).?.data.payload,
|
||||
data,
|
||||
);
|
||||
const result_ty = operand_ty.errorUnionPayload();
|
||||
return sema.addConstant(result_ty, data);
|
||||
}
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
if (safety_check and block.wantSafety()) {
|
||||
const is_non_err = try block.addUnOp(.is_err, operand);
|
||||
try sema.addSafetyCheck(block, is_non_err, .unwrap_errunion);
|
||||
}
|
||||
const result_ty = operand_ty.castTag(.error_union).?.data.payload;
|
||||
const result_ty = operand_ty.errorUnionPayload();
|
||||
return block.addTyOp(.unwrap_errunion_payload, result_ty, operand);
|
||||
}
|
||||
|
||||
@ -2961,7 +2967,8 @@ fn zirErrUnionPayloadPtr(
|
||||
if (operand_ty.elemType().zigTypeTag() != .ErrorUnion)
|
||||
return sema.mod.fail(&block.base, src, "expected error union type, found {}", .{operand_ty.elemType()});
|
||||
|
||||
const operand_pointer_ty = try Module.simplePtrType(sema.arena, operand_ty.elemType().castTag(.error_union).?.data.payload, !operand_ty.isConstPtr(), .One);
|
||||
const payload_ty = operand_ty.elemType().errorUnionPayload();
|
||||
const operand_pointer_ty = try Module.simplePtrType(sema.arena, payload_ty, !operand_ty.isConstPtr(), .One);
|
||||
|
||||
if (try sema.resolveDefinedValue(block, src, operand)) |pointer_val| {
|
||||
const val = try pointer_val.pointerDeref(sema.arena);
|
||||
@ -2999,7 +3006,7 @@ fn zirErrUnionCode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Compi
|
||||
if (operand_ty.zigTypeTag() != .ErrorUnion)
|
||||
return sema.mod.fail(&block.base, src, "expected error union type, found '{}'", .{operand_ty});
|
||||
|
||||
const result_ty = operand_ty.castTag(.error_union).?.data.error_set;
|
||||
const result_ty = operand_ty.errorUnionSet();
|
||||
|
||||
if (try sema.resolveDefinedValue(block, src, operand)) |val| {
|
||||
assert(val.getError() != null);
|
||||
@ -3025,7 +3032,7 @@ fn zirErrUnionCodePtr(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Co
|
||||
if (operand_ty.elemType().zigTypeTag() != .ErrorUnion)
|
||||
return sema.mod.fail(&block.base, src, "expected error union type, found {}", .{operand_ty.elemType()});
|
||||
|
||||
const result_ty = operand_ty.elemType().castTag(.error_union).?.data.error_set;
|
||||
const result_ty = operand_ty.elemType().errorUnionSet();
|
||||
|
||||
if (try sema.resolveDefinedValue(block, src, operand)) |pointer_val| {
|
||||
const val = try pointer_val.pointerDeref(sema.arena);
|
||||
@ -3048,7 +3055,7 @@ fn zirEnsureErrPayloadVoid(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Inde
|
||||
const operand_ty = sema.typeOf(operand);
|
||||
if (operand_ty.zigTypeTag() != .ErrorUnion)
|
||||
return sema.mod.fail(&block.base, src, "expected error union type, found '{}'", .{operand_ty});
|
||||
if (operand_ty.castTag(.error_union).?.data.payload.zigTypeTag() != .Void) {
|
||||
if (operand_ty.errorUnionPayload().zigTypeTag() != .Void) {
|
||||
return sema.mod.fail(&block.base, src, "expression value is ignored", .{});
|
||||
}
|
||||
}
|
||||
@ -3460,14 +3467,8 @@ fn zirElemVal(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileErr
|
||||
|
||||
const bin_inst = sema.code.instructions.items(.data)[inst].bin;
|
||||
const array = sema.resolveInst(bin_inst.lhs);
|
||||
const array_ty = sema.typeOf(array);
|
||||
const array_ptr = if (array_ty.zigTypeTag() == .Pointer)
|
||||
array
|
||||
else
|
||||
try sema.analyzeRef(block, sema.src, array);
|
||||
const elem_index = sema.resolveInst(bin_inst.rhs);
|
||||
const result_ptr = try sema.elemPtr(block, sema.src, array_ptr, elem_index, sema.src);
|
||||
return sema.analyzeLoad(block, sema.src, result_ptr, sema.src);
|
||||
return sema.elemVal(block, sema.src, array, elem_index, sema.src);
|
||||
}
|
||||
|
||||
fn zirElemValNode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
||||
@ -3479,14 +3480,8 @@ fn zirElemValNode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Compil
|
||||
const elem_index_src: LazySrcLoc = .{ .node_offset_array_access_index = inst_data.src_node };
|
||||
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
|
||||
const array = sema.resolveInst(extra.lhs);
|
||||
const array_ty = sema.typeOf(array);
|
||||
const array_ptr = if (array_ty.zigTypeTag() == .Pointer)
|
||||
array
|
||||
else
|
||||
try sema.analyzeRef(block, src, array);
|
||||
const elem_index = sema.resolveInst(extra.rhs);
|
||||
const result_ptr = try sema.elemPtr(block, src, array_ptr, elem_index, elem_index_src);
|
||||
return sema.analyzeLoad(block, src, result_ptr, src);
|
||||
return sema.elemVal(block, src, array, elem_index, elem_index_src);
|
||||
}
|
||||
|
||||
fn zirElemPtr(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
||||
@ -5338,7 +5333,7 @@ fn zirBoolBr(
|
||||
|
||||
try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.CondBr).Struct.fields.len +
|
||||
then_block.instructions.items.len + else_block.instructions.items.len +
|
||||
@typeInfo(Air.Block).Struct.fields.len + child_block.instructions.items.len);
|
||||
@typeInfo(Air.Block).Struct.fields.len + child_block.instructions.items.len + 1);
|
||||
|
||||
const cond_br_payload = sema.addExtraAssumeCapacity(Air.CondBr{
|
||||
.then_body_len = @intCast(u32, then_block.instructions.items.len),
|
||||
@ -6217,8 +6212,9 @@ fn zirVarExtended(
|
||||
const init_val: Value = if (small.has_init) blk: {
|
||||
const init_ref = @intToEnum(Zir.Inst.Ref, sema.code.extra[extra_index]);
|
||||
extra_index += 1;
|
||||
const init_tv = try sema.resolveInstConst(block, init_src, init_ref);
|
||||
break :blk init_tv.val;
|
||||
const init_air_inst = sema.resolveInst(init_ref);
|
||||
break :blk (try sema.resolvePossiblyUndefinedValue(block, init_src, init_air_inst)) orelse
|
||||
return sema.failWithNeededComptime(block, init_src);
|
||||
} else Value.initTag(.unreachable_value);
|
||||
|
||||
if (!var_ty.isValidVarType(small.is_extern)) {
|
||||
@ -6586,7 +6582,30 @@ fn namedFieldPtr(
|
||||
},
|
||||
.Pointer => {
|
||||
const ptr_child = elem_ty.elemType();
|
||||
switch (ptr_child.zigTypeTag()) {
|
||||
if (ptr_child.isSlice()) {
|
||||
if (mem.eql(u8, field_name, "ptr")) {
|
||||
return mod.fail(
|
||||
&block.base,
|
||||
field_name_src,
|
||||
"cannot obtain reference to pointer field of slice '{}'",
|
||||
.{elem_ty},
|
||||
);
|
||||
} else if (mem.eql(u8, field_name, "len")) {
|
||||
return mod.fail(
|
||||
&block.base,
|
||||
field_name_src,
|
||||
"cannot obtain reference to length field of slice '{}'",
|
||||
.{elem_ty},
|
||||
);
|
||||
} else {
|
||||
return mod.fail(
|
||||
&block.base,
|
||||
field_name_src,
|
||||
"no member named '{s}' in '{}'",
|
||||
.{ field_name, elem_ty },
|
||||
);
|
||||
}
|
||||
} else switch (ptr_child.zigTypeTag()) {
|
||||
.Array => {
|
||||
if (mem.eql(u8, field_name, "len")) {
|
||||
return sema.addConstant(
|
||||
@ -6836,6 +6855,50 @@ fn elemPtr(
|
||||
return sema.mod.fail(&block.base, src, "TODO implement more analyze elemptr", .{});
|
||||
}
|
||||
|
||||
fn elemVal(
|
||||
sema: *Sema,
|
||||
block: *Scope.Block,
|
||||
src: LazySrcLoc,
|
||||
array_maybe_ptr: Air.Inst.Ref,
|
||||
elem_index: Air.Inst.Ref,
|
||||
elem_index_src: LazySrcLoc,
|
||||
) CompileError!Air.Inst.Ref {
|
||||
const array_ptr_src = src; // TODO better source location
|
||||
const maybe_ptr_ty = sema.typeOf(array_maybe_ptr);
|
||||
if (maybe_ptr_ty.isSinglePointer()) {
|
||||
const indexable_ty = maybe_ptr_ty.elemType();
|
||||
if (indexable_ty.isSlice()) {
|
||||
// We have a pointer to a slice and we want an element value.
|
||||
if (try sema.isComptimeKnown(block, src, array_maybe_ptr)) {
|
||||
const slice = try sema.analyzeLoad(block, src, array_maybe_ptr, array_ptr_src);
|
||||
if (try sema.resolveDefinedValue(block, src, slice)) |slice_val| {
|
||||
_ = slice_val;
|
||||
return sema.mod.fail(&block.base, src, "TODO implement Sema for elemVal for comptime known slice", .{});
|
||||
}
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
return block.addBinOp(.slice_elem_val, slice, elem_index);
|
||||
}
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
return block.addBinOp(.ptr_slice_elem_val, array_maybe_ptr, elem_index);
|
||||
}
|
||||
}
|
||||
if (maybe_ptr_ty.isSlice()) {
|
||||
if (try sema.resolveDefinedValue(block, src, array_maybe_ptr)) |slice_val| {
|
||||
_ = slice_val;
|
||||
return sema.mod.fail(&block.base, src, "TODO implement Sema for elemVal for comptime known slice", .{});
|
||||
}
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
return block.addBinOp(.slice_elem_val, array_maybe_ptr, elem_index);
|
||||
}
|
||||
|
||||
const array_ptr = if (maybe_ptr_ty.zigTypeTag() == .Pointer)
|
||||
array_maybe_ptr
|
||||
else
|
||||
try sema.analyzeRef(block, src, array_maybe_ptr);
|
||||
const ptr = try sema.elemPtr(block, src, array_ptr, elem_index, elem_index_src);
|
||||
return sema.analyzeLoad(block, src, ptr, elem_index_src);
|
||||
}
|
||||
|
||||
fn elemPtrArray(
|
||||
sema: *Sema,
|
||||
block: *Scope.Block,
|
||||
@ -6896,11 +6959,6 @@ fn coerce(
|
||||
}
|
||||
assert(inst_ty.zigTypeTag() != .Undefined);
|
||||
|
||||
// T to E!T or E to E!T
|
||||
if (dest_type.tag() == .error_union) {
|
||||
return try sema.wrapErrorUnion(block, dest_type, inst, inst_src);
|
||||
}
|
||||
|
||||
// comptime known number to other number
|
||||
if (try sema.coerceNum(block, dest_type, inst, inst_src)) |some|
|
||||
return some;
|
||||
@ -7028,6 +7086,10 @@ fn coerce(
|
||||
);
|
||||
}
|
||||
},
|
||||
.ErrorUnion => {
|
||||
// T to E!T or E to E!T
|
||||
return sema.wrapErrorUnion(block, dest_type, inst, inst_src);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
@ -7257,16 +7319,13 @@ fn analyzeVarRef(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, tv: TypedVal
|
||||
const gpa = sema.gpa;
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
try sema.air_variables.append(gpa, variable);
|
||||
const result_inst = @intCast(Air.Inst.Index, sema.air_instructions.len);
|
||||
try sema.air_instructions.append(gpa, .{
|
||||
return block.addInst(.{
|
||||
.tag = .varptr,
|
||||
.data = .{ .ty_pl = .{
|
||||
.ty = try sema.addType(ty),
|
||||
.payload = @intCast(u32, sema.air_variables.items.len - 1),
|
||||
} },
|
||||
});
|
||||
try block.instructions.append(gpa, result_inst);
|
||||
return Air.indexToRef(result_inst);
|
||||
}
|
||||
|
||||
fn analyzeRef(
|
||||
@ -7309,6 +7368,22 @@ fn analyzeLoad(
|
||||
return block.addTyOp(.load, elem_ty, ptr);
|
||||
}
|
||||
|
||||
fn analyzeSliceLen(
|
||||
sema: *Sema,
|
||||
block: *Scope.Block,
|
||||
src: LazySrcLoc,
|
||||
slice_inst: Air.Inst.Ref,
|
||||
) CompileError!Air.Inst.Ref {
|
||||
if (try sema.resolvePossiblyUndefinedValue(block, src, slice_inst)) |slice_val| {
|
||||
if (slice_val.isUndef()) {
|
||||
return sema.addConstUndef(Type.initTag(.usize));
|
||||
}
|
||||
return sema.mod.fail(&block.base, src, "TODO implement Sema analyzeSliceLen on comptime slice", .{});
|
||||
}
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
return block.addTyOp(.slice_len, Type.initTag(.usize), slice_inst);
|
||||
}
|
||||
|
||||
fn analyzeIsNull(
|
||||
sema: *Sema,
|
||||
block: *Scope.Block,
|
||||
@ -7645,27 +7720,28 @@ fn wrapErrorUnion(
|
||||
inst_src: LazySrcLoc,
|
||||
) !Air.Inst.Ref {
|
||||
const inst_ty = sema.typeOf(inst);
|
||||
const err_union = dest_type.castTag(.error_union).?;
|
||||
const dest_err_set_ty = dest_type.errorUnionSet();
|
||||
const dest_payload_ty = dest_type.errorUnionPayload();
|
||||
if (try sema.resolvePossiblyUndefinedValue(block, inst_src, inst)) |val| {
|
||||
if (inst_ty.zigTypeTag() != .ErrorSet) {
|
||||
_ = try sema.coerce(block, err_union.data.payload, inst, inst_src);
|
||||
} else switch (err_union.data.error_set.tag()) {
|
||||
_ = try sema.coerce(block, dest_payload_ty, inst, inst_src);
|
||||
} else switch (dest_err_set_ty.tag()) {
|
||||
.anyerror => {},
|
||||
.error_set_single => {
|
||||
const expected_name = val.castTag(.@"error").?.data.name;
|
||||
const n = err_union.data.error_set.castTag(.error_set_single).?.data;
|
||||
const n = dest_err_set_ty.castTag(.error_set_single).?.data;
|
||||
if (!mem.eql(u8, expected_name, n)) {
|
||||
return sema.mod.fail(
|
||||
&block.base,
|
||||
inst_src,
|
||||
"expected type '{}', found type '{}'",
|
||||
.{ err_union.data.error_set, inst_ty },
|
||||
.{ dest_err_set_ty, inst_ty },
|
||||
);
|
||||
}
|
||||
},
|
||||
.error_set => {
|
||||
const expected_name = val.castTag(.@"error").?.data.name;
|
||||
const error_set = err_union.data.error_set.castTag(.error_set).?.data;
|
||||
const error_set = dest_err_set_ty.castTag(.error_set).?.data;
|
||||
const names = error_set.names_ptr[0..error_set.names_len];
|
||||
// TODO this is O(N). I'm putting off solving this until we solve inferred
|
||||
// error sets at the same time.
|
||||
@ -7677,19 +7753,19 @@ fn wrapErrorUnion(
|
||||
&block.base,
|
||||
inst_src,
|
||||
"expected type '{}', found type '{}'",
|
||||
.{ err_union.data.error_set, inst_ty },
|
||||
.{ dest_err_set_ty, inst_ty },
|
||||
);
|
||||
}
|
||||
},
|
||||
.error_set_inferred => {
|
||||
const expected_name = val.castTag(.@"error").?.data.name;
|
||||
const map = &err_union.data.error_set.castTag(.error_set_inferred).?.data.map;
|
||||
const map = &dest_err_set_ty.castTag(.error_set_inferred).?.data.map;
|
||||
if (!map.contains(expected_name)) {
|
||||
return sema.mod.fail(
|
||||
&block.base,
|
||||
inst_src,
|
||||
"expected type '{}', found type '{}'",
|
||||
.{ err_union.data.error_set, inst_ty },
|
||||
.{ dest_err_set_ty, inst_ty },
|
||||
);
|
||||
}
|
||||
},
|
||||
@ -7704,10 +7780,10 @@ fn wrapErrorUnion(
|
||||
|
||||
// we are coercing from E to E!T
|
||||
if (inst_ty.zigTypeTag() == .ErrorSet) {
|
||||
var coerced = try sema.coerce(block, err_union.data.error_set, inst, inst_src);
|
||||
var coerced = try sema.coerce(block, dest_err_set_ty, inst, inst_src);
|
||||
return block.addTyOp(.wrap_errunion_err, dest_type, coerced);
|
||||
} else {
|
||||
var coerced = try sema.coerce(block, err_union.data.payload, inst, inst_src);
|
||||
var coerced = try sema.coerce(block, dest_payload_ty, inst, inst_src);
|
||||
return block.addTyOp(.wrap_errunion_payload, dest_type, coerced);
|
||||
}
|
||||
}
|
||||
@ -7857,7 +7933,7 @@ fn getBuiltin(
|
||||
name: []const u8,
|
||||
) CompileError!Air.Inst.Ref {
|
||||
const mod = sema.mod;
|
||||
const std_pkg = mod.root_pkg.table.get("std").?;
|
||||
const std_pkg = mod.main_pkg.table.get("std").?;
|
||||
const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
|
||||
const opt_builtin_inst = try sema.analyzeNamespaceLookup(
|
||||
block,
|
||||
|
||||
@ -2462,9 +2462,10 @@ pub const Inst = struct {
|
||||
has_body_len: bool,
|
||||
has_fields_len: bool,
|
||||
has_decls_len: bool,
|
||||
known_has_bits: bool,
|
||||
name_strategy: NameStrategy,
|
||||
layout: std.builtin.TypeInfo.ContainerLayout,
|
||||
_: u8 = undefined,
|
||||
_: u7 = undefined,
|
||||
};
|
||||
};
|
||||
|
||||
@ -3543,6 +3544,7 @@ const Writer = struct {
|
||||
break :blk decls_len;
|
||||
} else 0;
|
||||
|
||||
try self.writeFlag(stream, "known_has_bits, ", small.known_has_bits);
|
||||
try stream.print("{s}, {s}, ", .{
|
||||
@tagName(small.name_strategy), @tagName(small.layout),
|
||||
});
|
||||
|
||||
@ -853,6 +853,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
.struct_field_ptr=> try self.airStructFieldPtr(inst),
|
||||
.switch_br => try self.airSwitch(inst),
|
||||
.varptr => try self.airVarPtr(inst),
|
||||
.slice_ptr => try self.airSlicePtr(inst),
|
||||
.slice_len => try self.airSliceLen(inst),
|
||||
|
||||
.slice_elem_val => try self.airSliceElemVal(inst),
|
||||
.ptr_slice_elem_val => try self.airPtrSliceElemVal(inst),
|
||||
|
||||
.constant => unreachable, // excluded from function bodies
|
||||
.const_ty => unreachable, // excluded from function bodies
|
||||
@ -1333,6 +1338,38 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
return self.finishAir(inst, result, .{ .none, .none, .none });
|
||||
}
|
||||
|
||||
fn airSlicePtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else switch (arch) {
|
||||
else => return self.fail("TODO implement slice_ptr for {}", .{self.target.cpu.arch}),
|
||||
};
|
||||
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
|
||||
}
|
||||
|
||||
fn airSliceLen(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else switch (arch) {
|
||||
else => return self.fail("TODO implement slice_len for {}", .{self.target.cpu.arch}),
|
||||
};
|
||||
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
|
||||
}
|
||||
|
||||
fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else switch (arch) {
|
||||
else => return self.fail("TODO implement slice_elem_val for {}", .{self.target.cpu.arch}),
|
||||
};
|
||||
return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
|
||||
}
|
||||
|
||||
fn airPtrSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else switch (arch) {
|
||||
else => return self.fail("TODO implement ptr_slice_elem_val for {}", .{self.target.cpu.arch}),
|
||||
};
|
||||
return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
|
||||
}
|
||||
|
||||
fn reuseOperand(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, op_index: Liveness.OperandInt, mcv: MCValue) bool {
|
||||
if (!self.liveness.operandDies(inst, op_index))
|
||||
return false;
|
||||
|
||||
@ -237,7 +237,8 @@ pub const DeclGen = struct {
|
||||
// This should lower to 0xaa bytes in safe modes, and for unsafe modes should
|
||||
// lower to leaving variables uninitialized (that might need to be implemented
|
||||
// outside of this function).
|
||||
return dg.fail("TODO: C backend: implement renderValue undef", .{});
|
||||
return writer.writeAll("{}");
|
||||
//return dg.fail("TODO: C backend: implement renderValue undef", .{});
|
||||
}
|
||||
switch (t.zigTypeTag()) {
|
||||
.Int => {
|
||||
@ -361,18 +362,27 @@ pub const DeclGen = struct {
|
||||
}
|
||||
},
|
||||
.ErrorSet => {
|
||||
const payload = val.castTag(.@"error").?;
|
||||
// error values will be #defined at the top of the file
|
||||
return writer.print("zig_error_{s}", .{payload.data.name});
|
||||
switch (val.tag()) {
|
||||
.@"error" => {
|
||||
const payload = val.castTag(.@"error").?;
|
||||
// error values will be #defined at the top of the file
|
||||
return writer.print("zig_error_{s}", .{payload.data.name});
|
||||
},
|
||||
else => {
|
||||
// In this case we are rendering an error union which has a
|
||||
// 0 bits payload.
|
||||
return writer.writeAll("0");
|
||||
},
|
||||
}
|
||||
},
|
||||
.ErrorUnion => {
|
||||
const error_type = t.errorUnionSet();
|
||||
const payload_type = t.errorUnionChild();
|
||||
const data = val.castTag(.error_union).?.data;
|
||||
const payload_type = t.errorUnionPayload();
|
||||
const sub_val = val.castTag(.error_union).?.data;
|
||||
|
||||
if (!payload_type.hasCodeGenBits()) {
|
||||
// We use the error type directly as the type.
|
||||
return dg.renderValue(writer, error_type, data);
|
||||
return dg.renderValue(writer, error_type, sub_val);
|
||||
}
|
||||
|
||||
try writer.writeByte('(');
|
||||
@ -383,7 +393,7 @@ pub const DeclGen = struct {
|
||||
try dg.renderValue(
|
||||
writer,
|
||||
error_type,
|
||||
data,
|
||||
sub_val,
|
||||
);
|
||||
try writer.writeAll(" }");
|
||||
} else {
|
||||
@ -391,7 +401,7 @@ pub const DeclGen = struct {
|
||||
try dg.renderValue(
|
||||
writer,
|
||||
payload_type,
|
||||
data,
|
||||
sub_val,
|
||||
);
|
||||
try writer.writeAll(", .error = 0 }");
|
||||
}
|
||||
@ -616,7 +626,7 @@ pub const DeclGen = struct {
|
||||
if (dg.typedefs.get(t)) |some| {
|
||||
return w.writeAll(some.name);
|
||||
}
|
||||
const child_type = t.errorUnionChild();
|
||||
const child_type = t.errorUnionPayload();
|
||||
const err_set_type = t.errorUnionSet();
|
||||
|
||||
if (!child_type.hasCodeGenBits()) {
|
||||
@ -926,6 +936,11 @@ fn genBody(o: *Object, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfM
|
||||
.ref => try airRef(o, inst),
|
||||
.struct_field_ptr => try airStructFieldPtr(o, inst),
|
||||
.varptr => try airVarPtr(o, inst),
|
||||
.slice_ptr => try airSliceField(o, inst, ".ptr;\n"),
|
||||
.slice_len => try airSliceField(o, inst, ".len;\n"),
|
||||
|
||||
.slice_elem_val => try airSliceElemVal(o, inst, "["),
|
||||
.ptr_slice_elem_val => try airSliceElemVal(o, inst, "[0]["),
|
||||
|
||||
.unwrap_errunion_payload => try airUnwrapErrUnionPay(o, inst),
|
||||
.unwrap_errunion_err => try airUnwrapErrUnionErr(o, inst),
|
||||
@ -948,6 +963,37 @@ fn genBody(o: *Object, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfM
|
||||
try writer.writeAll("}");
|
||||
}
|
||||
|
||||
fn airSliceField(o: *Object, inst: Air.Inst.Index, suffix: []const u8) !CValue {
|
||||
if (o.liveness.isUnused(inst))
|
||||
return CValue.none;
|
||||
|
||||
const ty_op = o.air.instructions.items(.data)[inst].ty_op;
|
||||
const operand = try o.resolveInst(ty_op.operand);
|
||||
const writer = o.writer();
|
||||
const local = try o.allocLocal(Type.initTag(.usize), .Const);
|
||||
try writer.writeAll(" = ");
|
||||
try o.writeCValue(writer, operand);
|
||||
try writer.writeAll(suffix);
|
||||
return local;
|
||||
}
|
||||
|
||||
fn airSliceElemVal(o: *Object, inst: Air.Inst.Index, prefix: []const u8) !CValue {
|
||||
if (o.liveness.isUnused(inst))
|
||||
return CValue.none;
|
||||
|
||||
const bin_op = o.air.instructions.items(.data)[inst].bin_op;
|
||||
const slice = try o.resolveInst(bin_op.lhs);
|
||||
const index = try o.resolveInst(bin_op.rhs);
|
||||
const writer = o.writer();
|
||||
const local = try o.allocLocal(o.air.typeOfIndex(inst), .Const);
|
||||
try writer.writeAll(" = ");
|
||||
try o.writeCValue(writer, slice);
|
||||
try writer.writeAll(prefix);
|
||||
try o.writeCValue(writer, index);
|
||||
try writer.writeAll("];\n");
|
||||
return local;
|
||||
}
|
||||
|
||||
fn airVarPtr(o: *Object, inst: Air.Inst.Index) !CValue {
|
||||
const ty_pl = o.air.instructions.items(.data)[inst].ty_pl;
|
||||
const variable = o.air.variables[ty_pl.payload];
|
||||
@ -1233,6 +1279,20 @@ fn airCall(o: *Object, inst: Air.Inst.Index) !CValue {
|
||||
const pl_op = o.air.instructions.items(.data)[inst].pl_op;
|
||||
const extra = o.air.extraData(Air.Call, pl_op.payload);
|
||||
const args = @bitCast([]const Air.Inst.Ref, o.air.extra[extra.end..][0..extra.data.args_len]);
|
||||
const fn_ty = o.air.typeOf(pl_op.operand);
|
||||
const ret_ty = fn_ty.fnReturnType();
|
||||
const unused_result = o.liveness.isUnused(inst);
|
||||
const writer = o.writer();
|
||||
|
||||
var result_local: CValue = .none;
|
||||
if (unused_result) {
|
||||
if (ret_ty.hasCodeGenBits()) {
|
||||
try writer.print("(void)", .{});
|
||||
}
|
||||
} else {
|
||||
result_local = try o.allocLocal(ret_ty, .Const);
|
||||
try writer.writeAll(" = ");
|
||||
}
|
||||
|
||||
if (o.air.value(pl_op.operand)) |func_val| {
|
||||
const fn_decl = if (func_val.castTag(.extern_fn)) |extern_fn|
|
||||
@ -1242,38 +1302,26 @@ fn airCall(o: *Object, inst: Air.Inst.Index) !CValue {
|
||||
else
|
||||
unreachable;
|
||||
|
||||
const fn_ty = fn_decl.ty;
|
||||
const ret_ty = fn_ty.fnReturnType();
|
||||
const unused_result = o.liveness.isUnused(inst);
|
||||
var result_local: CValue = .none;
|
||||
|
||||
const writer = o.writer();
|
||||
if (unused_result) {
|
||||
if (ret_ty.hasCodeGenBits()) {
|
||||
try writer.print("(void)", .{});
|
||||
}
|
||||
} else {
|
||||
result_local = try o.allocLocal(ret_ty, .Const);
|
||||
try writer.writeAll(" = ");
|
||||
}
|
||||
const fn_name = mem.spanZ(fn_decl.name);
|
||||
try writer.print("{s}(", .{fn_name});
|
||||
for (args) |arg, i| {
|
||||
if (i != 0) {
|
||||
try writer.writeAll(", ");
|
||||
}
|
||||
if (o.air.value(arg)) |val| {
|
||||
try o.dg.renderValue(writer, o.air.typeOf(arg), val);
|
||||
} else {
|
||||
const val = try o.resolveInst(arg);
|
||||
try o.writeCValue(writer, val);
|
||||
}
|
||||
}
|
||||
try writer.writeAll(");\n");
|
||||
return result_local;
|
||||
try writer.writeAll(mem.spanZ(fn_decl.name));
|
||||
} else {
|
||||
return o.dg.fail("TODO: C backend: implement function pointers", .{});
|
||||
const callee = try o.resolveInst(pl_op.operand);
|
||||
try o.writeCValue(writer, callee);
|
||||
}
|
||||
|
||||
try writer.writeAll("(");
|
||||
for (args) |arg, i| {
|
||||
if (i != 0) {
|
||||
try writer.writeAll(", ");
|
||||
}
|
||||
if (o.air.value(arg)) |val| {
|
||||
try o.dg.renderValue(writer, o.air.typeOf(arg), val);
|
||||
} else {
|
||||
const val = try o.resolveInst(arg);
|
||||
try o.writeCValue(writer, val);
|
||||
}
|
||||
}
|
||||
try writer.writeAll(");\n");
|
||||
return result_local;
|
||||
}
|
||||
|
||||
fn airDbgStmt(o: *Object, inst: Air.Inst.Index) !CValue {
|
||||
@ -1643,7 +1691,7 @@ fn airUnwrapErrUnionErr(o: *Object, inst: Air.Inst.Index) !CValue {
|
||||
const operand = try o.resolveInst(ty_op.operand);
|
||||
const operand_ty = o.air.typeOf(ty_op.operand);
|
||||
|
||||
const payload_ty = operand_ty.errorUnionChild();
|
||||
const payload_ty = operand_ty.errorUnionPayload();
|
||||
if (!payload_ty.hasCodeGenBits()) {
|
||||
if (operand_ty.zigTypeTag() == .Pointer) {
|
||||
const local = try o.allocLocal(inst_ty, .Const);
|
||||
@ -1675,7 +1723,7 @@ fn airUnwrapErrUnionPay(o: *Object, inst: Air.Inst.Index) !CValue {
|
||||
const operand = try o.resolveInst(ty_op.operand);
|
||||
const operand_ty = o.air.typeOf(ty_op.operand);
|
||||
|
||||
const payload_ty = operand_ty.errorUnionChild();
|
||||
const payload_ty = operand_ty.errorUnionPayload();
|
||||
if (!payload_ty.hasCodeGenBits()) {
|
||||
return CValue.none;
|
||||
}
|
||||
@ -1760,7 +1808,7 @@ fn airIsErr(
|
||||
const operand = try o.resolveInst(un_op);
|
||||
const operand_ty = o.air.typeOf(un_op);
|
||||
const local = try o.allocLocal(Type.initTag(.bool), .Const);
|
||||
const payload_ty = operand_ty.errorUnionChild();
|
||||
const payload_ty = operand_ty.errorUnionPayload();
|
||||
if (!payload_ty.hasCodeGenBits()) {
|
||||
try writer.print(" = {s}", .{deref_prefix});
|
||||
try o.writeCValue(writer, operand);
|
||||
|
||||
@ -646,7 +646,7 @@ pub const Context = struct {
|
||||
} };
|
||||
},
|
||||
.ErrorUnion => {
|
||||
const payload_type = ty.errorUnionChild();
|
||||
const payload_type = ty.errorUnionPayload();
|
||||
const val_type = try self.genValtype(payload_type);
|
||||
|
||||
// we emit the error value as the first local, and the payload as the following.
|
||||
@ -699,7 +699,7 @@ pub const Context = struct {
|
||||
.Struct => return self.fail("TODO: Implement struct as return type for wasm", .{}),
|
||||
.Optional => return self.fail("TODO: Implement optionals as return type for wasm", .{}),
|
||||
.ErrorUnion => {
|
||||
const val_type = try self.genValtype(return_type.errorUnionChild());
|
||||
const val_type = try self.genValtype(return_type.errorUnionPayload());
|
||||
|
||||
// write down the amount of return values
|
||||
try leb.writeULEB128(writer, @as(u32, 2));
|
||||
@ -1055,7 +1055,7 @@ pub const Context = struct {
|
||||
.ErrorUnion => {
|
||||
const data = value.castTag(.error_union).?.data;
|
||||
const error_type = ty.errorUnionSet();
|
||||
const payload_type = ty.errorUnionChild();
|
||||
const payload_type = ty.errorUnionPayload();
|
||||
if (value.getError()) |_| {
|
||||
// write the error value
|
||||
try self.emitConstant(data, error_type);
|
||||
|
||||
@ -943,7 +943,7 @@ fn buildSharedLib(
        .zig_lib_directory = comp.zig_lib_directory,
        .target = comp.getTarget(),
        .root_name = lib.name,
        .root_pkg = null,
        .main_pkg = null,
        .output_mode = .Lib,
        .link_mode = .Dynamic,
        .thread_pool = comp.thread_pool,

@ -169,7 +169,7 @@ pub fn buildLibCXX(comp: *Compilation) !void {
        .zig_lib_directory = comp.zig_lib_directory,
        .target = target,
        .root_name = root_name,
        .root_pkg = null,
        .main_pkg = null,
        .output_mode = output_mode,
        .thread_pool = comp.thread_pool,
        .libc_installation = comp.bin_file.options.libc_installation,
@ -301,7 +301,7 @@ pub fn buildLibCXXABI(comp: *Compilation) !void {
        .zig_lib_directory = comp.zig_lib_directory,
        .target = target,
        .root_name = root_name,
        .root_pkg = null,
        .main_pkg = null,
        .output_mode = output_mode,
        .thread_pool = comp.thread_pool,
        .libc_installation = comp.bin_file.options.libc_installation,

@ -201,7 +201,7 @@ pub fn buildTsan(comp: *Compilation) !void {
        .zig_lib_directory = comp.zig_lib_directory,
        .target = target,
        .root_name = root_name,
        .root_pkg = null,
        .main_pkg = null,
        .output_mode = output_mode,
        .thread_pool = comp.thread_pool,
        .libc_installation = comp.bin_file.options.libc_installation,

@ -101,7 +101,7 @@ pub fn buildStaticLib(comp: *Compilation) !void {
        .zig_lib_directory = comp.zig_lib_directory,
        .target = target,
        .root_name = root_name,
        .root_pkg = null,
        .main_pkg = null,
        .output_mode = output_mode,
        .thread_pool = comp.thread_pool,
        .libc_installation = comp.bin_file.options.libc_installation,

30
src/main.zig
@ -263,12 +263,12 @@ pub fn mainArgs(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
}

const usage_build_generic =
    \\Usage: zig build-exe <options> [files]
    \\       zig build-lib <options> [files]
    \\       zig build-obj <options> [files]
    \\       zig test <options> [files]
    \\       zig run <options> [file] [-- [args]]
    \\       zig translate-c <options> [file]
    \\Usage: zig build-exe [options] [files]
    \\       zig build-lib [options] [files]
    \\       zig build-obj [options] [files]
    \\       zig test [options] [files]
    \\       zig run [options] [files] [-- [args]]
    \\       zig translate-c [options] [file]
    \\
    \\Supported file types:
    \\  .zig    Zig source code
@ -1915,7 +1915,7 @@ fn buildOutputType(
    };
    defer emit_docs_resolved.deinit();

    const root_pkg: ?*Package = if (root_src_file) |src_path| blk: {
    const main_pkg: ?*Package = if (root_src_file) |src_path| blk: {
        if (main_pkg_path) |p| {
            const rel_src_path = try fs.path.relative(gpa, p, src_path);
            defer gpa.free(rel_src_path);
@ -1924,10 +1924,10 @@ fn buildOutputType(
            break :blk try Package.create(gpa, fs.path.dirname(src_path), fs.path.basename(src_path));
        }
    } else null;
    defer if (root_pkg) |p| p.destroy(gpa);
    defer if (main_pkg) |p| p.destroy(gpa);

    // Transfer packages added with --pkg-begin/--pkg-end to the root package
    if (root_pkg) |pkg| {
    if (main_pkg) |pkg| {
        pkg.table = pkg_tree_root.table;
        pkg_tree_root.table = .{};
    }
@ -1980,7 +1980,7 @@ fn buildOutputType(
        if (arg_mode == .run) {
            break :l global_cache_directory;
        }
        if (root_pkg) |pkg| {
        if (main_pkg) |pkg| {
            const cache_dir_path = try pkg.root_src_directory.join(arena, &[_][]const u8{"zig-cache"});
            const dir = try pkg.root_src_directory.handle.makeOpenPath("zig-cache", .{});
            cleanup_local_cache_dir = dir;
@ -2018,7 +2018,7 @@ fn buildOutputType(
        .dynamic_linker = target_info.dynamic_linker.get(),
        .sysroot = sysroot,
        .output_mode = output_mode,
        .root_pkg = root_pkg,
        .main_pkg = main_pkg,
        .emit_bin = emit_bin_loc,
        .emit_h = emit_h_resolved.data,
        .emit_asm = emit_asm_resolved.data,
@ -2823,7 +2823,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
    const std_special = "std" ++ fs.path.sep_str ++ "special";
    const special_dir_path = try zig_lib_directory.join(arena, &[_][]const u8{std_special});

    var root_pkg: Package = .{
    var main_pkg: Package = .{
        .root_src_directory = .{
            .path = special_dir_path,
            .handle = zig_lib_directory.handle.openDir(std_special, .{}) catch |err| {
@ -2832,7 +2832,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
        },
        .root_src_path = "build_runner.zig",
    };
    defer root_pkg.root_src_directory.handle.close();
    defer main_pkg.root_src_directory.handle.close();

    var cleanup_build_dir: ?fs.Dir = null;
    defer if (cleanup_build_dir) |*dir| dir.close();
@ -2881,7 +2881,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
        .root_src_directory = build_directory,
        .root_src_path = build_zig_basename,
    };
    try root_pkg.addAndAdopt(arena, "@build", &build_pkg);
    try main_pkg.addAndAdopt(arena, "@build", &build_pkg);

    var global_cache_directory: Compilation.Directory = l: {
        const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
@ -2938,7 +2938,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
        .is_native_abi = cross_target.isNativeAbi(),
        .dynamic_linker = target_info.dynamic_linker.get(),
        .output_mode = .Exe,
        .root_pkg = &root_pkg,
        .main_pkg = &main_pkg,
        .emit_bin = emit_bin,
        .emit_h = null,
        .optimize_mode = .Debug,

@ -197,7 +197,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
        .zig_lib_directory = comp.zig_lib_directory,
        .target = comp.getTarget(),
        .root_name = "c",
        .root_pkg = null,
        .main_pkg = null,
        .output_mode = .Lib,
        .link_mode = .Dynamic,
        .thread_pool = comp.thread_pool,

@ -124,6 +124,8 @@ const Writer = struct {
            .bool_and,
            .bool_or,
            .store,
            .slice_elem_val,
            .ptr_slice_elem_val,
            => try w.writeBinOp(s, inst),

            .is_null,
@ -161,6 +163,8 @@ const Writer = struct {
            .unwrap_errunion_err_ptr,
            .wrap_errunion_payload,
            .wrap_errunion_err,
            .slice_ptr,
            .slice_len,
            => try w.writeTyOp(s, inst),

            .block,

@ -107,7 +107,7 @@ pub const Module = extern struct {
    test_name_prefix_ptr: [*]const u8,
    test_name_prefix_len: usize,
    userdata: usize,
    root_pkg: *Pkg,
    main_pkg: *Pkg,
    main_progress_node: ?*std.Progress.Node,
    code_model: CodeModel,
    subsystem: TargetSubsystem,

@ -126,7 +126,7 @@ void zig_stage1_build_object(struct ZigStage1 *stage1) {

    g->main_progress_node = stage1->main_progress_node;

    add_package(g, stage1->root_pkg, g->main_pkg);
    add_package(g, stage1->main_pkg, g->main_pkg);

    codegen_build_object(g);
}

@ -176,7 +176,7 @@ struct ZigStage1 {
    size_t test_name_prefix_len;

    void *userdata;
    struct ZigStage1Pkg *root_pkg;
    struct ZigStage1Pkg *main_pkg;
    struct Stage2ProgressNode *main_progress_node;

    enum CodeModel code_model;

@ -465,7 +465,7 @@ int main(int argc, char **argv) {
    stage1->verbose_llvm_cpu_features = verbose_llvm_cpu_features;
    stage1->emit_o_ptr = emit_bin_path;
    stage1->emit_o_len = strlen(emit_bin_path);
    stage1->root_pkg = cur_pkg;
    stage1->main_pkg = cur_pkg;
    stage1->err_color = color;
    stage1->link_libc = link_libc;
    stage1->link_libcpp = link_libcpp;

@ -848,11 +848,11 @@ pub const TestContext = struct {
            .path = local_cache_path,
        };

        var root_pkg: Package = .{
        var main_pkg: Package = .{
            .root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
            .root_src_path = tmp_src_path,
        };
        defer root_pkg.table.deinit(allocator);
        defer main_pkg.table.deinit(allocator);

        const bin_name = try std.zig.binNameAlloc(arena, .{
            .root_name = "test_case",
@ -896,7 +896,7 @@ pub const TestContext = struct {
            .optimize_mode = case.optimize_mode,
            .emit_bin = emit_bin,
            .emit_h = emit_h,
            .root_pkg = &root_pkg,
            .main_pkg = &main_pkg,
            .keep_source_files_loaded = true,
            .object_format = case.object_format,
            .is_native_os = case.target.isNativeOs(),

33
src/type.zig
@ -525,9 +525,19 @@ pub const Type = extern union {
                const b_data = b.castTag(.error_union).?.data;
                return a_data.error_set.eql(b_data.error_set) and a_data.payload.eql(b_data.payload);
            },
            .ErrorSet => {
                const a_is_anyerror = a.tag() == .anyerror;
                const b_is_anyerror = b.tag() == .anyerror;

                if (a_is_anyerror and b_is_anyerror) return true;
                if (a_is_anyerror or b_is_anyerror) return false;

                std.debug.panic("TODO implement Type equality comparison of {} and {}", .{
                    a.tag(), b.tag(),
                });
            },
            .Opaque,
            .Float,
            .ErrorSet,
            .BoundFn,
            .Frame,
            => std.debug.panic("TODO implement Type equality comparison of {} and {}", .{ a, b }),
@ -1190,6 +1200,9 @@ pub const Type = extern union {
            .@"struct" => {
                // TODO introduce lazy value mechanism
                const struct_obj = self.castTag(.@"struct").?.data;
                if (struct_obj.known_has_bits) {
                    return true;
                }
                assert(struct_obj.status == .have_field_types or
                    struct_obj.status == .layout_wip or
                    struct_obj.status == .have_layout);
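
For intuition about the `known_has_bits` fast path above (an editorial sketch; the exact AstGen heuristic is an assumption and is not shown in this diff): a struct whose AST already contains a field of an obviously non-zero-bit type can take the fast path without resolving field types, while a struct like the second one still needs the resolved-status path.

// Presumably flaggable from the AST alone: `u32` is a non-zero-bit primitive.
const KnownHasBits = struct { x: u32 };

// Not flaggable up front: whether this has any bits is only known once the
// field types are resolved (here they all turn out to be zero-bit).
const MaybeZeroBits = struct { y: void, z: u0 };
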
@ -1645,7 +1658,7 @@ pub const Type = extern union {
            } else if (!payload.payload.hasCodeGenBits()) {
                return payload.error_set.abiSize(target);
            }
            @panic("TODO abiSize error union");
            std.debug.panic("TODO abiSize error union {}", .{self});
        },
    };
}
@ -2038,7 +2051,7 @@ pub const Type = extern union {
            return ty.optionalChild(&buf).isValidVarType(is_extern);
        },
        .Pointer, .Array, .Vector => ty = ty.elemType(),
        .ErrorUnion => ty = ty.errorUnionChild(),
        .ErrorUnion => ty = ty.errorUnionPayload(),

        .Fn => @panic("TODO fn isValidVarType"),
        .Struct => {
@ -2119,13 +2132,10 @@ pub const Type = extern union {
    }

    /// Asserts that the type is an error union.
    pub fn errorUnionChild(self: Type) Type {
    pub fn errorUnionPayload(self: Type) Type {
        return switch (self.tag()) {
            .anyerror_void_error_union => Type.initTag(.anyerror),
            .error_union => {
                const payload = self.castTag(.error_union).?;
                return payload.data.payload;
            },
            .anyerror_void_error_union => Type.initTag(.void),
            .error_union => self.castTag(.error_union).?.data.payload,
            else => unreachable,
        };
    }
@ -2133,10 +2143,7 @@ pub const Type = extern union {
    pub fn errorUnionSet(self: Type) Type {
        return switch (self.tag()) {
            .anyerror_void_error_union => Type.initTag(.anyerror),
            .error_union => {
                const payload = self.castTag(.error_union).?;
                return payload.data.error_set;
            },
            .error_union => self.castTag(.error_union).?.data.error_set,
            else => unreachable,
        };
    }

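As a quick illustration of the renamed accessors (an editorial sketch, not part of the diff; the relative import path and the use of `std.debug.assert` are assumptions), the always-available `anyerror!void` tag splits as follows:

const std = @import("std");
const Type = @import("type.zig").Type; // assumed import path within src/

fn demoErrorUnionAccessors() void {
    const eu = Type.initTag(.anyerror_void_error_union);
    // The error-set half of `anyerror!void` is `anyerror`; the payload half is `void`.
    std.debug.assert(eu.errorUnionSet().tag() == .anyerror);
    std.debug.assert(eu.errorUnionPayload().tag() == .void);
}
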
@ -1,6 +1,6 @@
const builtin = @import("builtin");

comptime {
test {
    // Tests that pass for both.
    {}

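For context on the `comptime {}` to `test {}` switch above (an editorial sketch; the file name is a placeholder), the aggregation pattern looks roughly like this: referencing a file from inside a top-level `test` block makes its `test` declarations visible to the test runner.

// Hypothetical root test file; "behavior/example.zig" is a placeholder.
test {
    // Importing (and referencing) a file here pulls its tests into the build.
    _ = @import("behavior/example.zig");
}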