Merge pull request #12796 from Vexu/referenced-by-v2

stage2: add referenced by trace to compile errors attempt #2 (+ some fixes)
Veikka Tuominen 2022-09-16 23:49:00 +03:00 committed by GitHub
commit b2aedb0709
21 changed files with 492 additions and 52 deletions
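For orientation, a minimal sketch of the user-facing change. The program, its file name, and the line/column numbers in the sample output below are made up for illustration; the output shape follows the rendering code added to Compilation.zig and the default trace length of 2 defined in Sema.zig:

fn foo() void {
    @compileError("not implemented");
}
fn bar() void {
    foo();
}
fn baz() void {
    bar();
}
pub fn main() void {
    baz();
}

// Expected to report something like:
//
//     example.zig:2:5: error: not implemented
//     referenced by:
//         bar: example.zig:5:5
//         baz: example.zig:8:5
//         remaining reference traces hidden; use '-freference-trace' to see all reference traces
//
// Previously each step of the chain was reported as a separate "referenced here" note on the error.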

View File

@ -185,6 +185,16 @@ pub fn main() !void {
builder.use_stage1 = true;
} else if (mem.eql(u8, arg, "-fno-stage1")) {
builder.use_stage1 = false;
} else if (mem.eql(u8, arg, "-freference-trace")) {
builder.reference_trace = 256;
} else if (mem.startsWith(u8, arg, "-freference-trace=")) {
const num = arg["-freference-trace=".len..];
builder.reference_trace = std.fmt.parseUnsigned(u32, num, 10) catch |err| {
std.debug.print("unable to parse reference_trace count '{s}': {s}", .{ num, @errorName(err) });
process.exit(1);
};
} else if (mem.eql(u8, arg, "-fno-reference-trace")) {
builder.reference_trace = null;
} else if (mem.eql(u8, arg, "--")) {
builder.args = argsRest(args, arg_idx);
break;
@ -308,6 +318,8 @@ fn usage(builder: *Builder, already_ran_build: bool, out_stream: anytype) !void
\\Advanced Options:
\\ -fstage1 Force using bootstrap compiler as the codegen backend
\\ -fno-stage1 Prevent using bootstrap compiler as the codegen backend
\\ -freference-trace[=num] How many lines of reference trace should be shown per compile error
\\ -fno-reference-trace Disable reference trace
\\ --build-file [file] Override path to build.zig
\\ --cache-dir [path] Override path to local Zig cache directory
\\ --global-cache-dir [path] Override path to global Zig cache directory
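The new flags mirror the compiler's own -freference-trace handling in main.zig further down; hypothetical invocations:

    zig build -freference-trace        # use the 256-entry default set above
    zig build -freference-trace=8      # show at most 8 reference trace lines per compile error
    zig build -fno-reference-trace     # disable the reference trace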

View File

@ -45,6 +45,7 @@ pub const Builder = struct {
/// The purpose of executing the command is for a human to read compile errors from the terminal
prominent_compile_errors: bool,
color: enum { auto, on, off } = .auto,
reference_trace: ?u32 = null,
use_stage1: ?bool = null,
invalid_user_input: bool,
zig_exe: []const u8,
@ -2453,6 +2454,10 @@ pub const LibExeObjStep = struct {
try zig_args.append(@tagName(builder.color));
}
if (builder.reference_trace) |some| {
try zig_args.append(try std.fmt.allocPrint(builder.allocator, "-freference-trace={d}", .{some}));
}
if (self.use_stage1) |stage1| {
if (stage1) {
try zig_args.append("-fstage1");

View File

@ -1981,7 +1981,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
else
.@"break";
if (break_tag == .break_inline) {
_ = try parent_gz.addNode(.check_comptime_control_flow, node);
_ = try parent_gz.addUnNode(.check_comptime_control_flow, Zir.indexToRef(continue_block), node);
}
_ = try parent_gz.addBreak(break_tag, continue_block, .void_value);
return Zir.Inst.Ref.unreachable_value;

View File

@ -154,6 +154,10 @@ owned_link_dir: ?std.fs.Dir,
/// Don't use this for anything other than stage1 compatibility.
color: Color = .auto,
/// How many lines of reference trace should be included per compile error.
/// Null means only show snippet on first error.
reference_trace: ?u32 = null,
libcxx_abi_version: libcxx.AbiVersion = libcxx.AbiVersion.default,
/// This mutex guards all `Compilation` mutable state.
@ -348,6 +352,7 @@ pub const AllErrors = struct {
/// Does not include the trailing newline.
source_line: ?[]const u8,
notes: []Message = &.{},
reference_trace: []Message = &.{},
/// Splits the error message up into lines to properly indent them
/// to allow for long, good-looking error messages.
@ -447,6 +452,34 @@ pub const AllErrors = struct {
for (src.notes) |note| {
try note.renderToWriter(ttyconf, stderr, "note", .Cyan, indent);
}
if (src.reference_trace.len != 0) {
ttyconf.setColor(stderr, .Reset);
ttyconf.setColor(stderr, .Dim);
try stderr.print("referenced by:\n", .{});
for (src.reference_trace) |reference| {
switch (reference) {
.src => |ref_src| try stderr.print(" {s}: {s}:{d}:{d}\n", .{
ref_src.msg,
ref_src.src_path,
ref_src.line + 1,
ref_src.column + 1,
}),
.plain => |plain| if (plain.count != 0) {
try stderr.print(
" {d} reference(s) hidden; use '-freference-trace={d}' to see all references\n",
.{ plain.count, plain.count + src.reference_trace.len - 1 },
);
} else {
try stderr.print(
" remaining reference traces hidden; use '-freference-trace' to see all reference traces\n",
.{},
);
},
}
}
try stderr.writeByte('\n');
ttyconf.setColor(stderr, .Reset);
}
},
.plain => |plain| {
ttyconf.setColor(stderr, color);
@ -572,6 +605,32 @@ pub const AllErrors = struct {
});
return;
}
const reference_trace = try allocator.alloc(Message, module_err_msg.reference_trace.len);
for (reference_trace) |*reference, i| {
const module_reference = module_err_msg.reference_trace[i];
if (module_reference.hidden != 0) {
reference.* = .{ .plain = .{ .msg = undefined, .count = module_reference.hidden } };
break;
} else if (module_reference.decl == null) {
reference.* = .{ .plain = .{ .msg = undefined, .count = 0 } };
break;
}
const source = try module_reference.src_loc.file_scope.getSource(module.gpa);
const span = try module_reference.src_loc.span(module.gpa);
const loc = std.zig.findLineColumn(source.bytes, span.main);
const file_path = try module_reference.src_loc.file_scope.fullPath(allocator);
reference.* = .{
.src = .{
.src_path = file_path,
.msg = try allocator.dupe(u8, std.mem.sliceTo(module_reference.decl.?, 0)),
.span = span,
.line = @intCast(u32, loc.line),
.column = @intCast(u32, loc.column),
.source_line = null,
},
};
}
const file_path = try module_err_msg.src_loc.file_scope.fullPath(allocator);
try errors.append(.{
.src = .{
@ -581,6 +640,7 @@ pub const AllErrors = struct {
.line = @intCast(u32, err_loc.line),
.column = @intCast(u32, err_loc.column),
.notes = notes_buf[0..note_i],
.reference_trace = reference_trace,
.source_line = try allocator.dupe(u8, err_loc.source_line),
},
});
@ -929,6 +989,7 @@ pub const InitOptions = struct {
clang_preprocessor_mode: ClangPreprocessorMode = .no,
/// This is for stage1 and should be deleted upon completion of self-hosting.
color: Color = .auto,
reference_trace: ?u32 = null,
test_filter: ?[]const u8 = null,
test_name_prefix: ?[]const u8 = null,
subsystem: ?std.Target.SubSystem = null,
@ -1838,6 +1899,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.disable_c_depfile = options.disable_c_depfile,
.owned_link_dir = owned_link_dir,
.color = options.color,
.reference_trace = options.reference_trace,
.time_report = options.time_report,
.stack_report = options.stack_report,
.unwind_tables = unwind_tables,
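Reading the two .plain branches in the rendering hunk above: with an explicit -freference-trace=N the footer reports how many entries were omitted and suggests the exact count needed to show all of them, while with the flag unset it only hints that the flag exists. For example (hypothetical numbers), a chain of 12 referencing declarations rendered with -freference-trace=4 would end with roughly:

    8 reference(s) hidden; use '-freference-trace=12' to see all references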

View File

@ -166,6 +166,11 @@ decls_free_list: std.ArrayListUnmanaged(Decl.Index) = .{},
global_assembly: std.AutoHashMapUnmanaged(Decl.Index, []u8) = .{},
reference_table: std.AutoHashMapUnmanaged(Decl.Index, struct {
referencer: Decl.Index,
src: LazySrcLoc,
}) = .{},
pub const StringLiteralContext = struct {
bytes: *std.ArrayListUnmanaged(u8),
@ -2084,6 +2089,13 @@ pub const ErrorMsg = struct {
src_loc: SrcLoc,
msg: []const u8,
notes: []ErrorMsg = &.{},
reference_trace: []Trace = &.{},
pub const Trace = struct {
decl: ?[*:0]const u8,
src_loc: SrcLoc,
hidden: u32 = 0,
};
pub fn create(
gpa: Allocator,
@ -2122,8 +2134,15 @@ pub const ErrorMsg = struct {
}
gpa.free(err_msg.notes);
gpa.free(err_msg.msg);
gpa.free(err_msg.reference_trace);
err_msg.* = undefined;
}
pub fn clearTrace(err_msg: *ErrorMsg, gpa: Allocator) void {
if (err_msg.reference_trace.len == 0) return;
gpa.free(err_msg.reference_trace);
err_msg.reference_trace = &.{};
}
};
/// Canonical reference to a position within a source file.
@ -3411,6 +3430,7 @@ pub fn deinit(mod: *Module) void {
mod.decls_free_list.deinit(gpa);
mod.allocated_decls.deinit(gpa);
mod.global_assembly.deinit(gpa);
mod.reference_table.deinit(gpa);
mod.string_literal_table.deinit(gpa);
mod.string_literal_bytes.deinit(gpa);

View File

@ -111,6 +111,7 @@ const crash_report = @import("crash_report.zig");
const build_options = @import("build_options");
pub const default_branch_quota = 1000;
pub const default_reference_trace_len = 2;
pub const InstMap = std.AutoHashMapUnmanaged(Zir.Inst.Index, Air.Inst.Ref);
@ -144,6 +145,7 @@ pub const Block = struct {
/// Non-zero if a non-inline loop or a runtime conditional has been encountered.
/// Stores to comptime variables are only allowed when var.runtime_index <= runtime_index.
runtime_index: Value.RuntimeIndex = .zero,
inline_block: Zir.Inst.Index = 0,
is_comptime: bool,
is_typeof: bool = false,
@ -1157,9 +1159,20 @@ fn analyzeBodyInner(
},
.check_comptime_control_flow => {
if (!block.is_comptime) {
if (block.runtime_cond orelse block.runtime_loop) |runtime_src| {
const inst_data = sema.code.instructions.items(.data)[inst].node;
const src = LazySrcLoc.nodeOffset(inst_data);
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();
const inline_block = Zir.refToIndex(inst_data.operand).?;
var check_block = block;
const target_runtime_index = while (true) {
if (check_block.inline_block == inline_block) {
break check_block.runtime_index;
}
check_block = check_block.parent.?;
} else unreachable;
if (@enumToInt(target_runtime_index) < @enumToInt(block.runtime_index)) {
const runtime_src = block.runtime_cond orelse block.runtime_loop.?;
const msg = msg: {
const msg = try sema.errMsg(block, src, "comptime control flow inside runtime block", .{});
errdefer msg.destroy(sema.gpa);
@ -1272,10 +1285,15 @@ fn analyzeBodyInner(
// current list of parameters and restore it later.
// Note: this probably needs to be resolved in a more general manner.
const prev_params = block.params;
const prev_inline_block = block.inline_block;
if (tags[inline_body[inline_body.len - 1]] == .repeat_inline) {
block.inline_block = inline_body[0];
}
block.params = .{};
defer {
block.params.deinit(gpa);
block.params = prev_params;
block.inline_block = prev_inline_block;
}
const opt_break_data = try sema.analyzeBodyBreak(block, inline_body);
// A runtime conditional branch that needs a post-hoc block to be
@ -1353,6 +1371,8 @@ fn analyzeBodyInner(
const else_body = sema.code.extra[extra.end + then_body.len ..][0..extra.data.else_body_len];
const cond = try sema.resolveInstConst(block, cond_src, extra.data.condition, "condition in comptime branch must be comptime known");
const inline_body = if (cond.val.toBool()) then_body else else_body;
const old_runtime_index = block.runtime_index;
defer block.runtime_index = old_runtime_index;
const break_data = (try sema.analyzeBodyBreak(block, inline_body)) orelse
break always_noreturn;
if (inst == break_data.block_inst) {
@ -1939,13 +1959,53 @@ fn failWithOwnedErrorMsg(sema: *Sema, err_msg: *Module.ErrorMsg) CompileError {
}
const mod = sema.mod;
{
ref: {
errdefer err_msg.destroy(mod.gpa);
if (err_msg.src_loc.lazy == .unneeded) {
return error.NeededSourceLocation;
}
try mod.failed_decls.ensureUnusedCapacity(mod.gpa, 1);
try mod.failed_files.ensureUnusedCapacity(mod.gpa, 1);
const max_references = blk: {
if (sema.mod.comp.reference_trace) |num| break :blk num;
// Do not add multiple traces without explicit request.
if (sema.mod.failed_decls.count() != 0) break :ref;
break :blk default_reference_trace_len;
};
var referenced_by = if (sema.func) |some| some.owner_decl else sema.owner_decl_index;
var reference_stack = std.ArrayList(Module.ErrorMsg.Trace).init(sema.gpa);
defer reference_stack.deinit();
// Avoid infinite loops.
var seen = std.AutoHashMap(Module.Decl.Index, void).init(sema.gpa);
defer seen.deinit();
var cur_reference_trace: u32 = 0;
while (sema.mod.reference_table.get(referenced_by)) |ref| : (cur_reference_trace += 1) {
const gop = try seen.getOrPut(ref.referencer);
if (gop.found_existing) break;
if (cur_reference_trace < max_references) {
const decl = sema.mod.declPtr(ref.referencer);
try reference_stack.append(.{ .decl = decl.name, .src_loc = ref.src.toSrcLoc(decl) });
}
referenced_by = ref.referencer;
}
if (sema.mod.comp.reference_trace == null and cur_reference_trace > 0) {
try reference_stack.append(.{
.decl = null,
.src_loc = undefined,
.hidden = 0,
});
} else if (cur_reference_trace > max_references) {
try reference_stack.append(.{
.decl = undefined,
.src_loc = undefined,
.hidden = cur_reference_trace - max_references,
});
}
err_msg.reference_trace = reference_stack.toOwnedSlice();
}
if (sema.owner_func) |func| {
func.state = .sema_failure;
@ -4749,6 +4809,9 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.c_import_buf = &c_import_buf,
.runtime_cond = parent_block.runtime_cond,
.runtime_loop = parent_block.runtime_loop,
.runtime_index = parent_block.runtime_index,
};
defer child_block.instructions.deinit(sema.gpa);
@ -4847,6 +4910,9 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErro
.is_comptime = parent_block.is_comptime,
.want_safety = parent_block.want_safety,
.float_mode = parent_block.float_mode,
.runtime_cond = parent_block.runtime_cond,
.runtime_loop = parent_block.runtime_loop,
.runtime_index = parent_block.runtime_index,
};
defer child_block.instructions.deinit(gpa);
@ -5341,14 +5407,8 @@ fn zirDeclRef(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
const src = inst_data.src();
const decl_name = inst_data.get(sema.code);
const decl_index = try sema.lookupIdentifier(block, src, decl_name);
return sema.analyzeDeclRef(decl_index) catch |err| switch (err) {
error.AnalysisFail => {
const msg = sema.err orelse return err;
try sema.errNote(block, src, msg, "referenced here", .{});
return err;
},
else => return err,
};
try sema.addReferencedBy(block, src, decl_index);
return sema.analyzeDeclRef(decl_index);
}
fn zirDeclVal(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@ -6082,6 +6142,7 @@ fn analyzeCall(
error.AnalysisFail => {
const err_msg = sema.err orelse return err;
try sema.errNote(block, call_src, err_msg, "called from here", .{});
err_msg.clearTrace(sema.gpa);
return err;
},
else => |e| return e,
@ -9743,6 +9804,9 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
.inlining = block.inlining,
.is_comptime = block.is_comptime,
.switch_else_err_ty = else_error_ty,
.runtime_cond = block.runtime_cond,
.runtime_loop = block.runtime_loop,
.runtime_index = block.runtime_index,
};
const merges = &child_block.label.?.merges;
defer child_block.instructions.deinit(gpa);
@ -14873,6 +14937,9 @@ fn zirTypeofPeer(
.inlining = block.inlining,
.is_comptime = false,
.is_typeof = true,
.runtime_cond = block.runtime_cond,
.runtime_loop = block.runtime_loop,
.runtime_index = block.runtime_index,
};
defer child_block.instructions.deinit(sema.gpa);
// Ignore the result, we only care about the instructions in `args`.
@ -17407,7 +17474,7 @@ fn reifyStruct(
if (!try sema.intFitsInType(block, src, alignment_val, Type.u32, null)) {
return sema.fail(block, src, "alignment must fit in 'u32'", .{});
}
const abi_align = @intCast(u29, alignment_val.toUnsignedInt(target));
const abi_align = @intCast(u29, (try alignment_val.getUnsignedIntAdvanced(target, sema.kit(block, src))).?);
const field_name = try name_val.toAllocatedBytes(
Type.initTag(.const_slice_u8),
@ -21653,12 +21720,19 @@ fn finishFieldCallBind(
.@"addrspace" = ptr_ty.ptrAddressSpace(),
});
const container_ty = ptr_ty.childType();
if (container_ty.zigTypeTag() == .Struct) {
if (container_ty.structFieldValueComptime(field_index)) |default_val| {
return sema.addConstant(field_ty, default_val);
}
}
if (try sema.resolveDefinedValue(block, src, object_ptr)) |struct_ptr_val| {
const pointer = try sema.addConstant(
ptr_field_ty,
try Value.Tag.field_ptr.create(arena, .{
.container_ptr = struct_ptr_val,
.container_ty = ptr_ty.childType(),
.container_ty = container_ty,
.field_index = field_index,
}),
);
@ -21704,14 +21778,8 @@ fn namespaceLookupRef(
decl_name: []const u8,
) CompileError!?Air.Inst.Ref {
const decl = (try sema.namespaceLookup(block, src, namespace, decl_name)) orelse return null;
return sema.analyzeDeclRef(decl) catch |err| switch (err) {
error.AnalysisFail => {
const msg = sema.err orelse return err;
try sema.errNote(block, src, msg, "referenced here", .{});
return err;
},
else => return err,
};
try sema.addReferencedBy(block, src, decl);
return try sema.analyzeDeclRef(decl);
}
fn namespaceLookupVal(
@ -24771,6 +24839,20 @@ fn beginComptimePtrMutation(
else => unreachable,
},
.empty_struct_value => {
const duped = try sema.arena.create(Value);
duped.* = Value.initTag(.the_only_possible_value);
return beginComptimePtrMutationInner(
sema,
block,
src,
parent.ty.structFieldType(field_index),
duped,
ptr_elem_ty,
parent.decl_ref_mut,
);
},
else => unreachable,
},
.reinterpret => |reinterpret| {
@ -25950,14 +26032,8 @@ fn analyzeDeclVal(
if (sema.decl_val_table.get(decl_index)) |result| {
return result;
}
const decl_ref = sema.analyzeDeclRef(decl_index) catch |err| switch (err) {
error.AnalysisFail => {
const msg = sema.err orelse return err;
try sema.errNote(block, src, msg, "referenced here", .{});
return err;
},
else => return err,
};
try sema.addReferencedBy(block, src, decl_index);
const decl_ref = try sema.analyzeDeclRef(decl_index);
const result = try sema.analyzeLoad(block, src, decl_ref, src);
if (Air.refToIndex(result)) |index| {
if (sema.air_instructions.items(.tag)[index] == .constant and !block.is_typeof) {
@ -25967,6 +26043,19 @@ fn analyzeDeclVal(
return result;
}
fn addReferencedBy(
sema: *Sema,
block: *Block,
src: LazySrcLoc,
decl_index: Decl.Index,
) !void {
if (sema.mod.comp.reference_trace == @as(u32, 0)) return;
try sema.mod.reference_table.put(sema.gpa, decl_index, .{
.referencer = block.src_decl,
.src = src,
});
}
fn ensureDeclAnalyzed(sema: *Sema, decl_index: Decl.Index) CompileError!void {
const decl = sema.mod.declPtr(decl_index);
if (decl.analysis == .in_progress) {
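Taking the Sema.zig changes together (an editorial reading of the diff, not text from the commit): addReferencedBy records, for each referenced declaration, the declaration that referenced it and the source location of the reference; failWithOwnedErrorMsg then walks reference_table from the failing declaration upward, using a seen set to guard against cycles and capping the recorded entries at -freference-trace (or at default_reference_trace_len when the flag is unset, and then only for the first failed declaration); analyzeCall drops an already-attached trace after adding its "called from here" note, avoiding redundant chains. For a hypothetical chain main -> helper -> broken where analysis of broken fails, the table holds broken -> helper and helper -> main, so the reported trace lists helper and then main.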

View File

@ -287,7 +287,7 @@ pub const Inst = struct {
/// Uses the `break` union field.
break_inline,
/// Checks that comptime control flow does not happen inside a runtime block.
/// Uses the `node` union field.
/// Uses the `un_node` union field.
check_comptime_control_flow,
/// Function call.
/// Uses the `pl_node` union field with payload `Call`.
@ -1600,7 +1600,7 @@ pub const Inst = struct {
.bool_br_or = .bool_br,
.@"break" = .@"break",
.break_inline = .@"break",
.check_comptime_control_flow = .node,
.check_comptime_control_flow = .un_node,
.call = .pl_node,
.cmp_lt = .pl_node,
.cmp_lte = .pl_node,

View File

@ -396,6 +396,8 @@ const usage_build_generic =
\\ -fno-Clang Prevent using Clang as the C/C++ compilation backend
\\ -fstage1 Force using bootstrap compiler as the codegen backend
\\ -fno-stage1 Prevent using bootstrap compiler as the codegen backend
\\ -freference-trace[=num] How many lines of reference trace should be shown per compile error
\\ -fno-reference-trace Disable reference trace
\\ -fsingle-threaded Code assumes there is only one thread
\\ -fno-single-threaded Code may not assume there is only one thread
\\ -fbuiltin Enable implicit builtin knowledge of functions
@ -742,6 +744,7 @@ fn buildOutputType(
var headerpad_size: ?u32 = null;
var headerpad_max_install_names: bool = false;
var dead_strip_dylibs: bool = false;
var reference_trace: ?u32 = null;
// e.g. -m3dnow or -mno-outline-atomics. They correspond to std.Target llvm cpu feature names.
// This array is populated by zig cc frontend and then has to be converted to zig-style
@ -928,14 +931,14 @@ fn buildOutputType(
fatal("expected parameter after {s}", .{arg});
};
stack_size_override = std.fmt.parseUnsigned(u64, next_arg, 0) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse stack size '{s}': {s}", .{ next_arg, @errorName(err) });
};
} else if (mem.eql(u8, arg, "--image-base")) {
const next_arg = args_iter.next() orelse {
fatal("expected parameter after {s}", .{arg});
};
image_base_override = std.fmt.parseUnsigned(u64, next_arg, 0) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse image base override '{s}': {s}", .{ next_arg, @errorName(err) });
};
} else if (mem.eql(u8, arg, "--name")) {
provided_name = args_iter.next() orelse {
@ -984,7 +987,7 @@ fn buildOutputType(
fatal("expected parameter after {s}", .{arg});
};
pagezero_size = std.fmt.parseUnsigned(u64, eatIntPrefix(next_arg, 16), 16) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse pagezero size'{s}': {s}", .{ next_arg, @errorName(err) });
};
} else if (mem.eql(u8, arg, "-search_paths_first")) {
search_strategy = .paths_first;
@ -995,7 +998,7 @@ fn buildOutputType(
fatal("expected parameter after {s}", .{arg});
};
headerpad_size = std.fmt.parseUnsigned(u32, eatIntPrefix(next_arg, 16), 16) catch |err| {
fatal("unable to parser '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse headerpat size '{s}': {s}", .{ next_arg, @errorName(err) });
};
} else if (mem.eql(u8, arg, "-headerpad_max_install_names")) {
headerpad_max_install_names = true;
@ -1214,6 +1217,15 @@ fn buildOutputType(
use_stage1 = true;
} else if (mem.eql(u8, arg, "-fno-stage1")) {
use_stage1 = false;
} else if (mem.eql(u8, arg, "-freference-trace")) {
reference_trace = 256;
} else if (mem.startsWith(u8, arg, "-freference-trace=")) {
const num = arg["-freference-trace=".len..];
reference_trace = std.fmt.parseUnsigned(u32, num, 10) catch |err| {
fatal("unable to parse reference_trace count '{s}': {s}", .{ num, @errorName(err) });
};
} else if (mem.eql(u8, arg, "-fno-reference-trace")) {
reference_trace = null;
} else if (mem.eql(u8, arg, "-rdynamic")) {
rdynamic = true;
} else if (mem.eql(u8, arg, "-fsoname")) {
@ -1785,11 +1797,11 @@ fn buildOutputType(
fatal("expected linker arg after '{s}'", .{arg});
}
linker_optimization = std.fmt.parseUnsigned(u8, linker_args.items[i], 10) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse optimization level '{s}': {s}", .{ linker_args.items[i], @errorName(err) });
};
} else if (mem.startsWith(u8, arg, "-O")) {
linker_optimization = std.fmt.parseUnsigned(u8, arg["-O".len..], 10) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse optimization level '{s}': {s}", .{ arg, @errorName(err) });
};
} else if (mem.eql(u8, arg, "-pagezero_size")) {
i += 1;
@ -1798,7 +1810,7 @@ fn buildOutputType(
}
const next_arg = linker_args.items[i];
pagezero_size = std.fmt.parseUnsigned(u64, eatIntPrefix(next_arg, 16), 16) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse pagezero size '{s}': {s}", .{ next_arg, @errorName(err) });
};
} else if (mem.eql(u8, arg, "-headerpad")) {
i += 1;
@ -1807,7 +1819,7 @@ fn buildOutputType(
}
const next_arg = linker_args.items[i];
headerpad_size = std.fmt.parseUnsigned(u32, eatIntPrefix(next_arg, 16), 16) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse headerpad size '{s}': {s}", .{ next_arg, @errorName(err) });
};
} else if (mem.eql(u8, arg, "-headerpad_max_install_names")) {
headerpad_max_install_names = true;
@ -1899,7 +1911,7 @@ fn buildOutputType(
fatal("expected linker arg after '{s}'", .{arg});
}
version.major = std.fmt.parseUnsigned(u32, linker_args.items[i], 10) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse major image version '{s}': {s}", .{ linker_args.items[i], @errorName(err) });
};
have_version = true;
} else if (mem.eql(u8, arg, "--minor-image-version")) {
@ -1908,7 +1920,7 @@ fn buildOutputType(
fatal("expected linker arg after '{s}'", .{arg});
}
version.minor = std.fmt.parseUnsigned(u32, linker_args.items[i], 10) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse minor image version '{s}': {s}", .{ linker_args.items[i], @errorName(err) });
};
have_version = true;
} else if (mem.eql(u8, arg, "-e") or mem.eql(u8, arg, "--entry")) {
@ -1923,7 +1935,7 @@ fn buildOutputType(
fatal("expected linker arg after '{s}'", .{arg});
}
stack_size_override = std.fmt.parseUnsigned(u64, linker_args.items[i], 0) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse stack size override '{s}': {s}", .{ linker_args.items[i], @errorName(err) });
};
} else if (mem.eql(u8, arg, "--image-base")) {
i += 1;
@ -1931,7 +1943,7 @@ fn buildOutputType(
fatal("expected linker arg after '{s}'", .{arg});
}
image_base_override = std.fmt.parseUnsigned(u64, linker_args.items[i], 0) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse image base override '{s}': {s}", .{ linker_args.items[i], @errorName(err) });
};
} else if (mem.eql(u8, arg, "-T") or mem.eql(u8, arg, "--script")) {
i += 1;
@ -1984,7 +1996,7 @@ fn buildOutputType(
linker_args.items[i],
10,
) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse major subsystem version '{s}': {s}", .{ linker_args.items[i], @errorName(err) });
};
} else if (mem.eql(u8, arg, "--minor-subsystem-version")) {
i += 1;
@ -1997,7 +2009,7 @@ fn buildOutputType(
linker_args.items[i],
10,
) catch |err| {
fatal("unable to parse '{s}': {s}", .{ arg, @errorName(err) });
fatal("unable to parse minor subsystem version '{s}': {s}", .{ linker_args.items[i], @errorName(err) });
};
} else if (mem.eql(u8, arg, "-framework")) {
i += 1;
@ -2996,6 +3008,7 @@ fn buildOutputType(
.headerpad_size = headerpad_size,
.headerpad_max_install_names = headerpad_max_install_names,
.dead_strip_dylibs = dead_strip_dylibs,
.reference_trace = reference_trace,
}) catch |err| switch (err) {
error.LibCUnavailable => {
const target = target_info.target;
@ -3744,6 +3757,8 @@ pub const usage_build =
\\Options:
\\ -fstage1 Force using bootstrap compiler as the codegen backend
\\ -fno-stage1 Prevent using bootstrap compiler as the codegen backend
\\ -freference-trace[=num] How many lines of reference trace should be shown per compile error
\\ -fno-reference-trace Disable reference trace
\\ --build-file [file] Override path to build.zig
\\ --cache-dir [path] Override path to local Zig cache directory
\\ --global-cache-dir [path] Override path to global Zig cache directory
@ -3816,6 +3831,12 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
} else if (mem.eql(u8, arg, "-fno-stage1")) {
use_stage1 = false;
try child_argv.append(arg);
} else if (mem.eql(u8, arg, "-freference-trace")) {
try child_argv.append(arg);
} else if (mem.startsWith(u8, arg, "-freference-trace=")) {
try child_argv.append(arg);
} else if (mem.eql(u8, arg, "-fno-reference-trace")) {
try child_argv.append(arg);
}
}
try child_argv.append(arg);

View File

@ -232,6 +232,7 @@ const Writer = struct {
.make_ptr_const,
.validate_deref,
.overflow_arithmetic_ptr,
.check_comptime_control_flow,
=> try self.writeUnNode(stream, inst),
.ref,
@ -406,7 +407,6 @@ const Writer = struct {
.alloc_inferred_comptime_mut,
.ret_ptr,
.ret_type,
.check_comptime_control_flow,
=> try self.writeNode(stream, inst),
.error_value,

View File

@ -1551,6 +1551,7 @@ pub const TestContext = struct {
.self_exe_path = zig_exe_path,
// TODO instead of turning off color, pass in a std.Progress.Node
.color = .off,
.reference_trace = 0,
// TODO: force self-hosted linkers with stage2 backend to avoid LLD creeping in
// until the auto-select mechanism deems them worthy
.use_lld = switch (case.backend) {

View File

@ -2391,12 +2391,15 @@ pub const Value = extern union {
union_obj.val.hash(active_field_ty, hasher, mod);
},
.Fn => {
const func: *Module.Fn = val.castTag(.function).?.data;
// Note that his hashes the *Fn rather than the *Decl. This is
// Note that this hashes the *Fn/*ExternFn rather than the *Decl. This is
// to differentiate function bodies from function pointers.
// This is currently redundant since we already hash the zig type tag
// at the top of this function.
std.hash.autoHash(hasher, func);
if (val.castTag(.function)) |func| {
std.hash.autoHash(hasher, func.data);
} else if (val.castTag(.extern_fn)) |func| {
std.hash.autoHash(hasher, func.data);
} else unreachable;
},
.Frame => {
@panic("TODO implement hashing frame values");
@ -2775,6 +2778,9 @@ pub const Value = extern union {
const tuple = ty.tupleFields();
return tuple.values[index];
}
if (ty.structFieldValueComptime(index)) |some| {
return some;
}
unreachable;
},
.undef => return Value.undef,

View File

@ -87,6 +87,10 @@ test {
_ = @import("behavior/bugs/12486.zig");
_ = @import("behavior/bugs/12680.zig");
_ = @import("behavior/bugs/12776.zig");
_ = @import("behavior/bugs/12786.zig");
_ = @import("behavior/bugs/12794.zig");
_ = @import("behavior/bugs/12801-1.zig");
_ = @import("behavior/bugs/12801-2.zig");
_ = @import("behavior/byteswap.zig");
_ = @import("behavior/byval_arg_var.zig");
_ = @import("behavior/call.zig");

View File

@ -0,0 +1,28 @@
const std = @import("std");
fn NamespacedGlobals(comptime modules: anytype) type {
return @Type(.{
.Struct = .{
.layout = .Auto,
.is_tuple = false,
.fields = &.{
.{
.name = "globals",
.field_type = modules.mach.globals,
.default_value = null,
.is_comptime = false,
.alignment = @alignOf(modules.mach.globals),
},
},
.decls = &[_]std.builtin.Type.Declaration{},
},
});
}
test {
_ = NamespacedGlobals(.{
.mach = .{
.globals = struct {},
},
});
}

View File

@ -0,0 +1,38 @@
const std = @import("std");
fn NamespacedComponents(comptime modules: anytype) type {
return @Type(.{
.Struct = .{
.layout = .Auto,
.is_tuple = false,
.fields = &.{.{
.name = "components",
.field_type = @TypeOf(modules.components),
.default_value = null,
.is_comptime = false,
.alignment = @alignOf(@TypeOf(modules.components)),
}},
.decls = &[_]std.builtin.Type.Declaration{},
},
});
}
fn namespacedComponents(comptime modules: anytype) NamespacedComponents(modules) {
var x: NamespacedComponents(modules) = undefined;
x.components = modules.components;
return x;
}
pub fn World(comptime modules: anytype) type {
const all_components = namespacedComponents(modules);
_ = all_components;
return struct {};
}
test {
_ = World(.{
.components = .{
.location = struct {},
},
});
}

View File

@ -0,0 +1,13 @@
const std = @import("std");
const builtin = @import("builtin");
comptime capacity: fn () u64 = capacity_,
fn capacity_() u64 {
return 64;
}
test {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
try std.testing.expect((@This(){}).capacity() == 64);
}

View File

@ -0,0 +1,24 @@
const std = @import("std");
const builtin = @import("builtin");
const Auto = struct {
auto: [max_len]u8 = undefined,
offset: u64 = 0,
comptime capacity: *const fn () u64 = capacity,
const max_len: u64 = 32;
fn capacity() u64 {
return max_len;
}
};
test {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
const a: Auto = .{ .offset = 16, .capacity = Auto.capacity };
try std.testing.expect(a.capacity() == 32);
try std.testing.expect((a.capacity)() == 32);
}

View File

@ -1337,3 +1337,64 @@ test "lazy value is resolved as slice operand" {
try expect(@ptrToInt(ptr1) == @ptrToInt(ptr2));
try expect(ptr1.len == ptr2.len);
}
test "break from inline loop depends on runtime condition" {
const S = struct {
fn foo(a: u8) bool {
return a == 4;
}
};
const arr = [_]u8{ 1, 2, 3, 4 };
{
const blk = blk: {
inline for (arr) |val| {
if (S.foo(val)) {
break :blk val;
}
}
return error.TestFailed;
};
try expect(blk == 4);
}
{
comptime var i = 0;
const blk = blk: {
inline while (i < arr.len) : (i += 1) {
const val = arr[i];
if (S.foo(val)) {
break :blk val;
}
}
return error.TestFailed;
};
try expect(blk == 4);
}
}
test "inline for inside a runtime condition" {
var a = false;
if (a) {
const arr = .{ 1, 2, 3 };
inline for (arr) |val| {
if (val < 3) continue;
try expect(val == 3);
}
}
}
test "continue in inline for inside a comptime switch" {
const arr = .{ 1, 2, 3 };
var count: u8 = 0;
switch (arr[1]) {
2 => {
inline for (arr) |val| {
if (val == 2) continue;
count += val;
}
},
else => {},
}
try expect(count == 4);
}

View File

@ -358,3 +358,14 @@ test "nested generic function" {
try expect(@typeInfo(@TypeOf(S.g)).Fn.is_generic);
try S.foo(u32, S.bar, 123);
}
test "extern function used as generic parameter" {
const S = struct {
extern fn foo() void;
extern fn bar() void;
inline fn baz(comptime _: anytype) type {
return struct {};
}
};
try expect(S.baz(S.foo) != S.baz(S.bar));
}

View File

@ -0,0 +1,21 @@
pub export fn entry() void {
var a = false;
const arr1 = .{ 1, 2, 3 };
loop: inline for (arr1) |val1| {
_ = val1;
if (a) {
const arr = .{ 1, 2, 3 };
inline for (arr) |val| {
if (val < 3) continue :loop;
if (val != 3) unreachable;
}
}
}
}
// error
// backend=stage2
// target=native
//
// :9:30: error: comptime control flow inside runtime block
// :6:13: note: runtime control flow here

View File

@ -0,0 +1,25 @@
fn foo() bool {
return false;
}
pub export fn entry() void {
const Widget = union(enum) { a: u0 };
comptime var a = 1;
const info = @typeInfo(Widget).Union;
inline for (info.fields) |field| {
if (foo()) {
switch (field.field_type) {
u0 => a = 2,
else => unreachable,
}
}
}
}
// error
// backend=stage2
// target=native
//
// :13:27: error: store to comptime variable depends on runtime condition
// :11:16: note: runtime condition here

View File

@ -10,4 +10,3 @@ export fn entry() void {
// target=native
//
// :1:1: error: dependency loop detected
// :2:19: note: referenced here