compiler: allow files with AstGen errors to undergo semantic analysis

This commit enhances AstGen to introduce a form of error resilience
which allows valid ZIR to be emitted even when AstGen errors occur.

When a non-fatal AstGen error (e.g. `appendErrorNode`) occurs, ZIR
generation is not affected; the error is added to `astgen.errors` and
ultimately to the errors stored in `extra`, but valid ZIR is still
emitted regardless. Fatal AstGen errors (e.g. `failNode`) are a bit
trickier. These errors return `error.AnalysisFail`, which is propagated
up the stack. In theory, any parent expression can catch this error and
handle it, continuing ZIR generation whilst throwing away whatever was
lost. For now, we only do this in one place: when creating declarations.
If a call to `fnDecl`, `comptimeDecl`, `globalVarDecl`, etc., returns
`error.AnalysisFail`, the `declaration` instruction is still created,
but its body simply contains the new `extended(astgen_error())`
instruction, which instructs Sema to terminate semantic analysis with a
transitive error. This means that a fatal AstGen error causes the
innermost declaration containing the error to fail, but the rest of the
file remains intact.
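
As a rough illustration (declaration names invented for the example),
this is the kind of source file the new behaviour helps with:

    // `foo` hits a fatal AstGen error (use of an undeclared identifier),
    // so its `declaration` body is lowered to `extended(astgen_error())`
    // and only `foo` fails semantic analysis.
    fn foo() void {
        _ = undeclared;
    }

    // `bar` is untouched: it still gets valid ZIR and undergoes semantic
    // analysis as normal.
    pub fn bar() u32 {
        return 42;
    }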

If a source file contains parse errors, or an `error.AnalysisFail`
happens when lowering the top-level struct (e.g. there is an error in
one of its fields, or a name has multiple declarations), then lowering
for the entire file fails. Alongside the existing `Zir.hasCompileErrors`
query, this commit introduces `Zir.loweringFailed`, which returns `true`
only in this case.
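
For example, a parse error anywhere in the file has this effect, as does
a top-level problem like the following (a deliberately tiny sketch):

    // Two container-level declarations share a name, so the root struct
    // itself fails to lower and no ZIR is emitted for the file.
    fn entry() void {}
    fn entry() void {}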

The end result here is that files with AstGen failures will almost
always still emit valid ZIR, and hence can undergo semantic analysis on
the parts of the file which are (from AstGen's perspective) valid. This
is a noteworthy improvement to UX, but the main motivation here is
actually incremental compilation. Previously, an AstGen failure caused
a lot of semantic analysis work to be thrown out: every `AnalUnit` in
the file required re-analysis, both to trigger the necessary transitive
failures and to remove stored compile errors which would no longer make
sense (a fresh compilation of the same code would not emit those errors,
because the units they applied to would fail sooner by referencing a
failed file). Now, this case only applies when a file has
severe top-level errors, which is far less common than something like
having an unused variable.

Lastly, this commit changes a few `AstGen` errors from non-fatal to
fatal, and vice versa. If there is still a
reasonable way to continue AstGen and lower to ZIR after an error, it is
non-fatal; otherwise, it is fatal. For instance, `comptime const`, while
redundant syntax, has a clear meaning we can lower; on the other hand,
using an undeclared identifier has no sane lowering, so must trigger a
fatal error.
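
A rough sketch of the distinction (illustrative code only):

    test "redundant comptime const" {
        // Non-fatal: `comptime const` is redundant, but its meaning is
        // clear, so the error is reported and the initializer is still
        // lowered as if it were wrapped in `comptime`.
        comptime const x = 123;
        _ = x;
    }

    // Fatal: there is no sensible ZIR for an undeclared identifier, so
    // this declaration's body is just `extended(astgen_error())`.
    const y = does_not_exist;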

mlugg 2024-11-12 19:33:50 +00:00
parent cbc05e0b1d
commit 4d7818a76a
17 changed files with 238 additions and 93 deletions

View File

@ -74,6 +74,12 @@ pub fn MultiArrayList(comptime T: type) type {
len: usize,
capacity: usize,
pub const empty: Slice = .{
.ptrs = undefined,
.len = 0,
.capacity = 0,
};
pub fn items(self: Slice, comptime field: Field) []FieldType(field) {
const F = FieldType(field);
if (self.capacity == 0) {

View File

@ -172,9 +172,9 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
};
defer gz_instructions.deinit(gpa);
// The AST -> ZIR lowering process assumes an AST that does not have any
// parse errors.
if (tree.errors.len == 0) {
// The AST -> ZIR lowering process assumes an AST that does not have any parse errors.
// Parse errors, or AstGen errors in the root struct, are considered "fatal", so we emit no ZIR.
const fatal = if (tree.errors.len == 0) fatal: {
if (AstGen.structDeclInner(
&gen_scope,
&gen_scope.base,
@ -184,13 +184,15 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
0,
)) |struct_decl_ref| {
assert(struct_decl_ref.toIndex().? == .main_struct_inst);
break :fatal false;
} else |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {}, // Handled via compile_errors below.
error.AnalysisFail => break :fatal true, // Handled via compile_errors below.
}
} else {
} else fatal: {
try lowerAstErrors(&astgen);
}
break :fatal true;
};
const err_index = @intFromEnum(Zir.ExtraIndex.compile_errors);
if (astgen.compile_errors.items.len == 0) {
@ -228,8 +230,8 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
}
}
return Zir{
.instructions = astgen.instructions.toOwnedSlice(),
return .{
.instructions = if (fatal) .empty else astgen.instructions.toOwnedSlice(),
.string_bytes = try astgen.string_bytes.toOwnedSlice(gpa),
.extra = try astgen.extra.toOwnedSlice(gpa),
};
@ -2101,7 +2103,7 @@ fn comptimeExprAst(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
if (gz.is_comptime) {
return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
try astgen.appendErrorNode(node, "redundant comptime keyword in already comptime scope", .{});
}
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
@ -3275,6 +3277,9 @@ fn varDecl(
try astgen.appendErrorTok(comptime_token, "'comptime const' is redundant; instead wrap the initialization expression with 'comptime'", .{});
}
// `comptime const` is a non-fatal error; treat it like the init was marked `comptime`.
const force_comptime = var_decl.comptime_token != null;
// Depending on the type of AST the initialization expression is, we may need an lvalue
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
@ -3288,7 +3293,7 @@ fn varDecl(
} else .{ .rl = .none, .ctx = .const_init };
const prev_anon_name_strategy = gz.anon_name_strategy;
gz.anon_name_strategy = .dbg_var;
const init_inst = try reachableExpr(gz, scope, result_info, var_decl.ast.init_node, node);
const init_inst = try reachableExprComptime(gz, scope, result_info, var_decl.ast.init_node, node, force_comptime);
gz.anon_name_strategy = prev_anon_name_strategy;
try gz.addDbgVar(.dbg_var_val, ident_name, init_inst);
@ -3358,7 +3363,7 @@ fn varDecl(
const prev_anon_name_strategy = gz.anon_name_strategy;
gz.anon_name_strategy = .dbg_var;
defer gz.anon_name_strategy = prev_anon_name_strategy;
const init_inst = try reachableExpr(gz, scope, init_result_info, var_decl.ast.init_node, node);
const init_inst = try reachableExprComptime(gz, scope, init_result_info, var_decl.ast.init_node, node, force_comptime);
// The const init expression may have modified the error return trace, so signal
// to Sema that it should save the new index for restoring later.
@ -3503,7 +3508,7 @@ fn assignDestructure(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerErro
const full = tree.assignDestructure(node);
if (full.comptime_token != null and gz.is_comptime) {
return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
return astgen.appendErrorNode(node, "redundant comptime keyword in already comptime scope", .{});
}
// If this expression is marked comptime, we must wrap the whole thing in a comptime block.
@ -3562,7 +3567,7 @@ fn assignDestructureMaybeDecls(
const full = tree.assignDestructure(node);
if (full.comptime_token != null and gz.is_comptime) {
return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
try astgen.appendErrorNode(node, "redundant comptime keyword in already comptime scope", .{});
}
const is_comptime = full.comptime_token != null or gz.is_comptime;
@ -3676,6 +3681,7 @@ fn assignDestructureMaybeDecls(
if (full.comptime_token != null and !any_non_const_variables) {
try astgen.appendErrorTok(full.comptime_token.?, "'comptime const' is redundant; instead wrap the initialization expression with 'comptime'", .{});
// Note that this is non-fatal; we will still evaluate at comptime.
}
// If this expression is marked comptime, we must wrap it in a comptime block.
@ -4125,8 +4131,8 @@ fn fnDecl(
// The source slice is added towards the *end* of this function.
astgen.src_hasher.update(std.mem.asBytes(&astgen.source_column));
// missing function name already happened in scanContainer()
const fn_name_token = fn_proto.name_token orelse return error.AnalysisFail;
// missing function name already checked in scanContainer()
const fn_name_token = fn_proto.name_token.?;
// We insert this at the beginning so that its instruction index marks the
// start of the top level declaration.
@ -5167,8 +5173,7 @@ fn structDeclInner(
if (is_comptime) {
switch (layout) {
.@"packed" => return astgen.failTok(member.comptime_token.?, "packed struct fields cannot be marked comptime", .{}),
.@"extern" => return astgen.failTok(member.comptime_token.?, "extern struct fields cannot be marked comptime", .{}),
.@"packed", .@"extern" => return astgen.failTok(member.comptime_token.?, "{s} struct fields cannot be marked comptime", .{@tagName(layout)}),
.auto => any_comptime_fields = true,
}
} else {
@ -5195,7 +5200,7 @@ fn structDeclInner(
if (have_align) {
if (layout == .@"packed") {
try astgen.appendErrorNode(member.ast.align_expr, "unable to override alignment of packed struct fields", .{});
return astgen.failNode(member.ast.align_expr, "unable to override alignment of packed struct fields", .{});
}
any_aligned_fields = true;
const align_ref = try expr(&block_scope, &namespace.base, coerced_align_ri, member.ast.align_expr);
@ -5289,8 +5294,7 @@ fn tupleDecl(
switch (layout) {
.auto => {},
.@"extern" => return astgen.failNode(node, "extern tuples are not supported", .{}),
.@"packed" => return astgen.failNode(node, "packed tuples are not supported", .{}),
.@"extern", .@"packed" => return astgen.failNode(node, "{s} tuples are not supported", .{@tagName(layout)}),
}
if (backing_int_node != 0) {
@ -5673,7 +5677,7 @@ fn containerDecl(
};
};
if (counts.nonexhaustive_node != 0 and container_decl.ast.arg == 0) {
try astgen.appendErrorNodeNotes(
return astgen.failNodeNotes(
node,
"non-exhaustive enum missing integer tag type",
.{},
@ -5896,9 +5900,19 @@ fn containerMember(
const full = tree.fullFnProto(&buf, member_node).?;
const body = if (node_tags[member_node] == .fn_decl) node_datas[member_node].rhs else 0;
const prev_decl_index = wip_members.decl_index;
astgen.fnDecl(gz, scope, wip_members, member_node, body, full) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {},
error.AnalysisFail => {
wip_members.decl_index = prev_decl_index;
try addFailedDeclaration(
wip_members,
gz,
.{ .named = full.name_token.? },
full.ast.proto_node,
full.visib_token != null,
);
},
};
},
@ -5907,28 +5921,77 @@ fn containerMember(
.simple_var_decl,
.aligned_var_decl,
=> {
astgen.globalVarDecl(gz, scope, wip_members, member_node, tree.fullVarDecl(member_node).?) catch |err| switch (err) {
const full = tree.fullVarDecl(member_node).?;
const prev_decl_index = wip_members.decl_index;
astgen.globalVarDecl(gz, scope, wip_members, member_node, full) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {},
error.AnalysisFail => {
wip_members.decl_index = prev_decl_index;
try addFailedDeclaration(
wip_members,
gz,
.{ .named = full.ast.mut_token + 1 },
member_node,
full.visib_token != null,
);
},
};
},
.@"comptime" => {
const prev_decl_index = wip_members.decl_index;
astgen.comptimeDecl(gz, scope, wip_members, member_node) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {},
error.AnalysisFail => {
wip_members.decl_index = prev_decl_index;
try addFailedDeclaration(
wip_members,
gz,
.@"comptime",
member_node,
false,
);
},
};
},
.@"usingnamespace" => {
const prev_decl_index = wip_members.decl_index;
astgen.usingnamespaceDecl(gz, scope, wip_members, member_node) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {},
error.AnalysisFail => {
wip_members.decl_index = prev_decl_index;
try addFailedDeclaration(
wip_members,
gz,
.@"usingnamespace",
member_node,
is_pub: {
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
const main_token = main_tokens[member_node];
break :is_pub main_token > 0 and token_tags[main_token - 1] == .keyword_pub;
},
);
},
};
},
.test_decl => {
const prev_decl_index = wip_members.decl_index;
// We need to have *some* decl here so that the decl count matches what's expected.
// Since it doesn't strictly matter *what* this is, let's save ourselves the trouble
// of duplicating the test name logic, and just assume this is an unnamed test.
astgen.testDecl(gz, scope, wip_members, member_node) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {},
error.AnalysisFail => {
wip_members.decl_index = prev_decl_index;
try addFailedDeclaration(
wip_members,
gz,
.unnamed_test,
member_node,
false,
);
},
};
},
else => unreachable,
@ -6140,7 +6203,7 @@ fn orelseCatchExpr(
const payload = payload_token orelse break :blk &else_scope.base;
const err_str = tree.tokenSlice(payload);
if (mem.eql(u8, err_str, "_")) {
return astgen.failTok(payload, "discard of error capture; omit it instead", .{});
try astgen.appendErrorTok(payload, "discard of error capture; omit it instead", .{});
}
const err_name = try astgen.identAsString(payload);
@ -6599,7 +6662,7 @@ fn whileExpr(
const is_inline = while_full.inline_token != null;
if (parent_gz.is_comptime and is_inline) {
return astgen.failTok(while_full.inline_token.?, "redundant inline keyword in comptime scope", .{});
try astgen.appendErrorTok(while_full.inline_token.?, "redundant inline keyword in comptime scope", .{});
}
const loop_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .loop;
const loop_block = try parent_gz.makeBlockInst(loop_tag, node);
@ -6889,7 +6952,7 @@ fn forExpr(
const is_inline = for_full.inline_token != null;
if (parent_gz.is_comptime and is_inline) {
return astgen.failTok(for_full.inline_token.?, "redundant inline keyword in comptime scope", .{});
try astgen.appendErrorTok(for_full.inline_token.?, "redundant inline keyword in comptime scope", .{});
}
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
@ -6950,7 +7013,7 @@ fn forExpr(
.none;
if (end_val == .none and is_discard) {
return astgen.failTok(ident_tok, "discard of unbounded counter", .{});
try astgen.appendErrorTok(ident_tok, "discard of unbounded counter", .{});
}
const start_is_zero = nodeIsTriviallyZero(tree, start_node);
@ -7467,6 +7530,7 @@ fn switchExprErrUnion(
const err_name = blk: {
const err_str = tree.tokenSlice(error_payload);
if (mem.eql(u8, err_str, "_")) {
// This is fatal because we already know we're switching on the captured error.
return astgen.failTok(error_payload, "discard of error capture; omit it instead", .{});
}
const err_name = try astgen.identAsString(error_payload);
@ -7521,7 +7585,7 @@ fn switchExprErrUnion(
const capture_slice = tree.tokenSlice(capture_token);
if (mem.eql(u8, capture_slice, "_")) {
return astgen.failTok(capture_token, "discard of error capture; omit it instead", .{});
try astgen.appendErrorTok(capture_token, "discard of error capture; omit it instead", .{});
}
const tag_name = try astgen.identAsString(capture_token);
try astgen.detectLocalShadowing(&case_scope.base, tag_name, capture_token, capture_slice, .capture);
@ -8018,7 +8082,7 @@ fn switchExpr(
break :blk payload_sub_scope;
const tag_slice = tree.tokenSlice(tag_token);
if (mem.eql(u8, tag_slice, "_")) {
return astgen.failTok(tag_token, "discard of tag capture; omit it instead", .{});
try astgen.appendErrorTok(tag_token, "discard of tag capture; omit it instead", .{});
} else if (case.inline_token == null) {
return astgen.failTok(tag_token, "tag capture on non-inline prong", .{});
}
@ -13699,6 +13763,8 @@ fn scanContainer(
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
var any_invalid_declarations = false;
// This type forms a linked list of source tokens declaring the same name.
const NameEntry = struct {
tok: Ast.TokenIndex,
@ -13758,6 +13824,7 @@ fn scanContainer(
const ident = main_tokens[member_node] + 1;
if (token_tags[ident] != .identifier) {
try astgen.appendErrorNode(member_node, "missing function name", .{});
any_invalid_declarations = true;
continue;
}
break :blk .{ .decl, ident };
@ -13853,6 +13920,7 @@ fn scanContainer(
token_bytes,
}),
});
any_invalid_declarations = true;
continue;
}
@ -13870,6 +13938,7 @@ fn scanContainer(
.{},
),
});
any_invalid_declarations = true;
break;
}
s = local_val.parent;
@ -13886,6 +13955,7 @@ fn scanContainer(
.{},
),
});
any_invalid_declarations = true;
break;
}
s = local_ptr.parent;
@ -13897,7 +13967,10 @@ fn scanContainer(
};
}
if (!any_duplicates) return decl_count;
if (!any_duplicates) {
if (any_invalid_declarations) return error.AnalysisFail;
return decl_count;
}
for (names.keys(), names.values()) |name, first| {
if (first.next == null) continue;
@ -13909,6 +13982,7 @@ fn scanContainer(
try notes.append(astgen.arena, try astgen.errNoteNode(namespace.node, "{s} declared here", .{@tagName(container_kind)}));
const name_duped = try astgen.arena.dupe(u8, mem.span(astgen.nullTerminatedString(name)));
try astgen.appendErrorTokNotes(first.tok, "duplicate {s} member name '{s}'", .{ @tagName(container_kind), name_duped }, notes.items);
any_invalid_declarations = true;
}
for (test_names.keys(), test_names.values()) |name, first| {
@ -13921,6 +13995,7 @@ fn scanContainer(
try notes.append(astgen.arena, try astgen.errNoteNode(namespace.node, "{s} declared here", .{@tagName(container_kind)}));
const name_duped = try astgen.arena.dupe(u8, mem.span(astgen.nullTerminatedString(name)));
try astgen.appendErrorTokNotes(first.tok, "duplicate test name '{s}'", .{name_duped}, notes.items);
any_invalid_declarations = true;
}
for (decltest_names.keys(), decltest_names.values()) |name, first| {
@ -13933,9 +14008,11 @@ fn scanContainer(
try notes.append(astgen.arena, try astgen.errNoteNode(namespace.node, "{s} declared here", .{@tagName(container_kind)}));
const name_duped = try astgen.arena.dupe(u8, mem.span(astgen.nullTerminatedString(name)));
try astgen.appendErrorTokNotes(first.tok, "duplicate decltest '{s}'", .{name_duped}, notes.items);
any_invalid_declarations = true;
}
return decl_count;
assert(any_invalid_declarations);
return error.AnalysisFail;
}
fn isInferred(astgen: *AstGen, ref: Zir.Inst.Ref) bool {
@ -14083,6 +14160,37 @@ const DeclarationName = union(enum) {
@"usingnamespace",
};
fn addFailedDeclaration(
wip_members: *WipMembers,
gz: *GenZir,
name: DeclarationName,
src_node: Ast.Node.Index,
is_pub: bool,
) !void {
const decl_inst = try gz.makeDeclaration(src_node);
wip_members.nextDecl(decl_inst);
var decl_gz = gz.makeSubBlock(&gz.base); // scope doesn't matter here
_ = try decl_gz.add(.{
.tag = .extended,
.data = .{ .extended = .{
.opcode = .astgen_error,
.small = undefined,
.operand = undefined,
} },
});
try setDeclaration(
decl_inst,
@splat(0), // use a fixed hash to represent an AstGen failure; we don't care about source changes if AstGen still failed!
name,
gz.astgen.source_line,
is_pub,
false, // we don't care about exports since semantic analysis will fail
.empty,
&decl_gz,
null,
);
}
/// Sets all extra data for a `declaration` instruction.
/// Unstacks `value_gz`, `align_gz`, `linksection_gz`, and `addrspace_gz`.
fn setDeclaration(

View File

@ -120,7 +120,21 @@ pub fn bodySlice(zir: Zir, start: usize, len: usize) []Inst.Index {
}
pub fn hasCompileErrors(code: Zir) bool {
return code.extra[@intFromEnum(ExtraIndex.compile_errors)] != 0;
if (code.extra[@intFromEnum(ExtraIndex.compile_errors)] != 0) {
return true;
} else {
assert(code.instructions.len != 0); // i.e. lowering did not fail
return false;
}
}
pub fn loweringFailed(code: Zir) bool {
if (code.instructions.len == 0) {
assert(code.hasCompileErrors());
return true;
} else {
return false;
}
}
pub fn deinit(code: *Zir, gpa: Allocator) void {
@ -2089,7 +2103,14 @@ pub const Inst = struct {
/// `small` is an `Inst.InplaceOp`.
inplace_arith_result_ty,
/// Marks a statement that can be stepped to but produces no code.
/// `operand` and `small` are ignored.
dbg_empty_stmt,
/// At this point, AstGen encountered a fatal error which terminated ZIR lowering for this body.
/// A file-level error has been reported. Sema should terminate semantic analysis.
/// `operand` and `small` are ignored.
/// This instruction is always `noreturn`, however, it is not considered as such by ZIR-level queries. This allows AstGen to assume that
/// any code may have gone here, avoiding false-positive "unreachable code" errors.
astgen_error,
pub const InstData = struct {
opcode: Extended,
@ -4065,6 +4086,7 @@ fn findDeclsInner(
.inplace_arith_result_ty,
.tuple_decl,
.dbg_empty_stmt,
.astgen_error,
=> return,
// `@TypeOf` has a body.

View File

@ -1360,6 +1360,7 @@ fn analyzeBodyInner(
i += 1;
continue;
},
.astgen_error => return error.AnalysisFail,
};
},

View File

@ -185,11 +185,11 @@ pub fn astGenFile(
log.debug("AstGen cached success: {s}", .{file.sub_file_path});
if (file.zir.hasCompileErrors()) {
{
comp.mutex.lock();
defer comp.mutex.unlock();
try zcu.failed_files.putNoClobber(gpa, file, null);
}
comp.mutex.lock();
defer comp.mutex.unlock();
try zcu.failed_files.putNoClobber(gpa, file, null);
}
if (file.zir.loweringFailed()) {
file.status = .astgen_failure;
return error.AnalysisFail;
}
@ -226,7 +226,7 @@ pub fn astGenFile(
// single-threaded context, so we need to keep both versions around
// until that point in the pipeline. Previous ZIR data is freed after
// that.
if (file.zir_loaded and !file.zir.hasCompileErrors()) {
if (file.zir_loaded and !file.zir.loweringFailed()) {
assert(file.prev_zir == null);
const prev_zir_ptr = try gpa.create(Zir);
file.prev_zir = prev_zir_ptr;
@ -321,11 +321,11 @@ pub fn astGenFile(
};
if (file.zir.hasCompileErrors()) {
{
comp.mutex.lock();
defer comp.mutex.unlock();
try zcu.failed_files.putNoClobber(gpa, file, null);
}
comp.mutex.lock();
defer comp.mutex.unlock();
try zcu.failed_files.putNoClobber(gpa, file, null);
}
if (file.zir.loweringFailed()) {
file.status = .astgen_failure;
return error.AnalysisFail;
}
@ -363,7 +363,7 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
.file = file,
.inst_map = .{},
};
if (!new_zir.hasCompileErrors()) {
if (!new_zir.loweringFailed()) {
try Zcu.mapOldZirToNew(gpa, old_zir.*, file.zir, &gop.value_ptr.inst_map);
}
}
@ -379,20 +379,19 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
const file = updated_file.file;
if (file.zir.hasCompileErrors()) {
// If we mark this as outdated now, users of this inst will just get a transitive analysis failure.
// Ultimately, they would end up throwing out potentially useful analysis results.
// So, do nothing. We already have the file failure -- that's sufficient for now!
continue;
}
const old_inst = tracked_inst.inst.unwrap() orelse continue; // we can't continue tracking lost insts
const tracked_inst_index = (InternPool.TrackedInst.Index.Unwrapped{
.tid = @enumFromInt(tid),
.index = @intCast(tracked_inst_unwrapped_index),
}).wrap(ip);
const new_inst = updated_file.inst_map.get(old_inst) orelse {
// Tracking failed for this instruction. Invalidate associated `src_hash` deps.
log.debug("tracking failed for %{d}", .{old_inst});
// Tracking failed for this instruction.
// This may be due to changes in the ZIR, or AstGen might have failed due to a very broken file.
// Either way, invalidate associated `src_hash` deps.
log.debug("tracking failed for %{d}{s}", .{
old_inst,
if (file.zir.loweringFailed()) " due to AstGen failure" else "",
});
tracked_inst.inst = .lost;
try zcu.markDependeeOutdated(.not_marked_po, .{ .src_hash = tracked_inst_index });
continue;
@ -494,8 +493,8 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
for (updated_files.keys(), updated_files.values()) |file_index, updated_file| {
const file = updated_file.file;
if (file.zir.hasCompileErrors()) {
// Keep `prev_zir` around: it's the last non-error ZIR.
if (file.zir.loweringFailed()) {
// Keep `prev_zir` around: it's the last usable ZIR.
// Don't update the namespace, as we have no new data to update *to*.
} else {
const prev_zir = file.prev_zir.?;

View File

@ -6457,7 +6457,7 @@ fn cmdChangelist(
file.zir_loaded = true;
defer file.zir.deinit(gpa);
if (file.zir.hasCompileErrors()) {
if (file.zir.loweringFailed()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
@ -6492,7 +6492,7 @@ fn cmdChangelist(
file.zir = try AstGen.generate(gpa, new_tree);
file.zir_loaded = true;
if (file.zir.hasCompileErrors()) {
if (file.zir.loweringFailed()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();

View File

@ -623,6 +623,7 @@ const Writer = struct {
.inplace_arith_result_ty => try self.writeInplaceArithResultTy(stream, extended),
.dbg_empty_stmt => try stream.writeAll("))"),
.astgen_error => try stream.writeAll("))"),
}
}

View File

@ -1,11 +1,8 @@
const A = B;
test "Crash" {
pub const A = B;
export fn foo() void {
_ = @typeInfo(@This()).@"struct".decls[0];
}
// error
// backend=stage2
// target=native
// is_test=true
//
// :1:11: error: use of undeclared identifier 'B'
// :1:15: error: use of undeclared identifier 'B'

View File

@ -0,0 +1,17 @@
const a = bogus; // astgen error (undeclared identifier)
const b: u32 = "hi"; // sema error (type mismatch)
comptime {
_ = b;
@compileError("not hit because 'b' failed");
}
comptime {
@compileError("this should be hit");
}
// error
//
// :1:11: error: use of undeclared identifier 'bogus'
// :2:16: error: expected type 'u32', found '*const [2:0]u8'
// :10:5: error: this should be hit

View File

@ -1,13 +1,8 @@
fn func() bogus {}
fn func() bogus {}
export fn entry() usize {
return @sizeOf(@TypeOf(func));
}
fn func() void {}
fn func() void {}
// error
//
// :1:4: error: duplicate struct member name 'func'
// :2:4: note: duplicate name here
// :1:1: note: struct declared here
// :1:11: error: use of undeclared identifier 'bogus'
// :2:11: error: use of undeclared identifier 'bogus'

View File

@ -19,3 +19,5 @@ export fn entry() void {
//
// :4:5: error: unreachable code
// :4:25: note: control flow is diverted here
// :4:25: error: aoeu
// :1:36: note: called from here

View File

@ -1,22 +1,27 @@
comptime {
const a = "foo";
if (a == "foo") unreachable;
if (a != "foo") unreachable;
}
comptime {
const a = "foo";
if (a == ("foo")) unreachable; // intentionally allow
if (a == "foo") {} else unreachable;
}
comptime {
const a = "foo";
if (a != ("foo")) {} // intentionally allow
if (a == ("foo")) {} // intentionally allow
}
comptime {
const a = "foo";
switch (a) {
"foo" => unreachable,
else => {},
"foo" => {},
else => unreachable,
}
}
comptime {
const a = "foo";
switch (a) {
("foo") => unreachable, // intentionally allow
("foo") => {}, // intentionally allow
else => {},
}
}
@ -25,5 +30,6 @@ comptime {
// backend=stage2
// target=native
//
// :3:11: error: cannot compare strings with ==
// :12:9: error: cannot switch on strings
// :3:11: error: cannot compare strings with !=
// :7:11: error: cannot compare strings with ==
// :17:9: error: cannot switch on strings

View File

@ -1,6 +1,6 @@
export fn foo() void {
const a = 1;
struct {
_ = struct {
test a {}
};
}

View File

@ -28,10 +28,6 @@ fn foo() void {
_ = jd;
}
export fn entry() usize {
return @sizeOf(@TypeOf(foo));
}
// error
// backend=stage2
// target=native

View File

@ -7,7 +7,7 @@ export fn entry2() void {
@panic("");
}
export fn entry3() void {
@compileError("");
@compileError("expect to hit this");
@compileError("");
}
@ -21,3 +21,4 @@ export fn entry3() void {
// :6:5: note: control flow is diverted here
// :11:5: error: unreachable code
// :10:5: note: control flow is diverted here
// :10:5: error: expect to hit this

View File

@ -2,14 +2,8 @@
fn entry() void {}
fn entry() void {}
fn foo() void {
var foo = 1234;
}
// error
//
// :2:4: error: duplicate struct member name 'entry'
// :3:4: note: duplicate name here
// :2:1: note: struct declared here
// :6:9: error: local variable shadows declaration of 'foo'
// :5:1: note: declared here

View File

@ -1,6 +1,6 @@
pub fn main() void {
const x = 1;
const y, var z = .{ 2, 3 };
const y, var z: u32 = .{ 2, 3 };
}
// error