Mirror of https://github.com/ziglang/zig.git
incremental: fix errors not being deleted upon re-analysis
Previously, logic in `Compilation.getAllErrorsAlloc` was corrupting the `failed_analysis` hashmap. This meant that on updates after the initial update, attempts to remove entries from this map (because the `AnalUnit` in question is being re-analyzed) silently failed. This resulted in compile errors from earlier updates wrongly getting "stuck", i.e. never being removed. This commit also adds a few log calls which helped me to find this bug.
Commit: f0d5e0df4d
Parent: ba78d79228
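The core of the fix is a general property of `std.ArrayHashMap`: the map's hash index refers to entry positions, so sorting the backing `entries` list in place (as the removed `zcu.failed_analysis.entries.sort(...)` call did) leaves that index stale, and later removals such as those performed when a unit is re-analyzed can silently fail. The standalone sketch below shows the clone-then-sort pattern the commit adopts; it uses a hypothetical `u32` map rather than the compiler's `failed_analysis` map and is illustrative only.

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    var map: std.AutoArrayHashMapUnmanaged(u32, u32) = .empty;
    defer map.deinit(gpa);
    try map.put(gpa, 3, 30);
    try map.put(gpa, 1, 10);
    try map.put(gpa, 2, 20);

    // Sorting `map.entries` in place would reorder entries without updating the
    // map's hash index, so later lookups/removals could silently miss. Instead,
    // clone the entries and sort the copy, leaving `map` intact.
    var sorted = try map.entries.clone(gpa);
    defer sorted.deinit(gpa);

    const SortOrder = struct {
        keys: []const u32,
        pub fn lessThan(ctx: @This(), lhs_index: usize, rhs_index: usize) bool {
            return ctx.keys[lhs_index] < ctx.keys[rhs_index];
        }
    };
    sorted.sort(SortOrder{ .keys = sorted.items(.key) });

    for (sorted.items(.key), sorted.items(.value)) |k, v| {
        std.debug.print("{} => {}\n", .{ k, v });
    }

    // The original map's hash index was never touched, so removal by key still works.
    std.debug.assert(map.swapRemove(2));
}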
src/Compilation.zig
@@ -3102,9 +3102,10 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
for (zcu.failed_embed_files.values()) |error_msg| {
try addModuleErrorMsg(zcu, &bundle, error_msg.*);
}
{
+ var sorted_failed_analysis: std.AutoArrayHashMapUnmanaged(InternPool.AnalUnit, *Zcu.ErrorMsg).DataList.Slice = s: {
const SortOrder = struct {
zcu: *Zcu,
+ errors: []const *Zcu.ErrorMsg,
err: *?Error,

const Error = @typeInfo(
@@ -3113,12 +3114,11 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {

pub fn lessThan(ctx: @This(), lhs_index: usize, rhs_index: usize) bool {
if (ctx.err.*) |_| return lhs_index < rhs_index;
- const errors = ctx.zcu.failed_analysis.values();
- const lhs_src_loc = errors[lhs_index].src_loc.upgradeOrLost(ctx.zcu) orelse {
+ const lhs_src_loc = ctx.errors[lhs_index].src_loc.upgradeOrLost(ctx.zcu) orelse {
// LHS source location lost, so should never be referenced. Just sort it to the end.
return false;
};
- const rhs_src_loc = errors[rhs_index].src_loc.upgradeOrLost(ctx.zcu) orelse {
+ const rhs_src_loc = ctx.errors[rhs_index].src_loc.upgradeOrLost(ctx.zcu) orelse {
// RHS source location lost, so should never be referenced. Just sort it to the end.
return true;
};
@@ -3135,13 +3135,24 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
}).main;
}
};

+ // We can't directly sort `zcu.failed_analysis.entries`, because that would leave the map
+ // in an invalid state, and we need it intact for future incremental updates. The amount
+ // of data here is only as large as the number of analysis errors, so just dupe it all.
+ var entries = try zcu.failed_analysis.entries.clone(gpa);
+ errdefer entries.deinit(gpa);
+
var err: ?SortOrder.Error = null;
- // This leaves `zcu.failed_analysis` an invalid state, but we do not
- // need lookups anymore anyway.
- zcu.failed_analysis.entries.sort(SortOrder{ .zcu = zcu, .err = &err });
+ entries.sort(SortOrder{
+ .zcu = zcu,
+ .errors = entries.items(.value),
+ .err = &err,
+ });
if (err) |e| return e;
- }
- for (zcu.failed_analysis.keys(), zcu.failed_analysis.values()) |anal_unit, error_msg| {
+ break :s entries.slice();
+ };
+ defer sorted_failed_analysis.deinit(gpa);
+ for (sorted_failed_analysis.items(.key), sorted_failed_analysis.items(.value)) |anal_unit, error_msg| {
if (comp.incremental) {
const refs = try zcu.resolveReferences();
if (!refs.contains(anal_unit)) continue;
@@ -3158,6 +3169,11 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
// We'll try again once parsing succeeds.
if (!zcu.fileByIndex(file_index).okToReportErrors()) continue;

+ std.log.scoped(.zcu).debug("analysis error '{s}' reported from unit '{}'", .{
+ error_msg.msg,
+ zcu.fmtAnalUnit(anal_unit),
+ });
+
try addModuleErrorMsg(zcu, &bundle, error_msg.*);
if (zcu.cimport_errors.get(anal_unit)) |errors| {
for (errors.getMessages()) |err_msg_index| {
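The hunk above also adds one of the log calls mentioned in the commit message: a debug message under the `.zcu` scope. As a hedged, standalone illustration (not compiler code), the sketch below shows how such a scoped debug message is declared and surfaced in an ordinary Zig program; the scope name and message format mirror the added call, while the placeholder values are invented for the example.

const std = @import("std");

// Enable debug-level log output; the default level depends on the build mode,
// so `.debug` messages would otherwise be filtered out in release builds.
pub const std_options: std.Options = .{
    .log_level = .debug,
};

const log = std.log.scoped(.zcu);

pub fn main() void {
    // Mirrors the shape of the log call added above, with invented placeholder values.
    log.debug("analysis error '{s}' reported from unit '{s}'", .{
        "example error message",
        "func('main.main' [1234])",
    });
}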
src/Zcu.zig (12 lines changed)
@@ -3590,17 +3590,17 @@ fn formatAnalUnit(data: struct { unit: AnalUnit, zcu: *Zcu }, comptime fmt: []co
const cu = ip.getComptimeUnit(cu_id);
if (cu.zir_index.resolveFull(ip)) |resolved| {
const file_path = zcu.fileByIndex(resolved.file).sub_file_path;
- return writer.print("comptime(inst=('{s}', %{}))", .{ file_path, @intFromEnum(resolved.inst) });
+ return writer.print("comptime(inst=('{s}', %{}) [{}])", .{ file_path, @intFromEnum(resolved.inst), @intFromEnum(cu_id) });
} else {
- return writer.writeAll("comptime(inst=<list>)");
+ return writer.print("comptime(inst=<lost> [{}])", .{@intFromEnum(cu_id)});
}
},
- .nav_val => |nav| return writer.print("nav_val('{}')", .{ip.getNav(nav).fqn.fmt(ip)}),
- .nav_ty => |nav| return writer.print("nav_ty('{}')", .{ip.getNav(nav).fqn.fmt(ip)}),
- .type => |ty| return writer.print("ty('{}')", .{Type.fromInterned(ty).containerTypeName(ip).fmt(ip)}),
+ .nav_val => |nav| return writer.print("nav_val('{}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }),
+ .nav_ty => |nav| return writer.print("nav_ty('{}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }),
+ .type => |ty| return writer.print("ty('{}' [{}])", .{ Type.fromInterned(ty).containerTypeName(ip).fmt(ip), @intFromEnum(ty) }),
.func => |func| {
const nav = zcu.funcInfo(func).owner_nav;
- return writer.print("func('{}')", .{ip.getNav(nav).fqn.fmt(ip)});
+ return writer.print("func('{}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(func) });
},
}
}
test/incremental/fix_many_errors (new file, 52 lines)
@@ -0,0 +1,52 @@
#target=x86_64-linux-selfhosted
#target=x86_64-linux-cbe
#target=x86_64-windows-cbe
#update=initial version
#file=main.zig
pub fn main() !void {}
comptime { @compileError("c0"); }
comptime { @compileError("c1"); }
comptime { @compileError("c2"); }
comptime { @compileError("c3"); }
comptime { @compileError("c4"); }
comptime { @compileError("c5"); }
comptime { @compileError("c6"); }
comptime { @compileError("c7"); }
comptime { @compileError("c8"); }
comptime { @compileError("c9"); }
export fn f0() void { @compileError("f0"); }
export fn f1() void { @compileError("f1"); }
export fn f2() void { @compileError("f2"); }
export fn f3() void { @compileError("f3"); }
export fn f4() void { @compileError("f4"); }
export fn f5() void { @compileError("f5"); }
export fn f6() void { @compileError("f6"); }
export fn f7() void { @compileError("f7"); }
export fn f8() void { @compileError("f8"); }
export fn f9() void { @compileError("f9"); }
#expect_error=ignored
#update=fix all the errors
#file=main.zig
pub fn main() !void {}
comptime {}
comptime {}
comptime {}
comptime {}
comptime {}
comptime {}
comptime {}
comptime {}
comptime {}
comptime {}
export fn f0() void {}
export fn f1() void {}
export fn f2() void {}
export fn f3() void {}
export fn f4() void {}
export fn f5() void {}
export fn f6() void {}
export fn f7() void {}
export fn f8() void {}
export fn f9() void {}
const std = @import("std");
#expect_stdout=""