incremental: introduce file dependencies to handle AstGen failures

The re-analysis here is a little coarse; it'd be nice in the future to
have a way for an AstGen failure to preserve *all* analysis which
depends on the last success, and just hide the compile errors which
depend on it somehow. But I'm not sure how we'd achieve that, so this
works fine for now.

Resolves: #21223
mlugg 2024-10-16 15:56:48 +01:00
parent c6842b58d4
commit 22539783ad
8 changed files with 97 additions and 23 deletions
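
For orientation before the per-file diffs: the heart of the change is that every `@import` (and `@cImport`) site in Sema now registers a `.file` dependency on the imported file, and `updateZirRefs` marks that dependee outdated whenever a file's AstGen status changes between updates. A condensed sketch, stitched together from the Sema and Zcu.PerThread hunks below (surrounding code elided):

// At an `@import` site in Sema (condensed): bank a dependency on the file itself,
// so a later change in its AstGen outcome re-triggers analysis of this import.
try sema.declareDependency(.{ .file = result.file_index });
try pt.ensureFileAnalyzed(result.file_index);
const ty = zcu.fileRootType(result.file_index);
try sema.declareDependency(.{ .interned = ty });

// In updateZirRefs (condensed): if a file's AstGen outcome changed between updates,
// every unit that registered a `.file` dependency on it is marked outdated.
if (file.prev_status != file.status and file.prev_status != .never_loaded) {
    try zcu.markDependeeOutdated(.not_marked_po, .{ .file = file_index });
}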

View File

@@ -2901,6 +2901,7 @@ pub fn makeBinFileWritable(comp: *Compilation) !void {
const Header = extern struct {
intern_pool: extern struct {
thread_count: u32,
file_deps_len: u32,
src_hash_deps_len: u32,
nav_val_deps_len: u32,
namespace_deps_len: u32,
@@ -2943,6 +2944,7 @@ pub fn saveState(comp: *Compilation) !void {
const header: Header = .{
.intern_pool = .{
.thread_count = @intCast(ip.locals.len),
.file_deps_len = @intCast(ip.file_deps.count()),
.src_hash_deps_len = @intCast(ip.src_hash_deps.count()),
.nav_val_deps_len = @intCast(ip.nav_val_deps.count()),
.namespace_deps_len = @intCast(ip.namespace_deps.count()),
@@ -2969,6 +2971,8 @@ pub fn saveState(comp: *Compilation) !void {
addBuf(&bufs, mem.asBytes(&header));
addBuf(&bufs, mem.sliceAsBytes(pt_headers.items));
addBuf(&bufs, mem.sliceAsBytes(ip.file_deps.keys()));
addBuf(&bufs, mem.sliceAsBytes(ip.file_deps.values()));
addBuf(&bufs, mem.sliceAsBytes(ip.src_hash_deps.keys()));
addBuf(&bufs, mem.sliceAsBytes(ip.src_hash_deps.values()));
addBuf(&bufs, mem.sliceAsBytes(ip.nav_val_deps.keys()));

View File

@@ -17,6 +17,13 @@ tid_shift_31: if (single_threaded) u0 else std.math.Log2Int(u32),
/// Cached shift amount to put a `tid` in the top bits of a 32-bit value.
tid_shift_32: if (single_threaded) u0 else std.math.Log2Int(u32),
/// Dependencies on whether an entire file gets past AstGen.
/// These are triggered by `@import`, so that:
/// * if a file initially fails AstGen, triggering a transitive failure, when a future update
/// causes it to succeed AstGen, the `@import` is re-analyzed, allowing analysis to proceed
/// * if a file initially succeeds AstGen, but a future update causes the file to fail it,
/// the `@import` is re-analyzed, registering a transitive failure
file_deps: std.AutoArrayHashMapUnmanaged(FileIndex, DepEntry.Index),
/// Dependencies on the source code hash associated with a ZIR instruction.
/// * For a `declaration`, this is the entire declaration body.
/// * For a `struct_decl`, `union_decl`, etc, this is the source of the fields (but not declarations).
@@ -70,6 +77,7 @@ pub const empty: InternPool = .{
.tid_shift_30 = if (single_threaded) 0 else 31,
.tid_shift_31 = if (single_threaded) 0 else 31,
.tid_shift_32 = if (single_threaded) 0 else 31,
.file_deps = .empty,
.src_hash_deps = .empty,
.nav_val_deps = .empty,
.interned_deps = .empty,
@@ -656,6 +664,7 @@ pub const Nav = struct {
};
pub const Dependee = union(enum) {
file: FileIndex,
src_hash: TrackedInst.Index,
nav_val: Nav.Index,
interned: Index,
@@ -704,6 +713,7 @@ pub const DependencyIterator = struct {
pub fn dependencyIterator(ip: *const InternPool, dependee: Dependee) DependencyIterator {
const first_entry = switch (dependee) {
.file => |x| ip.file_deps.get(x),
.src_hash => |x| ip.src_hash_deps.get(x),
.nav_val => |x| ip.nav_val_deps.get(x),
.interned => |x| ip.interned_deps.get(x),
@@ -740,6 +750,7 @@ pub fn addDependency(ip: *InternPool, gpa: Allocator, depender: AnalUnit, depend
const new_index: DepEntry.Index = switch (dependee) {
inline else => |dependee_payload, tag| new_index: {
const gop = try switch (tag) {
.file => ip.file_deps,
.src_hash => ip.src_hash_deps,
.nav_val => ip.nav_val_deps,
.interned => ip.interned_deps,
@@ -6268,6 +6279,7 @@ pub fn init(ip: *InternPool, gpa: Allocator, available_threads: usize) !void {
}
pub fn deinit(ip: *InternPool, gpa: Allocator) void {
ip.file_deps.deinit(gpa);
ip.src_hash_deps.deinit(gpa);
ip.nav_val_deps.deinit(gpa);
ip.interned_deps.deinit(gpa);
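
The InternPool side is mechanical: a new `file_deps` map and a new `Dependee` tag, reusing the existing dependency machinery unchanged. A minimal usage sketch under the signatures shown above (`gpa`, `depender`, and `file_index` are hypothetical placeholders; the iterator's `next()` interface is assumed to match the other dependee kinds):

// Register: an analysis unit records that it depends on whether `file_index`
// gets past AstGen.
try ip.addDependency(gpa, depender, .{ .file = file_index });

// Invalidate: when the file's AstGen outcome flips, walk every unit that
// registered a `.file` dependency on it so it can be marked outdated.
var it = ip.dependencyIterator(.{ .file = file_index });
while (it.next()) |dependent_unit| {
    _ = dependent_unit; // e.g. add to the outdated set, as `Zcu.markDependeeOutdated` does
}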

View File

@@ -454,6 +454,7 @@ pub fn create(arena: Allocator, options: CreateOptions) !*Package.Module {
.tree = undefined,
.zir = undefined,
.status = .never_loaded,
.prev_status = .never_loaded,
.mod = new,
};
break :b new;

View File

@@ -6024,9 +6024,7 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
pt.astGenFile(result.file, path_digest) catch |err|
return sema.fail(&child_block, src, "C import failed: {s}", .{@errorName(err)});
// TODO: register some kind of dependency on the file.
// That way, if this returns `error.AnalysisFail`, we have the dependency banked ready to
// trigger re-analysis later.
try sema.declareDependency(.{ .file = result.file_index });
try pt.ensureFileAnalyzed(result.file_index);
const ty = zcu.fileRootType(result.file_index);
try sema.declareDependency(.{ .interned = ty });
@@ -14347,9 +14345,7 @@ fn zirImport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
return sema.fail(block, operand_src, "unable to open '{s}': {s}", .{ operand, @errorName(err) });
},
};
// TODO: register some kind of dependency on the file.
// That way, if this returns `error.AnalysisFail`, we have the dependency banked ready to
// trigger re-analysis later.
try sema.declareDependency(.{ .file = result.file_index });
try pt.ensureFileAnalyzed(result.file_index);
const ty = zcu.fileRootType(result.file_index);
try sema.declareDependency(.{ .interned = ty });

View File

@@ -424,13 +424,8 @@ pub const Namespace = struct {
};
pub const File = struct {
status: enum {
never_loaded,
retryable_failure,
parse_failure,
astgen_failure,
success_zir,
},
status: Status,
prev_status: Status,
source_loaded: bool,
tree_loaded: bool,
zir_loaded: bool,
@@ -458,6 +453,14 @@ pub const File = struct {
/// successful, this field is unloaded.
prev_zir: ?*Zir = null,
pub const Status = enum {
never_loaded,
retryable_failure,
parse_failure,
astgen_failure,
success_zir,
};
/// A single reference to a file.
pub const Reference = union(enum) {
/// The file is imported directly (i.e. not as a package) with @import.
@@ -3474,6 +3477,10 @@ fn formatDependee(data: struct { dependee: InternPool.Dependee, zcu: *Zcu }, com
const zcu = data.zcu;
const ip = &zcu.intern_pool;
switch (data.dependee) {
.file => |file| {
const file_path = zcu.fileByIndex(file).sub_file_path;
return writer.print("file('{s}')", .{file_path});
},
.src_hash => |ti| {
const info = ti.resolveFull(ip) orelse {
return writer.writeAll("inst(<lost>)");

View File

@@ -179,10 +179,10 @@ pub fn astGenFile(
.inode = header.stat_inode,
.mtime = header.stat_mtime,
};
file.prev_status = file.status;
file.status = .success_zir;
log.debug("AstGen cached success: {s}", .{file.sub_file_path});
// TODO don't report compile errors until Sema @importFile
if (file.zir.hasCompileErrors()) {
{
comp.mutex.lock();
@@ -258,6 +258,7 @@ pub fn astGenFile(
// Any potential AST errors are converted to ZIR errors here.
file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
file.prev_status = file.status;
file.status = .success_zir;
log.debug("AstGen fresh success: {s}", .{file.sub_file_path});
@@ -350,6 +351,9 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
defer cleanupUpdatedFiles(gpa, &updated_files);
for (zcu.import_table.values()) |file_index| {
const file = zcu.fileByIndex(file_index);
if (file.prev_status != file.status and file.prev_status != .never_loaded) {
try zcu.markDependeeOutdated(.not_marked_po, .{ .file = file_index });
}
const old_zir = file.prev_zir orelse continue;
const new_zir = file.zir;
const gop = try updated_files.getOrPut(gpa, file_index);
@@ -551,11 +555,13 @@ pub fn ensureCauAnalyzed(pt: Zcu.PerThread, cau_index: InternPool.Cau.Index) Zcu
const cau_outdated = zcu.outdated.swapRemove(anal_unit) or
zcu.potentially_outdated.swapRemove(anal_unit);
const prev_failed = zcu.failed_analysis.contains(anal_unit) or zcu.transitive_failed_analysis.contains(anal_unit);
if (cau_outdated) {
_ = zcu.outdated_ready.swapRemove(anal_unit);
} else {
// We can trust the current information about this `Cau`.
if (zcu.failed_analysis.contains(anal_unit) or zcu.transitive_failed_analysis.contains(anal_unit)) {
if (prev_failed) {
return error.AnalysisFail;
}
// If it wasn't failed and wasn't marked outdated, then either...
@@ -578,9 +584,13 @@ pub fn ensureCauAnalyzed(pt: Zcu.PerThread, cau_index: InternPool.Cau.Index) Zcu
// Since it does not, this must be a transitive failure.
try zcu.transitive_failed_analysis.put(gpa, anal_unit, {});
}
// We treat errors as up-to-date, since those uses would just trigger a transitive error.
// The exception is types, since type declarations may require re-analysis if the type, e.g. its captures, changed.
const outdated = cau.owner.unwrap() == .type;
// We consider this `Cau` to be outdated if:
// * Previous analysis succeeded; in this case, we need to re-analyze dependants to ensure
// they hit a transitive error here, rather than reporting a different error later (which
// may now be invalid).
// * The `Cau` is a type; in this case, the declaration site may require re-analysis to
// construct a valid type.
const outdated = !prev_failed or cau.owner.unwrap() == .type;
break :res .{ .{
.invalidate_decl_val = outdated,
.invalidate_decl_ref = outdated,
@@ -597,10 +607,9 @@ pub fn ensureCauAnalyzed(pt: Zcu.PerThread, cau_index: InternPool.Cau.Index) Zcu
);
zcu.retryable_failures.appendAssumeCapacity(anal_unit);
zcu.failed_analysis.putAssumeCapacityNoClobber(anal_unit, msg);
// We treat errors as up-to-date, since those uses would just trigger a transitive error
break :res .{ .{
.invalidate_decl_val = false,
.invalidate_decl_ref = false,
.invalidate_decl_val = true,
.invalidate_decl_ref = true,
}, true };
},
};
@@ -707,11 +716,13 @@ pub fn ensureFuncBodyAnalyzed(pt: Zcu.PerThread, maybe_coerced_func_index: Inter
const func_outdated = zcu.outdated.swapRemove(anal_unit) or
zcu.potentially_outdated.swapRemove(anal_unit);
const prev_failed = zcu.failed_analysis.contains(anal_unit) or zcu.transitive_failed_analysis.contains(anal_unit);
if (func_outdated) {
_ = zcu.outdated_ready.swapRemove(anal_unit);
} else {
// We can trust the current information about this function.
if (zcu.failed_analysis.contains(anal_unit) or zcu.transitive_failed_analysis.contains(anal_unit)) {
if (prev_failed) {
return error.AnalysisFail;
}
switch (func.analysisUnordered(ip).state) {
@@ -730,7 +741,10 @@ pub fn ensureFuncBodyAnalyzed(pt: Zcu.PerThread, maybe_coerced_func_index: Inter
// Since it does not, this must be a transitive failure.
try zcu.transitive_failed_analysis.put(gpa, anal_unit, {});
}
break :res .{ false, true }; // we treat errors as up-to-date IES, since those uses would just trigger a transitive error
// We consider the IES to be outdated if the function previously succeeded analysis; in this case,
// we need to re-analyze dependants to ensure they hit a transitive error here, rather than reporting
// a different error later (which may now be invalid).
break :res .{ !prev_failed, true };
},
error.OutOfMemory => return error.OutOfMemory, // TODO: graceful handling like `ensureCauAnalyzed`
};
@@ -1445,6 +1459,7 @@ pub fn importPkg(pt: Zcu.PerThread, mod: *Module) !Zcu.ImportFileResult {
.tree = undefined,
.zir = undefined,
.status = .never_loaded,
.prev_status = .never_loaded,
.mod = mod,
};
@@ -1555,6 +1570,7 @@ pub fn importFile(
.tree = undefined,
.zir = undefined,
.status = .never_loaded,
.prev_status = .never_loaded,
.mod = mod,
};
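
The `ensureCauAnalyzed` / `ensureFuncBodyAnalyzed` changes above apply one rule: a unit whose analysis fails now invalidates its dependants unless it had already failed on the previous update (in which case they already saw a transitive error). Condensed from the hunks above, with names as in the surrounding code:

// Did this unit already fail before the current update?
const prev_failed = zcu.failed_analysis.contains(anal_unit) or
    zcu.transitive_failed_analysis.contains(anal_unit);

// A `Cau` invalidates its decl value/ref if it newly failed, or if it owns a type
// (whose declaration may need re-analysis, e.g. because its captures changed).
const outdated = !prev_failed or cau.owner.unwrap() == .type;

// A function body makes the analogous decision for its inferred error set:
// break :res .{ !prev_failed, true };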

View File

@@ -6118,6 +6118,7 @@ fn cmdAstCheck(
var file: Zcu.File = .{
.status = .never_loaded,
.prev_status = .never_loaded,
.source_loaded = false,
.tree_loaded = false,
.zir_loaded = false,
@@ -6441,6 +6442,7 @@ fn cmdDumpZir(
var file: Zcu.File = .{
.status = .never_loaded,
.prev_status = .never_loaded,
.source_loaded = false,
.tree_loaded = false,
.zir_loaded = true,
@@ -6508,6 +6510,7 @@ fn cmdChangelist(
var file: Zcu.File = .{
.status = .never_loaded,
.prev_status = .never_loaded,
.source_loaded = false,
.tree_loaded = false,
.zir_loaded = false,

View File

@@ -0,0 +1,35 @@
#target=x86_64-linux-selfhosted
#target=x86_64-linux-cbe
#target=x86_64-windows-cbe
#update=initial version with error
#file=main.zig
pub fn main() !void {
try @import("foo.zig").hello();
}
#file=foo.zig
pub fn hello() !void {
try std.io.getStdOut().writeAll("Hello, World!\n");
}
#expect_error=ignored
#update=fix the error
#file=foo.zig
const std = @import("std");
pub fn hello() !void {
try std.io.getStdOut().writeAll("Hello, World!\n");
}
#expect_stdout="Hello, World!\n"
#update=add new error
#file=foo.zig
const std = @import("std");
pub fn hello() !void {
try std.io.getStdOut().writeAll(hello_str);
}
#expect_error=ignored
#update=fix the new error
#file=foo.zig
const std = @import("std");
const hello_str = "Hello, World! Again!\n";
pub fn hello() !void {
try std.io.getStdOut().writeAll(hello_str);
}
#expect_stdout="Hello, World! Again!\n"