Zcu: remove *_loaded fields on File
Instead, `source`, `tree`, and `zir` should all be optional. This is precisely what we're actually trying to model here; and `File` isn't optimized for memory consumption or serializability anyway, so it's fine to use a couple of extra bytes on actual optionals here.
parent 3a4bb47fed
commit d3ca10d5d8
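
The sketch below is not part of the commit and the struct names are invented for illustration; it only shows the general pattern the message describes: folding a `*_loaded: bool` flag plus a possibly-`undefined` payload into a single optional field, so the type system tracks whether the data is loaded.

const std = @import("std");

// Hypothetical before/after shapes of the pattern applied to `Zcu.File`.
const FileBefore = struct {
    source_loaded: bool,
    source: [:0]const u8, // undefined unless `source_loaded` is true
};

const FileAfter = struct {
    source: ?[:0]const u8, // null means "not loaded yet"
};

pub fn main() void {
    var file: FileAfter = .{ .source = null };
    file.source = "const x: u32 = 1;";
    // Unwrapping makes the "must be loaded" assumption explicit at each use site.
    if (file.source) |src| std.debug.print("loaded {d} bytes\n", .{src.len});
}
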
@@ -264,14 +264,12 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
}

pub fn populateFile(comp: *Compilation, mod: *Module, file: *File) !void {
-    assert(file.source_loaded == true);
-
    if (mod.root.statFile(mod.root_src_path)) |stat| {
-        if (stat.size != file.source.len) {
+        if (stat.size != file.source.?.len) {
            std.log.warn(
                "the cached file '{}{s}' had the wrong size. Expected {d}, found {d}. " ++
                    "Overwriting with correct file contents now",
-                .{ mod.root, mod.root_src_path, file.source.len, stat.size },
+                .{ mod.root, mod.root_src_path, file.source.?.len, stat.size },
            );

            try writeFile(file, mod);

@@ -296,15 +294,13 @@ pub fn populateFile(comp: *Compilation, mod: *Module, file: *File) !void {

    log.debug("parsing and generating '{s}'", .{mod.root_src_path});

-    file.tree = try std.zig.Ast.parse(comp.gpa, file.source, .zig);
-    assert(file.tree.errors.len == 0); // builtin.zig must parse
-    file.tree_loaded = true;
+    file.tree = try std.zig.Ast.parse(comp.gpa, file.source.?, .zig);
+    assert(file.tree.?.errors.len == 0); // builtin.zig must parse

-    file.zir = try AstGen.generate(comp.gpa, file.tree);
-    assert(!file.zir.hasCompileErrors()); // builtin.zig must not have astgen errors
-    file.zir_loaded = true;
+    file.zir = try AstGen.generate(comp.gpa, file.tree.?);
+    assert(!file.zir.?.hasCompileErrors()); // builtin.zig must not have astgen errors
    file.status = .success_zir;
-    // Note that whilst we set `zir_loaded` here, we populated `path_digest`
+    // Note that whilst we set `zir` here, we populated `path_digest`
    // all the way back in `Package.Module.create`.
}

@@ -312,7 +308,7 @@ fn writeFile(file: *File, mod: *Module) !void {
    var buf: [std.fs.max_path_bytes]u8 = undefined;
    var af = try mod.root.atomicFile(mod.root_src_path, .{ .make_path = true }, &buf);
    defer af.deinit();
-    try af.file.writeAll(file.source);
+    try af.file.writeAll(file.source.?);
    af.finish() catch |err| switch (err) {
        error.AccessDenied => switch (builtin.os.tag) {
            .windows => {

@@ -326,7 +322,7 @@ fn writeFile(file: *File, mod: *Module) !void {
    };

    file.stat = .{
-        .size = file.source.len,
+        .size = file.source.?.len,
        .inode = 0, // dummy value
        .mtime = 0, // dummy value
    };

@@ -3211,7 +3211,7 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
        } else {
            // Must be ZIR or Zoir errors. Note that this may include AST errors.
            _ = try file.getTree(gpa); // Tree must be loaded.
-            if (file.zir_loaded) {
+            if (file.zir != null) {
                try addZirErrorMessages(&bundle, file);
            } else if (file.zoir != null) {
                try addZoirErrorMessages(&bundle, file);

@@ -3623,22 +3623,17 @@ pub fn addModuleErrorMsg(
}

pub fn addZirErrorMessages(eb: *ErrorBundle.Wip, file: *Zcu.File) !void {
-    assert(file.zir_loaded);
-    assert(file.tree_loaded);
-    assert(file.source_loaded);
    const gpa = eb.gpa;
    const src_path = try file.fullPath(gpa);
    defer gpa.free(src_path);
-    return eb.addZirErrorMessages(file.zir, file.tree, file.source, src_path);
+    return eb.addZirErrorMessages(file.zir.?, file.tree.?, file.source.?, src_path);
}

pub fn addZoirErrorMessages(eb: *ErrorBundle.Wip, file: *Zcu.File) !void {
-    assert(file.source_loaded);
-    assert(file.tree_loaded);
    const gpa = eb.gpa;
    const src_path = try file.fullPath(gpa);
    defer gpa.free(src_path);
-    return eb.addZoirErrorMessages(file.zoir.?, file.tree, file.source, src_path);
+    return eb.addZoirErrorMessages(file.zoir.?, file.tree.?, file.source.?, src_path);
}

pub fn performAllTheWork(

@@ -4312,18 +4307,17 @@ fn workerAstGenFile(
    // Pre-emptively look for `@import` paths and queue them up.
    // If we experience an error preemptively fetching the
    // file, just ignore it and let it happen again later during Sema.
-    assert(file.zir_loaded);
-    const imports_index = file.zir.extra[@intFromEnum(Zir.ExtraIndex.imports)];
+    const imports_index = file.zir.?.extra[@intFromEnum(Zir.ExtraIndex.imports)];
    if (imports_index != 0) {
-        const extra = file.zir.extraData(Zir.Inst.Imports, imports_index);
+        const extra = file.zir.?.extraData(Zir.Inst.Imports, imports_index);
        var import_i: u32 = 0;
        var extra_index = extra.end;

        while (import_i < extra.data.imports_len) : (import_i += 1) {
-            const item = file.zir.extraData(Zir.Inst.Imports.Item, extra_index);
+            const item = file.zir.?.extraData(Zir.Inst.Imports.Item, extra_index);
            extra_index = item.end;

-            const import_path = file.zir.nullTerminatedString(item.data.name);
+            const import_path = file.zir.?.nullTerminatedString(item.data.name);
            // `@import("builtin")` is handled specially.
            if (mem.eql(u8, import_path, "builtin")) continue;

@@ -482,13 +482,11 @@ pub fn create(arena: Allocator, options: CreateOptions) !*Package.Module {
    };
    new_file.* = .{
        .sub_file_path = "builtin.zig",
-        .source = generated_builtin_source,
-        .source_loaded = true,
-        .tree_loaded = false,
-        .zir_loaded = false,
        .stat = undefined,
-        .tree = undefined,
-        .zir = undefined,
+        .source = generated_builtin_source,
+        .tree = null,
+        .zir = null,
+        .zoir = null,
        .status = .never_loaded,
        .prev_status = .never_loaded,
        .mod = new,
src/Sema.zig (13 changed lines)

@@ -7649,9 +7649,8 @@ fn analyzeCall(
        const nav = ip.getNav(info.owner_nav);
        const resolved_func_inst = info.zir_body_inst.resolveFull(ip) orelse return error.AnalysisFail;
        const file = zcu.fileByIndex(resolved_func_inst.file);
-        assert(file.zir_loaded);
-        const zir_info = file.zir.getFnInfo(resolved_func_inst.inst);
-        break :b .{ nav, file.zir, info.zir_body_inst, resolved_func_inst.inst, zir_info };
+        const zir_info = file.zir.?.getFnInfo(resolved_func_inst.inst);
+        break :b .{ nav, file.zir.?, info.zir_body_inst, resolved_func_inst.inst, zir_info };
    } else .{ undefined, undefined, undefined, undefined, undefined };

    // This is the `inst_map` used when evaluating generic parameters and return types.

@@ -35328,7 +35327,7 @@ fn backingIntType(
        break :blk accumulator;
    };

-    const zir = zcu.namespacePtr(struct_type.namespace).fileScope(zcu).zir;
+    const zir = zcu.namespacePtr(struct_type.namespace).fileScope(zcu).zir.?;
    const zir_index = struct_type.zir_index.resolve(ip) orelse return error.AnalysisFail;
    const extended = zir.instructions.items(.data)[@intFromEnum(zir_index)].extended;
    assert(extended.opcode == .struct_decl);

@@ -35948,7 +35947,7 @@ fn structFields(
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;
    const namespace_index = struct_type.namespace;
-    const zir = zcu.namespacePtr(namespace_index).fileScope(zcu).zir;
+    const zir = zcu.namespacePtr(namespace_index).fileScope(zcu).zir.?;
    const zir_index = struct_type.zir_index.resolve(ip) orelse return error.AnalysisFail;

    const fields_len, _, var extra_index = structZirInfo(zir, zir_index);

@@ -36149,7 +36148,7 @@ fn structFieldInits(
    assert(!struct_type.haveFieldInits(ip));

    const namespace_index = struct_type.namespace;
-    const zir = zcu.namespacePtr(namespace_index).fileScope(zcu).zir;
+    const zir = zcu.namespacePtr(namespace_index).fileScope(zcu).zir.?;
    const zir_index = struct_type.zir_index.resolve(ip) orelse return error.AnalysisFail;
    const fields_len, _, var extra_index = structZirInfo(zir, zir_index);

@@ -36268,7 +36267,7 @@ fn unionFields(
    const zcu = pt.zcu;
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;
-    const zir = zcu.namespacePtr(union_type.namespace).fileScope(zcu).zir;
+    const zir = zcu.namespacePtr(union_type.namespace).fileScope(zcu).zir.?;
    const zir_index = union_type.zir_index.resolve(ip) orelse return error.AnalysisFail;
    const extended = zir.instructions.items(.data)[@intFromEnum(zir_index)].extended;
    assert(extended.opcode == .union_decl);

@@ -3587,8 +3587,7 @@ pub fn typeDeclSrcLine(ty: Type, zcu: *Zcu) ?u32 {
    };
    const info = tracked.resolveFull(&zcu.intern_pool) orelse return null;
    const file = zcu.fileByIndex(info.file);
-    assert(file.zir_loaded);
-    const zir = file.zir;
+    const zir = file.zir.?;
    const inst = zir.instructions.get(@intFromEnum(info.inst));
    return switch (inst.tag) {
        .struct_init, .struct_init_ref => zir.extraData(Zir.Inst.StructInit, inst.data.pl_node.payload_index).data.abs_line,

@@ -3905,7 +3904,7 @@ fn resolveStructInner(
    var comptime_err_ret_trace = std.ArrayList(Zcu.LazySrcLoc).init(gpa);
    defer comptime_err_ret_trace.deinit();

-    const zir = zcu.namespacePtr(struct_obj.namespace).fileScope(zcu).zir;
+    const zir = zcu.namespacePtr(struct_obj.namespace).fileScope(zcu).zir.?;
    var sema: Sema = .{
        .pt = pt,
        .gpa = gpa,

@@ -3959,7 +3958,7 @@ fn resolveUnionInner(
    var comptime_err_ret_trace = std.ArrayList(Zcu.LazySrcLoc).init(gpa);
    defer comptime_err_ret_trace.deinit();

-    const zir = zcu.namespacePtr(union_obj.namespace).fileScope(zcu).zir;
+    const zir = zcu.namespacePtr(union_obj.namespace).fileScope(zcu).zir.?;
    var sema: Sema = .{
        .pt = pt,
        .gpa = gpa,
src/Zcu.zig (85 changed lines)

@@ -660,22 +660,17 @@ pub const Namespace = struct {
pub const File = struct {
    status: Status,
    prev_status: Status,
-    source_loaded: bool,
-    tree_loaded: bool,
-    zir_loaded: bool,
    /// Relative to the owning package's root source directory.
    /// Memory is stored in gpa, owned by File.
    sub_file_path: []const u8,
-    /// Whether this is populated depends on `source_loaded`.
-    source: [:0]const u8,
    /// Whether this is populated depends on `status`.
    stat: Cache.File.Stat,
-    /// Whether this is populated or not depends on `tree_loaded`.
-    tree: Ast,
-    /// Whether this is populated or not depends on `zir_loaded`.
-    zir: Zir,
-    /// Cached Zoir, generated lazily.
-    zoir: ?Zoir = null,
+    source: ?[:0]const u8,
+    tree: ?Ast,
+    zir: ?Zir,
+    zoir: ?Zoir,
    /// Module that this file is a part of, managed externally.
    mod: *Package.Module,
    /// Whether this file is a part of multiple packages. This is an error condition which will be reported after AstGen.

@@ -727,23 +722,23 @@ pub const File = struct {
    }

    pub fn unloadTree(file: *File, gpa: Allocator) void {
-        if (file.tree_loaded) {
-            file.tree_loaded = false;
-            file.tree.deinit(gpa);
+        if (file.tree) |*tree| {
+            tree.deinit(gpa);
+            file.tree = null;
        }
    }

    pub fn unloadSource(file: *File, gpa: Allocator) void {
-        if (file.source_loaded) {
-            file.source_loaded = false;
-            gpa.free(file.source);
+        if (file.source) |source| {
+            gpa.free(source);
+            file.source = null;
        }
    }

    pub fn unloadZir(file: *File, gpa: Allocator) void {
-        if (file.zir_loaded) {
-            file.zir_loaded = false;
-            file.zir.deinit(gpa);
+        if (file.zir) |*zir| {
+            zir.deinit(gpa);
+            file.zir = null;
        }
    }

@@ -753,8 +748,8 @@ pub const File = struct {
    };

    pub fn getSource(file: *File, gpa: Allocator) !Source {
-        if (file.source_loaded) return Source{
-            .bytes = file.source,
+        if (file.source) |source| return .{
+            .bytes = source,
            .stat = file.stat,
        };

@@ -769,7 +764,8 @@ pub const File = struct {
            return error.FileTooBig;

        const source = try gpa.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0);
-        defer if (!file.source_loaded) gpa.free(source);
+        defer gpa.free(source);

        const amt = try f.readAll(source);
        if (amt != stat.size)
            return error.UnexpectedEndOfFile;

@@ -778,9 +774,9 @@ pub const File = struct {
        // used for error reporting. We need to keep the stat fields stale so that
        // astGenFile can know to regenerate ZIR.

+        errdefer comptime unreachable; // don't error after populating `source`
        file.source = source;
-        file.source_loaded = true;
-        return Source{
+        return .{
            .bytes = source,
            .stat = .{
                .size = stat.size,

@@ -791,20 +787,20 @@ pub const File = struct {
    }

    pub fn getTree(file: *File, gpa: Allocator) !*const Ast {
-        if (file.tree_loaded) return &file.tree;
+        if (file.tree) |*tree| return tree;

        const source = try file.getSource(gpa);
-        file.tree = try Ast.parse(gpa, source.bytes, file.getMode());
-        file.tree_loaded = true;
-        return &file.tree;
+        file.tree = try .parse(gpa, source.bytes, file.getMode());
+        return &file.tree.?;
    }

    pub fn getZoir(file: *File, zcu: *Zcu) !*const Zoir {
        if (file.zoir) |*zoir| return zoir;

-        assert(file.tree_loaded);
-        assert(file.tree.mode == .zon);
-        file.zoir = try ZonGen.generate(zcu.gpa, file.tree, .{});
+        const tree = file.tree.?;
+        assert(tree.mode == .zon);
+        file.zoir = try ZonGen.generate(zcu.gpa, tree, .{});
        if (file.zoir.?.hasCompileErrors()) {
            try zcu.failed_files.putNoClobber(zcu.gpa, file, null);
            return error.AnalysisFail;

@@ -900,18 +896,18 @@ pub const File = struct {

        // We can only mark children as failed if the ZIR is loaded, which may not
        // be the case if there were other astgen failures in this file
-        if (!file.zir_loaded) return;
+        if (file.zir == null) return;

-        const imports_index = file.zir.extra[@intFromEnum(Zir.ExtraIndex.imports)];
+        const imports_index = file.zir.?.extra[@intFromEnum(Zir.ExtraIndex.imports)];
        if (imports_index == 0) return;
-        const extra = file.zir.extraData(Zir.Inst.Imports, imports_index);
+        const extra = file.zir.?.extraData(Zir.Inst.Imports, imports_index);

        var extra_index = extra.end;
        for (0..extra.data.imports_len) |_| {
-            const item = file.zir.extraData(Zir.Inst.Imports.Item, extra_index);
+            const item = file.zir.?.extraData(Zir.Inst.Imports.Item, extra_index);
            extra_index = item.end;

-            const import_path = file.zir.nullTerminatedString(item.data.name);
+            const import_path = file.zir.?.nullTerminatedString(item.data.name);
            if (mem.eql(u8, import_path, "builtin")) continue;

            const res = pt.importFile(file, import_path) catch continue;

@@ -1012,7 +1008,7 @@ pub const SrcLoc = struct {
    lazy: LazySrcLoc.Offset,

    pub fn baseSrcToken(src_loc: SrcLoc) Ast.TokenIndex {
-        const tree = src_loc.file_scope.tree;
+        const tree = src_loc.file_scope.tree.?;
        return tree.firstToken(src_loc.base_node);
    }

@@ -1057,7 +1053,6 @@ pub const SrcLoc = struct {
                const node_off = traced_off.x;
                const tree = try src_loc.file_scope.getTree(gpa);
                const node = src_loc.relativeToNodeIndex(node_off);
-                assert(src_loc.file_scope.tree_loaded);
                return tree.nodeToSpan(node);
            },
            .node_offset_main_token => |node_off| {

@@ -1069,7 +1064,6 @@ pub const SrcLoc = struct {
            .node_offset_bin_op => |node_off| {
                const tree = try src_loc.file_scope.getTree(gpa);
                const node = src_loc.relativeToNodeIndex(node_off);
-                assert(src_loc.file_scope.tree_loaded);
                return tree.nodeToSpan(node);
            },
            .node_offset_initializer => |node_off| {

@@ -2408,9 +2402,8 @@ pub const LazySrcLoc = struct {
        if (zir_inst == .main_struct_inst) return .{ file, 0 };

        // Otherwise, make sure ZIR is loaded.
-        assert(file.zir_loaded);
+        const zir = file.zir.?;

-        const zir = file.zir;
        const inst = zir.instructions.get(@intFromEnum(zir_inst));
        const base_node: Ast.Node.Index = switch (inst.tag) {
            .declaration => inst.data.declaration.src_node,

@@ -3671,7 +3664,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv
            const inst_info = nav.analysis.?.zir_index.resolveFull(ip) orelse continue;
            const file = zcu.fileByIndex(inst_info.file);
            // If the file failed AstGen, the TrackedInst refers to the old ZIR.
-            const zir = if (file.status == .success_zir) file.zir else file.prev_zir.?.*;
+            const zir = if (file.status == .success_zir) file.zir.? else file.prev_zir.?.*;
            const decl = zir.getDeclaration(inst_info.inst);

            if (!comp.config.is_test or file.mod != zcu.main_mod) continue;

@@ -3703,7 +3696,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv
            const inst_info = ip.getNav(nav).analysis.?.zir_index.resolveFull(ip) orelse continue;
            const file = zcu.fileByIndex(inst_info.file);
            // If the file failed AstGen, the TrackedInst refers to the old ZIR.
-            const zir = if (file.status == .success_zir) file.zir else file.prev_zir.?.*;
+            const zir = if (file.status == .success_zir) file.zir.? else file.prev_zir.?.*;
            const decl = zir.getDeclaration(inst_info.inst);
            if (decl.linkage == .@"export") {
                const unit: AnalUnit = .wrap(.{ .nav_val = nav });

@@ -3721,7 +3714,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv
            const inst_info = ip.getNav(nav).analysis.?.zir_index.resolveFull(ip) orelse continue;
            const file = zcu.fileByIndex(inst_info.file);
            // If the file failed AstGen, the TrackedInst refers to the old ZIR.
-            const zir = if (file.status == .success_zir) file.zir else file.prev_zir.?.*;
+            const zir = if (file.status == .success_zir) file.zir.? else file.prev_zir.?.*;
            const decl = zir.getDeclaration(inst_info.inst);
            if (decl.linkage == .@"export") {
                const unit: AnalUnit = .wrap(.{ .nav_val = nav });

@@ -3858,7 +3851,7 @@ pub fn navSrcLine(zcu: *Zcu, nav_index: InternPool.Nav.Index) u32 {
    const ip = &zcu.intern_pool;
    const inst_info = ip.getNav(nav_index).srcInst(ip).resolveFull(ip).?;
    const zir = zcu.fileByIndex(inst_info.file).zir;
-    return zir.getDeclaration(inst_info.inst).src_line;
+    return zir.?.getDeclaration(inst_info.inst).src_line;
}

pub fn navValue(zcu: *const Zcu, nav_index: InternPool.Nav.Index) Value {
@@ -209,7 +209,6 @@ pub fn astGenFile(
        },
        else => |e| return e,
    };
-    file.zir_loaded = true;
    file.stat = .{
        .size = header.stat_size,
        .inode = header.stat_inode,

@@ -219,12 +218,12 @@ pub fn astGenFile(
    file.status = .success_zir;
    log.debug("AstGen cached success: {s}", .{file.sub_file_path});

-    if (file.zir.hasCompileErrors()) {
+    if (file.zir.?.hasCompileErrors()) {
        comp.mutex.lock();
        defer comp.mutex.unlock();
        try zcu.failed_files.putNoClobber(gpa, file, null);
    }
-    if (file.zir.loweringFailed()) {
+    if (file.zir.?.loweringFailed()) {
        file.status = .astgen_failure;
        return error.AnalysisFail;
    }

@@ -261,13 +260,12 @@ pub fn astGenFile(
    // single-threaded context, so we need to keep both versions around
    // until that point in the pipeline. Previous ZIR data is freed after
    // that.
-    if (file.zir_loaded and !file.zir.loweringFailed()) {
+    if (file.zir != null and !file.zir.?.loweringFailed()) {
        assert(file.prev_zir == null);
        const prev_zir_ptr = try gpa.create(Zir);
        file.prev_zir = prev_zir_ptr;
-        prev_zir_ptr.* = file.zir;
-        file.zir = undefined;
-        file.zir_loaded = false;
+        prev_zir_ptr.* = file.zir.?;
+        file.zir = null;
    }
    file.unload(gpa);

@@ -275,7 +273,7 @@ pub fn astGenFile(
        return error.FileTooBig;

    const source = try gpa.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0);
-    defer if (!file.source_loaded) gpa.free(source);
+    defer if (file.source == null) gpa.free(source);
    const amt = try source_file.readAll(source);
    if (amt != stat.size)
        return error.UnexpectedEndOfFile;

@@ -286,42 +284,39 @@ pub fn astGenFile(
        .mtime = stat.mtime,
    };
    file.source = source;
-    file.source_loaded = true;

    file.tree = try Ast.parse(gpa, source, .zig);
-    file.tree_loaded = true;

    // Any potential AST errors are converted to ZIR errors here.
-    file.zir = try AstGen.generate(gpa, file.tree);
-    file.zir_loaded = true;
+    file.zir = try AstGen.generate(gpa, file.tree.?);
    file.prev_status = file.status;
    file.status = .success_zir;
    log.debug("AstGen fresh success: {s}", .{file.sub_file_path});

    const safety_buffer = if (Zcu.data_has_safety_tag)
-        try gpa.alloc([8]u8, file.zir.instructions.len)
+        try gpa.alloc([8]u8, file.zir.?.instructions.len)
    else
        undefined;
    defer if (Zcu.data_has_safety_tag) gpa.free(safety_buffer);
    const data_ptr = if (Zcu.data_has_safety_tag)
-        if (file.zir.instructions.len == 0)
+        if (file.zir.?.instructions.len == 0)
            @as([*]const u8, undefined)
        else
            @as([*]const u8, @ptrCast(safety_buffer.ptr))
    else
-        @as([*]const u8, @ptrCast(file.zir.instructions.items(.data).ptr));
+        @as([*]const u8, @ptrCast(file.zir.?.instructions.items(.data).ptr));
    if (Zcu.data_has_safety_tag) {
        // The `Data` union has a safety tag but in the file format we store it without.
-        for (file.zir.instructions.items(.data), 0..) |*data, i| {
+        for (file.zir.?.instructions.items(.data), 0..) |*data, i| {
            const as_struct: *const Zcu.HackDataLayout = @ptrCast(data);
            safety_buffer[i] = as_struct.data;
        }
    }

    const header: Zir.Header = .{
-        .instructions_len = @as(u32, @intCast(file.zir.instructions.len)),
-        .string_bytes_len = @as(u32, @intCast(file.zir.string_bytes.len)),
-        .extra_len = @as(u32, @intCast(file.zir.extra.len)),
+        .instructions_len = @as(u32, @intCast(file.zir.?.instructions.len)),
+        .string_bytes_len = @as(u32, @intCast(file.zir.?.string_bytes.len)),
+        .extra_len = @as(u32, @intCast(file.zir.?.extra.len)),

        .stat_size = stat.size,
        .stat_inode = stat.inode,

@@ -333,20 +328,20 @@ pub fn astGenFile(
            .len = @sizeOf(Zir.Header),
        },
        .{
-            .base = @as([*]const u8, @ptrCast(file.zir.instructions.items(.tag).ptr)),
-            .len = file.zir.instructions.len,
+            .base = @as([*]const u8, @ptrCast(file.zir.?.instructions.items(.tag).ptr)),
+            .len = file.zir.?.instructions.len,
        },
        .{
            .base = data_ptr,
-            .len = file.zir.instructions.len * 8,
+            .len = file.zir.?.instructions.len * 8,
        },
        .{
-            .base = file.zir.string_bytes.ptr,
-            .len = file.zir.string_bytes.len,
+            .base = file.zir.?.string_bytes.ptr,
+            .len = file.zir.?.string_bytes.len,
        },
        .{
-            .base = @as([*]const u8, @ptrCast(file.zir.extra.ptr)),
-            .len = file.zir.extra.len * 4,
+            .base = @as([*]const u8, @ptrCast(file.zir.?.extra.ptr)),
+            .len = file.zir.?.extra.len * 4,
        },
    };
    cache_file.writevAll(&iovecs) catch |err| {

@@ -355,12 +350,12 @@ pub fn astGenFile(
        });
    };

-    if (file.zir.hasCompileErrors()) {
+    if (file.zir.?.hasCompileErrors()) {
        comp.mutex.lock();
        defer comp.mutex.unlock();
        try zcu.failed_files.putNoClobber(gpa, file, null);
    }
-    if (file.zir.loweringFailed()) {
+    if (file.zir.?.loweringFailed()) {
        file.status = .astgen_failure;
        return error.AnalysisFail;
    }

@@ -392,7 +387,7 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
            try zcu.markDependeeOutdated(.not_marked_po, .{ .file = file_index });
        }
        const old_zir = file.prev_zir orelse continue;
-        const new_zir = file.zir;
+        const new_zir = file.zir.?;
        const gop = try updated_files.getOrPut(gpa, file_index);
        assert(!gop.found_existing);
        gop.value_ptr.* = .{

@@ -400,7 +395,7 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
            .inst_map = .{},
        };
        if (!new_zir.loweringFailed()) {
-            try Zcu.mapOldZirToNew(gpa, old_zir.*, file.zir, &gop.value_ptr.inst_map);
+            try Zcu.mapOldZirToNew(gpa, old_zir.*, new_zir, &gop.value_ptr.inst_map);
        }
    }

@@ -426,7 +421,7 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
                // Either way, invalidate associated `src_hash` deps.
                log.debug("tracking failed for %{d}{s}", .{
                    old_inst,
-                    if (file.zir.loweringFailed()) " due to AstGen failure" else "",
+                    if (file.zir.?.loweringFailed()) " due to AstGen failure" else "",
                });
                tracked_inst.inst = .lost;
                try zcu.markDependeeOutdated(.not_marked_po, .{ .src_hash = tracked_inst_index });

@@ -435,7 +430,7 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
            tracked_inst.inst = InternPool.TrackedInst.MaybeLost.ZirIndex.wrap(new_inst);

            const old_zir = file.prev_zir.?.*;
-            const new_zir = file.zir;
+            const new_zir = file.zir.?;
            const old_tag = old_zir.instructions.items(.tag)[@intFromEnum(old_inst)];
            const old_data = old_zir.instructions.items(.data)[@intFromEnum(old_inst)];

@@ -532,7 +527,7 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {

    for (updated_files.keys(), updated_files.values()) |file_index, updated_file| {
        const file = updated_file.file;
-        if (file.zir.loweringFailed()) {
+        if (file.zir.?.loweringFailed()) {
            // Keep `prev_zir` around: it's the last usable ZIR.
            // Don't update the namespace, as we have no new data to update *to*.
        } else {

@@ -805,7 +800,7 @@ fn analyzeComptimeUnit(pt: Zcu.PerThread, cu_id: InternPool.ComptimeUnit.Id) Zcu
    // unnecessary, and we can move the below `removeDependenciesForDepender` call up with its friends
    // in `ensureComptimeUnitUpToDate`.
    if (file.status != .success_zir) return error.AnalysisFail;
-    const zir = file.zir;
+    const zir = file.zir.?;

    // We are about to re-analyze this unit; drop its depenndencies.
    zcu.intern_pool.removeDependenciesForDepender(gpa, anal_unit);

@@ -1002,7 +997,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
    // unnecessary, and we can move the below `removeDependenciesForDepender` call up with its friends
    // in `ensureComptimeUnitUpToDate`.
    if (file.status != .success_zir) return error.AnalysisFail;
-    const zir = file.zir;
+    const zir = file.zir.?;

    // We are about to re-analyze this unit; drop its depenndencies.
    zcu.intern_pool.removeDependenciesForDepender(gpa, anal_unit);

@@ -1380,7 +1375,7 @@ fn analyzeNavType(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileEr
    // unnecessary, and we can move the below `removeDependenciesForDepender` call up with its friends
    // in `ensureComptimeUnitUpToDate`.
    if (file.status != .success_zir) return error.AnalysisFail;
-    const zir = file.zir;
+    const zir = file.zir.?;

    // We are about to re-analyze this unit; drop its depenndencies.
    zcu.intern_pool.removeDependenciesForDepender(gpa, anal_unit);

@@ -1758,7 +1753,7 @@ fn createFileRootStruct(
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;
    const file = zcu.fileByIndex(file_index);
-    const extended = file.zir.instructions.items(.data)[@intFromEnum(Zir.Inst.Index.main_struct_inst)].extended;
+    const extended = file.zir.?.instructions.items(.data)[@intFromEnum(Zir.Inst.Index.main_struct_inst)].extended;
    assert(extended.opcode == .struct_decl);
    const small: Zir.Inst.StructDecl.Small = @bitCast(extended.small);
    assert(!small.has_captures_len);

@@ -1766,16 +1761,16 @@ fn createFileRootStruct(
    assert(small.layout == .auto);
    var extra_index: usize = extended.operand + @typeInfo(Zir.Inst.StructDecl).@"struct".fields.len;
    const fields_len = if (small.has_fields_len) blk: {
-        const fields_len = file.zir.extra[extra_index];
+        const fields_len = file.zir.?.extra[extra_index];
        extra_index += 1;
        break :blk fields_len;
    } else 0;
    const decls_len = if (small.has_decls_len) blk: {
-        const decls_len = file.zir.extra[extra_index];
+        const decls_len = file.zir.?.extra[extra_index];
        extra_index += 1;
        break :blk decls_len;
    } else 0;
-    const decls = file.zir.bodySlice(extra_index, decls_len);
+    const decls = file.zir.?.bodySlice(extra_index, decls_len);
    extra_index += decls_len;

    const tracked_inst = try ip.trackZir(gpa, pt.tid, .{

@@ -1844,17 +1839,17 @@ fn updateFileNamespace(pt: Zcu.PerThread, file_index: Zcu.File.Index) Allocator.

    const namespace_index = Type.fromInterned(file_root_type).getNamespaceIndex(zcu);
    const decls = decls: {
-        const extended = file.zir.instructions.items(.data)[@intFromEnum(Zir.Inst.Index.main_struct_inst)].extended;
+        const extended = file.zir.?.instructions.items(.data)[@intFromEnum(Zir.Inst.Index.main_struct_inst)].extended;
        const small: Zir.Inst.StructDecl.Small = @bitCast(extended.small);

        var extra_index: usize = extended.operand + @typeInfo(Zir.Inst.StructDecl).@"struct".fields.len;
        extra_index += @intFromBool(small.has_fields_len);
        const decls_len = if (small.has_decls_len) blk: {
-            const decls_len = file.zir.extra[extra_index];
+            const decls_len = file.zir.?.extra[extra_index];
            extra_index += 1;
            break :blk decls_len;
        } else 0;
-        break :decls file.zir.bodySlice(extra_index, decls_len);
+        break :decls file.zir.?.bodySlice(extra_index, decls_len);
    };
    try pt.scanNamespace(namespace_index, decls);
    zcu.namespacePtr(namespace_index).generation = zcu.generation;

@@ -1873,7 +1868,7 @@ fn semaFile(pt: Zcu.PerThread, file_index: Zcu.File.Index) Zcu.SemaError!void {
    if (file.status != .success_zir) {
        return error.AnalysisFail;
    }
-    assert(file.zir_loaded);
+    assert(file.zir != null);

    const new_namespace_index = try pt.createNamespace(.{
        .parent = .none,

@@ -1983,13 +1978,11 @@ pub fn importPkg(pt: Zcu.PerThread, mod: *Module) !Zcu.ImportFileResult {
    gop.value_ptr.* = new_file_index;
    new_file.* = .{
        .sub_file_path = sub_file_path,
-        .source = undefined,
-        .source_loaded = false,
-        .tree_loaded = false,
-        .zir_loaded = false,
        .stat = undefined,
-        .tree = undefined,
-        .zir = undefined,
+        .source = null,
+        .tree = null,
+        .zir = null,
+        .zoir = null,
        .status = .never_loaded,
        .prev_status = .never_loaded,
        .mod = mod,

@@ -2096,13 +2089,11 @@ pub fn importFile(
    gop.value_ptr.* = new_file_index;
    new_file.* = .{
        .sub_file_path = sub_file_path,
-        .source = undefined,
-        .source_loaded = false,
-        .tree_loaded = false,
-        .zir_loaded = false,
        .stat = undefined,
-        .tree = undefined,
-        .zir = undefined,
+        .source = null,
+        .tree = null,
+        .zir = null,
+        .zoir = null,
        .status = .never_loaded,
        .prev_status = .never_loaded,
        .mod = mod,

@@ -2441,7 +2432,7 @@ const ScanDeclIter = struct {
        const namespace = zcu.namespacePtr(namespace_index);
        const gpa = zcu.gpa;
        const file = namespace.fileScope(zcu);
-        const zir = file.zir;
+        const zir = file.zir.?;
        const ip = &zcu.intern_pool;

        const decl = zir.getDeclaration(decl_inst);

@@ -2591,7 +2582,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaE
    const func = zcu.funcInfo(func_index);
    const inst_info = func.zir_body_inst.resolveFull(ip) orelse return error.AnalysisFail;
    const file = zcu.fileByIndex(inst_info.file);
-    const zir = file.zir;
+    const zir = file.zir.?;

    try zcu.analysis_in_progress.put(gpa, anal_unit, {});
    errdefer _ = zcu.analysis_in_progress.swapRemove(anal_unit);

@@ -2843,7 +2834,9 @@ pub fn getErrorValueFromSlice(pt: Zcu.PerThread, name: []const u8) Allocator.Err
/// Removes any entry from `Zcu.failed_files` associated with `file`. Acquires `Compilation.mutex` as needed.
/// `file.zir` must be unchanged from the last update, as it is used to determine if there is such an entry.
fn lockAndClearFileCompileError(pt: Zcu.PerThread, file: *Zcu.File) void {
-    if (!file.zir_loaded or !file.zir.hasCompileErrors()) return;
+    const zir = file.zir orelse return;
+    if (zir.hasCompileErrors()) return;
+
    pt.zcu.comp.mutex.lock();
    defer pt.zcu.comp.mutex.unlock();
    if (pt.zcu.failed_files.fetchSwapRemove(file)) |kv| {

@@ -3779,7 +3772,7 @@ fn recreateStructType(
    const inst_info = key.zir_index.resolveFull(ip).?;
    const file = zcu.fileByIndex(inst_info.file);
    assert(file.status == .success_zir); // otherwise inst tracking failed
-    const zir = file.zir;
+    const zir = file.zir.?;

    assert(zir.instructions.items(.tag)[@intFromEnum(inst_info.inst)] == .extended);
    const extended = zir.instructions.items(.data)[@intFromEnum(inst_info.inst)].extended;

@@ -3852,7 +3845,7 @@ fn recreateUnionType(
    const inst_info = key.zir_index.resolveFull(ip).?;
    const file = zcu.fileByIndex(inst_info.file);
    assert(file.status == .success_zir); // otherwise inst tracking failed
-    const zir = file.zir;
+    const zir = file.zir.?;

    assert(zir.instructions.items(.tag)[@intFromEnum(inst_info.inst)] == .extended);
    const extended = zir.instructions.items(.data)[@intFromEnum(inst_info.inst)].extended;

@@ -3939,7 +3932,7 @@ fn recreateEnumType(
    const inst_info = key.zir_index.resolveFull(ip).?;
    const file = zcu.fileByIndex(inst_info.file);
    assert(file.status == .success_zir); // otherwise inst tracking failed
-    const zir = file.zir;
+    const zir = file.zir.?;

    assert(zir.instructions.items(.tag)[@intFromEnum(inst_info.inst)] == .extended);
    const extended = zir.instructions.items(.data)[@intFromEnum(inst_info.inst)].extended;

@@ -4083,7 +4076,7 @@ pub fn ensureNamespaceUpToDate(pt: Zcu.PerThread, namespace_index: Zcu.Namespace
    const inst_info = key.zir_index.resolveFull(ip) orelse return error.AnalysisFail;
    const file = zcu.fileByIndex(inst_info.file);
    if (file.status != .success_zir) return error.AnalysisFail;
-    const zir = file.zir;
+    const zir = file.zir.?;

    assert(zir.instructions.items(.tag)[@intFromEnum(inst_info.inst)] == .extended);
    const extended = zir.instructions.items(.data)[@intFromEnum(inst_info.inst)].extended;
@ -750,8 +750,7 @@ pub const File = struct {
|
|||||||
{
|
{
|
||||||
const ti = ti_id.resolveFull(&pt.zcu.intern_pool).?;
|
const ti = ti_id.resolveFull(&pt.zcu.intern_pool).?;
|
||||||
const file = pt.zcu.fileByIndex(ti.file);
|
const file = pt.zcu.fileByIndex(ti.file);
|
||||||
assert(file.zir_loaded);
|
const inst = file.zir.?.instructions.get(@intFromEnum(ti.inst));
|
||||||
const inst = file.zir.instructions.get(@intFromEnum(ti.inst));
|
|
||||||
assert(inst.tag == .declaration);
|
assert(inst.tag == .declaration);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -2358,8 +2358,7 @@ fn initWipNavInner(
|
|||||||
const nav = ip.getNav(nav_index);
|
const nav = ip.getNav(nav_index);
|
||||||
const inst_info = nav.srcInst(ip).resolveFull(ip).?;
|
const inst_info = nav.srcInst(ip).resolveFull(ip).?;
|
||||||
const file = zcu.fileByIndex(inst_info.file);
|
const file = zcu.fileByIndex(inst_info.file);
|
||||||
assert(file.zir_loaded);
|
const decl = file.zir.?.getDeclaration(inst_info.inst);
|
||||||
const decl = file.zir.getDeclaration(inst_info.inst);
|
|
||||||
log.debug("initWipNav({s}:{d}:{d} %{d} = {})", .{
|
log.debug("initWipNav({s}:{d}:{d} %{d} = {})", .{
|
||||||
file.sub_file_path,
|
file.sub_file_path,
|
||||||
decl.src_line + 1,
|
decl.src_line + 1,
|
||||||
@ -2373,7 +2372,7 @@ fn initWipNavInner(
|
|||||||
switch (nav_key) {
|
switch (nav_key) {
|
||||||
// Ignore @extern
|
// Ignore @extern
|
||||||
.@"extern" => |@"extern"| if (decl.linkage != .@"extern" or
|
.@"extern" => |@"extern"| if (decl.linkage != .@"extern" or
|
||||||
!@"extern".name.eqlSlice(file.zir.nullTerminatedString(decl.name), ip)) return null,
|
!@"extern".name.eqlSlice(file.zir.?.nullTerminatedString(decl.name), ip)) return null,
|
||||||
else => {},
|
else => {},
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2696,8 +2695,7 @@ fn updateComptimeNavInner(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPoo
|
|||||||
const nav = ip.getNav(nav_index);
|
const nav = ip.getNav(nav_index);
|
||||||
const inst_info = nav.srcInst(ip).resolveFull(ip).?;
|
const inst_info = nav.srcInst(ip).resolveFull(ip).?;
|
||||||
const file = zcu.fileByIndex(inst_info.file);
|
const file = zcu.fileByIndex(inst_info.file);
|
||||||
assert(file.zir_loaded);
|
const decl = file.zir.?.getDeclaration(inst_info.inst);
|
||||||
const decl = file.zir.getDeclaration(inst_info.inst);
|
|
||||||
log.debug("updateComptimeNav({s}:{d}:{d} %{d} = {})", .{
|
log.debug("updateComptimeNav({s}:{d}:{d} %{d} = {})", .{
|
||||||
file.sub_file_path,
|
file.sub_file_path,
|
||||||
decl.src_line + 1,
|
decl.src_line + 1,
|
||||||
@ -4097,7 +4095,7 @@ pub fn updateContainerType(dwarf: *Dwarf, pt: Zcu.PerThread, type_index: InternP
|
|||||||
// if a newly-tracked instruction can be a type's owner `zir_index`.
|
// if a newly-tracked instruction can be a type's owner `zir_index`.
|
||||||
comptime assert(Zir.inst_tracking_version == 0);
|
comptime assert(Zir.inst_tracking_version == 0);
|
||||||
|
|
||||||
const decl_inst = file.zir.instructions.get(@intFromEnum(inst_info.inst));
|
const decl_inst = file.zir.?.instructions.get(@intFromEnum(inst_info.inst));
|
||||||
const name_strat: Zir.Inst.NameStrategy = switch (decl_inst.tag) {
|
const name_strat: Zir.Inst.NameStrategy = switch (decl_inst.tag) {
|
||||||
.struct_init, .struct_init_ref, .struct_init_anon => .anon,
|
.struct_init, .struct_init_ref, .struct_init_anon => .anon,
|
||||||
.extended => switch (decl_inst.data.extended.opcode) {
|
.extended => switch (decl_inst.data.extended.opcode) {
|
||||||
@ -4301,14 +4299,13 @@ pub fn updateLineNumber(dwarf: *Dwarf, zcu: *Zcu, zir_index: InternPool.TrackedI
|
|||||||
const inst_info = zir_index.resolveFull(ip).?;
|
const inst_info = zir_index.resolveFull(ip).?;
|
||||||
assert(inst_info.inst != .main_struct_inst);
|
assert(inst_info.inst != .main_struct_inst);
|
||||||
const file = zcu.fileByIndex(inst_info.file);
|
const file = zcu.fileByIndex(inst_info.file);
|
||||||
assert(file.zir_loaded);
|
const decl = file.zir.?.getDeclaration(inst_info.inst);
|
||||||
const decl = file.zir.getDeclaration(inst_info.inst);
|
|
||||||
log.debug("updateLineNumber({s}:{d}:{d} %{d} = {s})", .{
|
log.debug("updateLineNumber({s}:{d}:{d} %{d} = {s})", .{
|
||||||
file.sub_file_path,
|
file.sub_file_path,
|
||||||
decl.src_line + 1,
|
decl.src_line + 1,
|
||||||
decl.src_column + 1,
|
decl.src_column + 1,
|
||||||
@intFromEnum(inst_info.inst),
|
@intFromEnum(inst_info.inst),
|
||||||
file.zir.nullTerminatedString(decl.name),
|
file.zir.?.nullTerminatedString(decl.name),
|
||||||
});
|
});
|
||||||
|
|
||||||
var line_buf: [4]u8 = undefined;
|
var line_buf: [4]u8 = undefined;
|
||||||
@ -4661,7 +4658,7 @@ pub fn flushModule(dwarf: *Dwarf, pt: Zcu.PerThread) FlushError!void {
|
|||||||
.target_unit = StringSection.unit,
|
.target_unit = StringSection.unit,
|
||||||
.target_entry = (try dwarf.debug_line_str.addString(
|
.target_entry = (try dwarf.debug_line_str.addString(
|
||||||
dwarf,
|
dwarf,
|
||||||
if (file.mod.builtin_file == file) file.source else "",
|
if (file.mod.builtin_file == file) file.source.? else "",
|
||||||
)).toOptional(),
|
)).toOptional(),
|
||||||
});
|
});
|
||||||
header.appendNTimesAssumeCapacity(0, dwarf.sectionOffsetBytes());
|
header.appendNTimesAssumeCapacity(0, dwarf.sectionOffsetBytes());
|
||||||

112 src/main.zig
@@ -3636,7 +3636,7 @@ fn buildOutputType(

if (show_builtin) {
const builtin_mod = comp.root_mod.getBuiltinDependency();
-const source = builtin_mod.builtin_file.?.source;
+const source = builtin_mod.builtin_file.?.source.?;
return std.io.getStdOut().writeAll(source);
}
switch (listen) {
@@ -6135,14 +6135,12 @@ fn cmdAstCheck(
var file: Zcu.File = .{
.status = .never_loaded,
.prev_status = .never_loaded,
-.source_loaded = false,
-.tree_loaded = false,
-.zir_loaded = false,
.sub_file_path = undefined,
-.source = undefined,
.stat = undefined,
-.tree = undefined,
-.zir = undefined,
+.source = null,
+.tree = null,
+.zir = null,
+.zoir = null,
.mod = undefined,
};
if (zig_source_file) |file_name| {
@@ -6163,7 +6161,6 @@ fn cmdAstCheck(

file.sub_file_path = file_name;
file.source = source;
-file.source_loaded = true;
file.stat = .{
.size = stat.size,
.inode = stat.inode,
@@ -6176,7 +6173,6 @@ fn cmdAstCheck(
};
file.sub_file_path = "<stdin>";
file.source = source;
-file.source_loaded = true;
file.stat.size = source.len;
}

@@ -6196,17 +6192,15 @@ fn cmdAstCheck(
.fully_qualified_name = "root",
});

-file.tree = try Ast.parse(gpa, file.source, mode);
-file.tree_loaded = true;
-defer file.tree.deinit(gpa);
+file.tree = try Ast.parse(gpa, file.source.?, mode);
+defer file.tree.?.deinit(gpa);

switch (mode) {
.zig => {
-file.zir = try AstGen.generate(gpa, file.tree);
-file.zir_loaded = true;
-defer file.zir.deinit(gpa);
+file.zir = try AstGen.generate(gpa, file.tree.?);
+defer file.zir.?.deinit(gpa);

-if (file.zir.hasCompileErrors()) {
+if (file.zir.?.hasCompileErrors()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
@@ -6215,13 +6209,13 @@ fn cmdAstCheck(
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(color.renderOptions());

-if (file.zir.loweringFailed()) {
+if (file.zir.?.loweringFailed()) {
process.exit(1);
}
}

if (!want_output_text) {
-if (file.zir.hasCompileErrors()) {
+if (file.zir.?.hasCompileErrors()) {
process.exit(1);
} else {
return cleanExit();
@@ -6233,18 +6227,18 @@ fn cmdAstCheck(

{
const token_bytes = @sizeOf(Ast.TokenList) +
-file.tree.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(Ast.ByteOffset));
-const tree_bytes = @sizeOf(Ast) + file.tree.nodes.len *
+file.tree.?.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(Ast.ByteOffset));
+const tree_bytes = @sizeOf(Ast) + file.tree.?.nodes.len *
(@sizeOf(Ast.Node.Tag) +
@sizeOf(Ast.Node.Data) +
@sizeOf(Ast.TokenIndex));
-const instruction_bytes = file.zir.instructions.len *
+const instruction_bytes = file.zir.?.instructions.len *
// Here we don't use @sizeOf(Zir.Inst.Data) because it would include
// the debug safety tag but we want to measure release size.
(@sizeOf(Zir.Inst.Tag) + 8);
-const extra_bytes = file.zir.extra.len * @sizeOf(u32);
+const extra_bytes = file.zir.?.extra.len * @sizeOf(u32);
const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes +
-file.zir.string_bytes.len * @sizeOf(u8);
+file.zir.?.string_bytes.len * @sizeOf(u8);
const stdout = io.getStdOut();
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
// zig fmt: off
@@ -6258,27 +6252,27 @@ fn cmdAstCheck(
\\# Extra Data Items: {d} ({})
\\
, .{
-fmtIntSizeBin(file.source.len),
-file.tree.tokens.len, fmtIntSizeBin(token_bytes),
-file.tree.nodes.len, fmtIntSizeBin(tree_bytes),
+fmtIntSizeBin(file.source.?.len),
+file.tree.?.tokens.len, fmtIntSizeBin(token_bytes),
+file.tree.?.nodes.len, fmtIntSizeBin(tree_bytes),
fmtIntSizeBin(total_bytes),
-file.zir.instructions.len, fmtIntSizeBin(instruction_bytes),
-fmtIntSizeBin(file.zir.string_bytes.len),
-file.zir.extra.len, fmtIntSizeBin(extra_bytes),
+file.zir.?.instructions.len, fmtIntSizeBin(instruction_bytes),
+fmtIntSizeBin(file.zir.?.string_bytes.len),
+file.zir.?.extra.len, fmtIntSizeBin(extra_bytes),
});
// zig fmt: on
}

try @import("print_zir.zig").renderAsTextToFile(gpa, &file, io.getStdOut());

-if (file.zir.hasCompileErrors()) {
+if (file.zir.?.hasCompileErrors()) {
process.exit(1);
} else {
return cleanExit();
}
},
.zon => {
-const zoir = try ZonGen.generate(gpa, file.tree, .{});
+const zoir = try ZonGen.generate(gpa, file.tree.?, .{});
defer zoir.deinit(gpa);

if (zoir.hasCompileErrors()) {
@@ -6289,7 +6283,7 @@ fn cmdAstCheck(
{
const src_path = try file.fullPath(gpa);
defer gpa.free(src_path);
-try wip_errors.addZoirErrorMessages(zoir, file.tree, file.source, src_path);
+try wip_errors.addZoirErrorMessages(zoir, file.tree.?, file.source.?, src_path);
}

var error_bundle = try wip_errors.toOwnedBundle("");
@@ -6519,26 +6513,24 @@ fn cmdDumpZir(
var file: Zcu.File = .{
.status = .never_loaded,
.prev_status = .never_loaded,
-.source_loaded = false,
-.tree_loaded = false,
-.zir_loaded = true,
.sub_file_path = undefined,
-.source = undefined,
.stat = undefined,
-.tree = undefined,
+.source = null,
+.tree = null,
.zir = try Zcu.loadZirCache(gpa, f),
+.zoir = null,
.mod = undefined,
};
-defer file.zir.deinit(gpa);
+defer file.zir.?.deinit(gpa);

{
-const instruction_bytes = file.zir.instructions.len *
+const instruction_bytes = file.zir.?.instructions.len *
// Here we don't use @sizeOf(Zir.Inst.Data) because it would include
// the debug safety tag but we want to measure release size.
(@sizeOf(Zir.Inst.Tag) + 8);
-const extra_bytes = file.zir.extra.len * @sizeOf(u32);
+const extra_bytes = file.zir.?.extra.len * @sizeOf(u32);
const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes +
-file.zir.string_bytes.len * @sizeOf(u8);
+file.zir.?.string_bytes.len * @sizeOf(u8);
const stdout = io.getStdOut();
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
// zig fmt: off
@@ -6550,9 +6542,9 @@ fn cmdDumpZir(
\\
, .{
fmtIntSizeBin(total_bytes),
-file.zir.instructions.len, fmtIntSizeBin(instruction_bytes),
-fmtIntSizeBin(file.zir.string_bytes.len),
-file.zir.extra.len, fmtIntSizeBin(extra_bytes),
+file.zir.?.instructions.len, fmtIntSizeBin(instruction_bytes),
+fmtIntSizeBin(file.zir.?.string_bytes.len),
+file.zir.?.extra.len, fmtIntSizeBin(extra_bytes),
});
// zig fmt: on
}
@@ -6587,18 +6579,16 @@ fn cmdChangelist(
var file: Zcu.File = .{
.status = .never_loaded,
.prev_status = .never_loaded,
-.source_loaded = false,
-.tree_loaded = false,
-.zir_loaded = false,
.sub_file_path = old_source_file,
-.source = undefined,
.stat = .{
.size = stat.size,
.inode = stat.inode,
.mtime = stat.mtime,
},
-.tree = undefined,
-.zir = undefined,
+.source = null,
+.tree = null,
+.zir = null,
+.zoir = null,
.mod = undefined,
};

@@ -6613,17 +6603,14 @@ fn cmdChangelist(
if (amt != stat.size)
return error.UnexpectedEndOfFile;
file.source = source;
-file.source_loaded = true;

-file.tree = try Ast.parse(gpa, file.source, .zig);
-file.tree_loaded = true;
-defer file.tree.deinit(gpa);
+file.tree = try Ast.parse(gpa, file.source.?, .zig);
+defer file.tree.?.deinit(gpa);

-file.zir = try AstGen.generate(gpa, file.tree);
-file.zir_loaded = true;
-defer file.zir.deinit(gpa);
+file.zir = try AstGen.generate(gpa, file.tree.?);
+defer file.zir.?.deinit(gpa);

-if (file.zir.loweringFailed()) {
+if (file.zir.?.loweringFailed()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
@@ -6652,13 +6639,12 @@ fn cmdChangelist(
var new_tree = try Ast.parse(gpa, new_source, .zig);
defer new_tree.deinit(gpa);

-var old_zir = file.zir;
+var old_zir = file.zir.?;
defer old_zir.deinit(gpa);
-file.zir_loaded = false;
+file.zir = null;
file.zir = try AstGen.generate(gpa, new_tree);
-file.zir_loaded = true;

-if (file.zir.loweringFailed()) {
+if (file.zir.?.loweringFailed()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
@@ -6672,7 +6658,7 @@ fn cmdChangelist(
var inst_map: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .empty;
defer inst_map.deinit(gpa);

-try Zcu.mapOldZirToNew(gpa, old_zir, file.zir, &inst_map);
+try Zcu.mapOldZirToNew(gpa, old_zir, file.zir.?, &inst_map);

var bw = io.bufferedWriter(io.getStdOut().writer());
const stdout = bw.writer();

src/print_zir.zig
@@ -22,7 +22,7 @@ pub fn renderAsTextToFile(
.gpa = gpa,
.arena = arena.allocator(),
.file = scope_file,
-.code = scope_file.zir,
+.code = scope_file.zir.?,
.indent = 0,
.parent_decl_node = 0,
.recurse_decls = true,
@@ -36,18 +36,18 @@ pub fn renderAsTextToFile(
try stream.print("%{d} ", .{@intFromEnum(main_struct_inst)});
try writer.writeInstToStream(stream, main_struct_inst);
try stream.writeAll("\n");
-const imports_index = scope_file.zir.extra[@intFromEnum(Zir.ExtraIndex.imports)];
+const imports_index = scope_file.zir.?.extra[@intFromEnum(Zir.ExtraIndex.imports)];
if (imports_index != 0) {
try stream.writeAll("Imports:\n");

-const extra = scope_file.zir.extraData(Zir.Inst.Imports, imports_index);
+const extra = scope_file.zir.?.extraData(Zir.Inst.Imports, imports_index);
var extra_index = extra.end;

for (0..extra.data.imports_len) |_| {
-const item = scope_file.zir.extraData(Zir.Inst.Imports.Item, extra_index);
+const item = scope_file.zir.?.extraData(Zir.Inst.Imports.Item, extra_index);
extra_index = item.end;

-const import_path = scope_file.zir.nullTerminatedString(item.data.name);
+const import_path = scope_file.zir.?.nullTerminatedString(item.data.name);
try stream.print(" @import(\"{}\") ", .{
std.zig.fmtEscapes(import_path),
});
@@ -75,7 +75,7 @@ pub fn renderInstructionContext(
.gpa = gpa,
.arena = arena.allocator(),
.file = scope_file,
-.code = scope_file.zir,
+.code = scope_file.zir.?,
.indent = if (indent < 2) 2 else indent,
.parent_decl_node = parent_decl_node,
.recurse_decls = false,
@@ -107,7 +107,7 @@ pub fn renderSingleInstruction(
.gpa = gpa,
.arena = arena.allocator(),
.file = scope_file,
-.code = scope_file.zir,
+.code = scope_file.zir.?,
.indent = indent,
.parent_decl_node = parent_decl_node,
.recurse_decls = false,
@@ -2759,8 +2759,7 @@ const Writer = struct {
}

fn writeSrcNode(self: *Writer, stream: anytype, src_node: i32) !void {
-if (!self.file.tree_loaded) return;
-const tree = self.file.tree;
+const tree = self.file.tree orelse return;
const abs_node = self.relativeToNodeIndex(src_node);
const src_span = tree.nodeToSpan(abs_node);
const start = self.line_col_cursor.find(tree.source, src_span.start);
@@ -2772,8 +2771,7 @@ const Writer = struct {
}

fn writeSrcTok(self: *Writer, stream: anytype, src_tok: u32) !void {
-if (!self.file.tree_loaded) return;
-const tree = self.file.tree;
+const tree = self.file.tree orelse return;
const abs_tok = tree.firstToken(self.parent_decl_node) + src_tok;
const span_start = tree.tokens.items(.start)[abs_tok];
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(abs_tok).len));
@@ -2786,8 +2784,7 @@ const Writer = struct {
}

fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: u32) !void {
-if (!self.file.tree_loaded) return;
-const tree = self.file.tree;
+const tree = self.file.tree orelse return;
const span_start = tree.tokens.items(.start)[src_tok];
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(src_tok).len));
const start = self.line_col_cursor.find(tree.source, span_start);