mirror of https://github.com/ziglang/zig.git
stage2: AstGen improvements
* AstGen: represent compile errors in ZIR rather than returning `error.AnalysisFail`.
* ZIR: remove decl_ref and decl_val instructions. These are replaced by `decl_ref_named` and `decl_val_named`, respectively, which will probably get renamed in the future to the instructions that were just deleted.
* AstGen: implement `@This()`, `@fence()`, `@returnAddress()`, and `@src()`.
* AstGen: struct_decl improved to support fields_len=0 while still having decls.
* AstGen: fix missing null bytes after compile error messages.
* SrcLoc: no longer depends on `Decl`; instead it has an explicit field `parent_decl_node`, which is an absolute AST Node index.
* Module: the `failed_files` table can now have a null value, in which case the key (a `*Scope.File`) has ZIR errors in it.
* ZIR: implement text rendering of struct decls.
* CLI: introduce `debug_usage` and a `zig astgen` command, which is enabled when the compiler is built in debug mode.
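The net effect on error reporting, as a minimal sketch distilled from the diff below (not part of the commit itself): AstGen now records source-level problems as compile errors inside the generated ZIR, and callers check for them after generation instead of catching `error.AnalysisFail`.

    // Sketch only; mirrors astGenFile and the new `zig astgen` command below.
    // Assumes `file` is a *Scope.File whose tree has already been parsed.
    file.zir = try AstGen.generate(gpa, file);
    file.zir_loaded = true;

    if (file.zir.hasCompileErrors()) {
        // The messages live in the ZIR's extra/string_bytes arrays; convert
        // them to the common AllErrors.Message form and render them.
        var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena);
        try Compilation.AllErrors.addZir(arena, &errors, file, file.source);
        for (errors.items) |err_msg| err_msg.renderToStdErr(std.debug.detectTTYConfig());
        file.status = .astgen_failure;
        return error.AnalysisFail;
    }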
This commit is contained in:
parent cf57e8223f
commit 01b4bf34ea

BRANCH_TODO | 11
@@ -1,3 +1,14 @@
* AstGen decls into blocks so we can evaluate them independently
* look for cached zir code
* save zir code to cache
* store list of imported strings
* use list of imported strings to queue up more astgen tasks
* keep track of file dependencies/dependants
* unload files from memory when a dependency is dropped
* implement zir error notes

* implement the new AstGen compile errors

* get rid of failed_root_src_file
* get rid of Scope.DeclRef
* handle decl collision with usingnamespace

@@ -96,14 +96,18 @@ pub fn generate(gpa: *Allocator, file: *Scope.File) InnerError!Zir {
.arg = 0,
},
};
const struct_decl_ref = try AstGen.structDeclInner(
if (AstGen.structDeclInner(
&gen_scope,
&gen_scope.base,
0,
container_decl,
.struct_decl,
);
astgen.extra.items[0] = @enumToInt(struct_decl_ref);
)) |struct_decl_ref| {
astgen.extra.items[0] = @enumToInt(struct_decl_ref);
} else |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {}, // Handled via compile_errors below.
}

if (astgen.compile_errors.items.len == 0) {
astgen.extra.items[1] = 0;
@@ -1272,8 +1276,6 @@ fn blockExprStmts(
.cmp_gt,
.cmp_neq,
.coerce_result_ptr,
.decl_ref,
.decl_val,
.decl_ref_named,
.decl_val_named,
.load,
@@ -1381,6 +1383,10 @@ fn blockExprStmts(
.type_info,
.size_of,
.bit_size_of,
.this,
.fence,
.ret_addr,
.builtin_src,
=> break :b false,

// ZIR instructions that are always either `noreturn` or `void`.
@@ -2385,13 +2391,16 @@ fn structDeclInner(

const decl_inst = try gz.addBlock(tag, node);
try gz.instructions.append(gpa, decl_inst);
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
if (field_index != 0) {
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
}

try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
@typeInfo(Zir.Inst.StructDecl).Struct.fields.len +
bit_bag.items.len + 1 + fields_data.items.len +
bit_bag.items.len + @boolToInt(field_index != 0) + fields_data.items.len +
block_scope.instructions.items.len +
wip_decls.bit_bag.items.len + 1 + wip_decls.name_and_value.items.len);
wip_decls.bit_bag.items.len + @boolToInt(wip_decls.decl_index != 0) +
wip_decls.name_and_value.items.len);
const zir_datas = astgen.instructions.items(.data);
zir_datas[decl_inst].pl_node.payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.StructDecl{
.body_len = @intCast(u32, block_scope.instructions.items.len),
@@ -2401,11 +2410,15 @@ fn structDeclInner(
astgen.extra.appendSliceAssumeCapacity(block_scope.instructions.items);

astgen.extra.appendSliceAssumeCapacity(bit_bag.items); // Likely empty.
astgen.extra.appendAssumeCapacity(cur_bit_bag);
if (field_index != 0) {
astgen.extra.appendAssumeCapacity(cur_bit_bag);
}
astgen.extra.appendSliceAssumeCapacity(fields_data.items);

astgen.extra.appendSliceAssumeCapacity(wip_decls.bit_bag.items); // Likely empty.
astgen.extra.appendAssumeCapacity(wip_decls.cur_bit_bag);
if (wip_decls.decl_index != 0) {
astgen.extra.appendAssumeCapacity(wip_decls.cur_bit_bag);
}
astgen.extra.appendSliceAssumeCapacity(wip_decls.name_and_value.items);

return gz.indexToRef(decl_inst);
@@ -4750,6 +4763,11 @@ fn builtinCall(
return rvalue(gz, scope, rl, result, node);
},

.This => return rvalue(gz, scope, rl, try gz.addNode(.this, node), node),
.fence => return rvalue(gz, scope, rl, try gz.addNode(.fence, node), node),
.return_address => return rvalue(gz, scope, rl, try gz.addNode(.ret_addr, node), node),
.src => return rvalue(gz, scope, rl, try gz.addNode(.builtin_src, node), node),

.add_with_overflow,
.align_cast,
.align_of,
@@ -4778,7 +4796,6 @@ fn builtinCall(
.error_name,
.error_return_trace,
.err_set_cast,
.fence,
.field_parent_ptr,
.float_to_int,
.has_field,
@@ -4794,7 +4811,6 @@ fn builtinCall(
.pop_count,
.ptr_cast,
.rem,
.return_address,
.set_align_stack,
.set_cold,
.set_float_mode,
@@ -4805,7 +4821,6 @@ fn builtinCall(
.shuffle,
.splat,
.reduce,
.src,
.sqrt,
.sin,
.cos,
@@ -4821,21 +4836,18 @@ fn builtinCall(
.round,
.sub_with_overflow,
.tag_name,
.This,
.truncate,
.Type,
.type_name,
.union_init,
=> return astgen.failNode(node, "TODO: implement builtin function {s}", .{
builtin_name,
}),

.async_call,
.frame,
.Frame,
.frame_address,
.frame_size,
=> return astgen.failNode(node, "async and related features are not yet supported", .{}),
=> return astgen.failNode(node, "TODO: implement builtin function {s}", .{
builtin_name,
}),
}
}

@@ -5376,7 +5388,7 @@ pub fn failNodeNotes(
{
var managed = string_bytes.toManaged(astgen.gpa);
defer string_bytes.* = managed.toUnmanaged();
try managed.writer().print(format, args);
try managed.writer().print(format ++ "\x00", args);
}
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
@@ -5417,7 +5429,7 @@ pub fn failTokNotes(
{
var managed = string_bytes.toManaged(astgen.gpa);
defer string_bytes.* = managed.toUnmanaged();
try managed.writer().print(format, args);
try managed.writer().print(format ++ "\x00", args);
}
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
@@ -5451,7 +5463,7 @@ pub fn failOff(
{
var managed = string_bytes.toManaged(astgen.gpa);
defer string_bytes.* = managed.toUnmanaged();
try managed.writer().print(format, args);
try managed.writer().print(format ++ "\x00", args);
}
try astgen.compile_errors.append(astgen.gpa, .{
.msg = msg,
@@ -5475,7 +5487,7 @@ pub fn errNoteTok(
{
var managed = string_bytes.toManaged(astgen.gpa);
defer string_bytes.* = managed.toUnmanaged();
try managed.writer().print(format, args);
try managed.writer().print(format ++ "\x00", args);
}
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,
@@ -5498,7 +5510,7 @@ pub fn errNoteNode(
{
var managed = string_bytes.toManaged(astgen.gpa);
defer string_bytes.* = managed.toUnmanaged();
try managed.writer().print(format, args);
try managed.writer().print(format ++ "\x00", args);
}
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,

@@ -391,10 +391,10 @@ pub const AllErrors = struct {
const notes = try arena.allocator.alloc(Message, module_err_msg.notes.len);
for (notes) |*note, i| {
const module_note = module_err_msg.notes[i];
const source = try module_note.src_loc.fileScope().getSource(module.gpa);
const source = try module_note.src_loc.file_scope.getSource(module.gpa);
const byte_offset = try module_note.src_loc.byteOffset();
const loc = std.zig.findLineColumn(source, byte_offset);
const sub_file_path = module_note.src_loc.fileScope().sub_file_path;
const sub_file_path = module_note.src_loc.file_scope.sub_file_path;
note.* = .{
.src = .{
.src_path = try arena.allocator.dupe(u8, sub_file_path),
@@ -406,10 +406,10 @@ pub const AllErrors = struct {
},
};
}
const source = try module_err_msg.src_loc.fileScope().getSource(module.gpa);
const source = try module_err_msg.src_loc.file_scope.getSource(module.gpa);
const byte_offset = try module_err_msg.src_loc.byteOffset();
const loc = std.zig.findLineColumn(source, byte_offset);
const sub_file_path = module_err_msg.src_loc.fileScope().sub_file_path;
const sub_file_path = module_err_msg.src_loc.file_scope.sub_file_path;
try errors.append(.{
.src = .{
.src_path = try arena.allocator.dupe(u8, sub_file_path),
@@ -423,6 +423,56 @@ pub const AllErrors = struct {
});
}

pub fn addZir(
arena: *Allocator,
errors: *std.ArrayList(Message),
file: *Module.Scope.File,
source: []const u8,
) !void {
assert(file.zir_loaded);
assert(file.tree_loaded);
const Zir = @import("Zir.zig");
const payload_index = file.zir.extra[Zir.compile_error_extra_index];
assert(payload_index != 0);

const header = file.zir.extraData(Zir.Inst.CompileErrors, payload_index);
const items_len = header.data.items_len;
var extra_index = header.end;
var item_i: usize = 0;
while (item_i < items_len) : (item_i += 1) {
const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
extra_index = item.end;

if (item.data.notes != 0) {
@panic("TODO implement AllErrors for Zir notes");
}

const msg = file.zir.nullTerminatedString(item.data.msg);
const byte_offset = blk: {
const token_starts = file.tree.tokens.items(.start);
if (item.data.node != 0) {
const main_tokens = file.tree.nodes.items(.main_token);
const main_token = main_tokens[item.data.node];
break :blk token_starts[main_token];
}
break :blk token_starts[item.data.token] + item.data.byte_offset;
};
const loc = std.zig.findLineColumn(source, byte_offset);

try errors.append(.{
.src = .{
.src_path = try arena.dupe(u8, file.sub_file_path),
.msg = try arena.dupe(u8, msg),
.byte_offset = byte_offset,
.line = @intCast(u32, loc.line),
.column = @intCast(u32, loc.column),
.notes = &.{}, // TODO
.source_line = try arena.dupe(u8, loc.source_line),
},
});
}
}

fn addPlain(
arena: *std.heap.ArenaAllocator,
errors: *std.ArrayList(Message),
@@ -1624,7 +1674,13 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
}
if (self.bin_file.options.module) |module| {
for (module.failed_files.items()) |entry| {
try AllErrors.add(module, &arena, &errors, entry.value.*);
if (entry.value) |msg| {
try AllErrors.add(module, &arena, &errors, msg.*);
} else {
// Must be ZIR errors.
const source = try entry.key.getSource(module.gpa);
try AllErrors.addZir(&arena.allocator, &errors, entry.key, source);
}
}
for (module.failed_decls.items()) |entry| {
if (entry.key.namespace.file_scope.status == .parse_failure) {
@@ -2276,7 +2332,8 @@ fn reportRetryableAstGenError(
file.status = .retryable_failure;

const err_msg = try Module.ErrorMsg.create(gpa, .{
.container = .{ .file_scope = file },
.file_scope = file,
.parent_decl_node = 0,
.lazy = .entire_file,
}, "unable to load {s}: {s}", .{
file.sub_file_path, @errorName(err),

src/Module.zig | 220
@@ -70,7 +70,7 @@ emit_h_failed_decls: std.AutoArrayHashMapUnmanaged(*Decl, *ErrorMsg) = .{},
compile_log_decls: std.AutoArrayHashMapUnmanaged(*Decl, SrcLoc) = .{},
/// Using a map here for consistency with the other fields here.
/// The ErrorMsg memory is owned by the `Scope.File`, using Module's general purpose allocator.
failed_files: std.AutoArrayHashMapUnmanaged(*Scope.File, *ErrorMsg) = .{},
failed_files: std.AutoArrayHashMapUnmanaged(*Scope.File, ?*ErrorMsg) = .{},
/// Using a map here for consistency with the other fields here.
/// The ErrorMsg memory is owned by the `Export`, using Module's general purpose allocator.
failed_exports: std.AutoArrayHashMapUnmanaged(*Export, *ErrorMsg) = .{},
@@ -267,9 +267,10 @@ pub const Decl = struct {
return .{ .node_offset = decl.nodeIndexToRelative(node_index) };
}

pub fn srcLoc(decl: *Decl) SrcLoc {
pub fn srcLoc(decl: Decl) SrcLoc {
return .{
.container = .{ .decl = decl },
.file_scope = decl.getFileScope(),
.parent_decl_node = decl.src_node,
.lazy = .{ .node_offset = 0 },
};
}
@@ -367,7 +368,8 @@ pub const ErrorSet = struct {

pub fn srcLoc(self: ErrorSet) SrcLoc {
return .{
.container = .{ .decl = self.owner_decl },
.file_scope = self.owner_decl.getFileScope(),
.parent_decl_node = self.owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
};
}
@@ -397,7 +399,8 @@ pub const Struct = struct {

pub fn srcLoc(s: Struct) SrcLoc {
return .{
.container = .{ .decl = s.owner_decl },
.file_scope = s.owner_decl.getFileScope(),
.parent_decl_node = s.owner_decl.src_node,
.lazy = .{ .node_offset = s.node_offset },
};
}
@@ -416,7 +419,8 @@ pub const EnumSimple = struct {

pub fn srcLoc(self: EnumSimple) SrcLoc {
return .{
.container = .{ .decl = self.owner_decl },
.file_scope = self.owner_decl.getFileScope(),
.parent_decl_node = self.owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
};
}
@@ -444,7 +448,8 @@ pub const EnumFull = struct {

pub fn srcLoc(self: EnumFull) SrcLoc {
return .{
.container = .{ .decl = self.owner_decl },
.file_scope = self.owner_decl.getFileScope(),
.parent_decl_node = self.owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
};
}
@@ -1710,51 +1715,19 @@ pub const ErrorMsg = struct {

/// Canonical reference to a position within a source file.
pub const SrcLoc = struct {
/// The active field is determined by tag of `lazy`.
container: union {
/// The containing `Decl` according to the source code.
decl: *Decl,
file_scope: *Scope.File,
},
/// Relative to `decl`.
file_scope: *Scope.File,
/// Might be 0 depending on tag of `lazy`.
parent_decl_node: ast.Node.Index,
/// Relative to `parent_decl_node`.
lazy: LazySrcLoc,

pub fn fileScope(src_loc: SrcLoc) *Scope.File {
return switch (src_loc.lazy) {
.unneeded => unreachable,
pub fn declSrcToken(src_loc: SrcLoc) ast.TokenIndex {
const tree = src_loc.file_scope.tree;
return tree.firstToken(src_loc.parent_decl_node);
}

.byte_abs,
.token_abs,
.node_abs,
.entire_file,
=> src_loc.container.file_scope,

.byte_offset,
.token_offset,
.node_offset,
.node_offset_back2tok,
.node_offset_var_decl_ty,
.node_offset_for_cond,
.node_offset_builtin_call_arg0,
.node_offset_builtin_call_arg1,
.node_offset_array_access_index,
.node_offset_slice_sentinel,
.node_offset_call_func,
.node_offset_field_name,
.node_offset_deref_ptr,
.node_offset_asm_source,
.node_offset_asm_ret_ty,
.node_offset_if_cond,
.node_offset_bin_op,
.node_offset_bin_lhs,
.node_offset_bin_rhs,
.node_offset_switch_operand,
.node_offset_switch_special_prong,
.node_offset_switch_range,
.node_offset_fn_type_cc,
.node_offset_fn_type_ret_ty,
=> src_loc.container.decl.namespace.file_scope,
};
pub fn declRelativeToNodeIndex(src_loc: SrcLoc, offset: i32) ast.TokenIndex {
return @bitCast(ast.Node.Index, offset + @bitCast(i32, src_loc.parent_decl_node));
}

pub fn byteOffset(src_loc: SrcLoc) !u32 {
@@ -1765,48 +1738,45 @@ pub const SrcLoc = struct {
.byte_abs => |byte_index| return byte_index,

.token_abs => |tok_index| {
const tree = src_loc.container.file_scope.tree;
const tree = src_loc.file_scope.tree;
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_abs => |node| {
const tree = src_loc.container.file_scope.tree;
const tree = src_loc.file_scope.tree;
const token_starts = tree.tokens.items(.start);
const tok_index = tree.firstToken(node);
return token_starts[tok_index];
},
.byte_offset => |byte_off| {
const decl = src_loc.container.decl;
return decl.srcByteOffset() + byte_off;
const tree = src_loc.file_scope.tree;
const token_starts = tree.tokens.items(.start);
return token_starts[src_loc.declSrcToken()] + byte_off;
},
.token_offset => |tok_off| {
const decl = src_loc.container.decl;
const tok_index = decl.srcToken() + tok_off;
const tree = decl.namespace.file_scope.tree;
const tok_index = src_loc.declSrcToken() + tok_off;
const tree = src_loc.file_scope.tree;
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset, .node_offset_bin_op => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[node];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_back2tok => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const tok_index = tree.firstToken(node) - 2;
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_var_decl_ty => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const node_tags = tree.nodes.items(.tag);
const full = switch (node_tags[node]) {
.global_var_decl => tree.globalVarDecl(node),
@@ -1825,11 +1795,10 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_builtin_call_arg0 => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const param = switch (node_tags[node]) {
.builtin_call_two, .builtin_call_two_comma => node_datas[node].lhs,
.builtin_call, .builtin_call_comma => tree.extra_data[node_datas[node].lhs],
@@ -1841,11 +1810,10 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_builtin_call_arg1 => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const param = switch (node_tags[node]) {
.builtin_call_two, .builtin_call_two_comma => node_datas[node].rhs,
.builtin_call, .builtin_call_comma => tree.extra_data[node_datas[node].lhs + 1],
@@ -1857,22 +1825,20 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_array_access_index => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[node_datas[node].rhs];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_slice_sentinel => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
.slice_open => tree.sliceOpen(node),
.slice => tree.slice(node),
@@ -1885,11 +1851,10 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_call_func => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
const full = switch (node_tags[node]) {
.call_one,
@@ -1912,11 +1877,10 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_field_name => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const tok_index = switch (node_tags[node]) {
.field_access => node_datas[node].rhs,
else => tree.firstToken(node) - 2,
@@ -1925,21 +1889,19 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_deref_ptr => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const tok_index = node_datas[node].lhs;
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_asm_source => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
.asm_simple => tree.asmSimple(node),
.@"asm" => tree.asmFull(node),
@@ -1951,11 +1913,10 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_asm_ret_ty => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
.asm_simple => tree.asmSimple(node),
.@"asm" => tree.asmFull(node),
@@ -1968,9 +1929,8 @@ pub const SrcLoc = struct {
},

.node_offset_for_cond, .node_offset_if_cond => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const node_tags = tree.nodes.items(.tag);
const src_node = switch (node_tags[node]) {
.if_simple => tree.ifSimple(node).ast.cond_expr,
@@ -1988,9 +1948,8 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_bin_lhs => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const src_node = node_datas[node].lhs;
const main_tokens = tree.nodes.items(.main_token);
@@ -1999,9 +1958,8 @@ pub const SrcLoc = struct {
return token_starts[tok_index];
},
.node_offset_bin_rhs => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const src_node = node_datas[node].rhs;
const main_tokens = tree.nodes.items(.main_token);
@@ -2011,9 +1969,8 @@ pub const SrcLoc = struct {
},

.node_offset_switch_operand => |node_off| {
const decl = src_loc.container.decl;
const node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const src_node = node_datas[node].lhs;
const main_tokens = tree.nodes.items(.main_token);
@@ -2023,9 +1980,8 @@ pub const SrcLoc = struct {
},

.node_offset_switch_special_prong => |node_off| {
const decl = src_loc.container.decl;
const switch_node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const switch_node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
@@ -2050,9 +2006,8 @@ pub const SrcLoc = struct {
},

.node_offset_switch_range => |node_off| {
const decl = src_loc.container.decl;
const switch_node = decl.relativeToNodeIndex(node_off);
const tree = decl.namespace.file_scope.tree;
const switch_node = src_loc.declRelativeToNodeIndex(node_off);
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
@@ -2081,11 +2036,10 @@ pub const SrcLoc = struct {
},

.node_offset_fn_type_cc => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
const full = switch (node_tags[node]) {
.fn_proto_simple => tree.fnProtoSimple(&params, node),
@@ -2101,11 +2055,10 @@ pub const SrcLoc = struct {
},

.node_offset_fn_type_ret_ty => |node_off| {
const decl = src_loc.container.decl;
const tree = decl.namespace.file_scope.tree;
const tree = src_loc.file_scope.tree;
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = decl.relativeToNodeIndex(node_off);
const node = src_loc.declRelativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
const full = switch (node_tags[node]) {
.fn_proto_simple => tree.fnProtoSimple(&params, node),
@@ -2288,7 +2241,8 @@ pub const LazySrcLoc = union(enum) {
.token_abs,
.node_abs,
=> .{
.container = .{ .file_scope = scope.getFileScope() },
.file_scope = scope.getFileScope(),
.parent_decl_node = 0,
.lazy = lazy,
},

@@ -2317,7 +2271,8 @@ pub const LazySrcLoc = union(enum) {
.node_offset_fn_type_cc,
.node_offset_fn_type_ret_ty,
=> .{
.container = .{ .decl = scope.srcDecl().? },
.file_scope = scope.getFileScope(),
.parent_decl_node = scope.srcDecl().?.src_node,
.lazy = lazy,
},
};
@@ -2332,7 +2287,8 @@ pub const LazySrcLoc = union(enum) {
.token_abs,
.node_abs,
=> .{
.container = .{ .file_scope = decl.getFileScope() },
.file_scope = decl.getFileScope(),
.parent_decl_node = 0,
.lazy = lazy,
},

@@ -2361,7 +2317,8 @@ pub const LazySrcLoc = union(enum) {
.node_offset_fn_type_cc,
.node_offset_fn_type_ret_ty,
=> .{
.container = .{ .decl = decl },
.file_scope = decl.getFileScope(),
.parent_decl_node = decl.src_node,
.lazy = lazy,
},
};
@@ -2409,7 +2366,7 @@ pub fn deinit(mod: *Module) void {
mod.emit_h_failed_decls.deinit(gpa);

for (mod.failed_files.items()) |entry| {
entry.value.destroy(gpa);
if (entry.value) |msg| msg.destroy(gpa);
}
mod.failed_files.deinit(gpa);

@@ -2495,7 +2452,7 @@ pub fn astGenFile(mod: *Module, file: *Scope.File, prog_node: *std.Progress.Node
const lock = comp.mutex.acquire();
defer lock.release();
if (mod.failed_files.swapRemove(file)) |entry| {
entry.value.destroy(gpa); // Delete previous error message.
if (entry.value) |msg| msg.destroy(gpa); // Delete previous error message.
}
},
}
@@ -2531,7 +2488,8 @@ pub fn astGenFile(mod: *Module, file: *Scope.File, prog_node: *std.Progress.Node
const err_msg = try gpa.create(ErrorMsg);
err_msg.* = .{
.src_loc = .{
.container = .{ .file_scope = file },
.file_scope = file,
.parent_decl_node = 0,
.lazy = .{ .byte_abs = token_starts[parse_err.token] },
},
.msg = msg.toOwnedSlice(),
@@ -2550,11 +2508,11 @@ pub fn astGenFile(mod: *Module, file: *Scope.File, prog_node: *std.Progress.Node
file.zir = try AstGen.generate(gpa, file);
file.zir_loaded = true;

if (file.zir.extra[1] != 0) {
if (file.zir.hasCompileErrors()) {
{
const lock = comp.mutex.acquire();
defer lock.release();
try mod.failed_files.putNoClobber(gpa, file, undefined);
try mod.failed_files.putNoClobber(gpa, file, null);
}
file.status = .astgen_failure;
return error.AnalysisFail;
@@ -2972,12 +2930,14 @@ fn semaContainerFn(
if (deleted_decls.swapRemove(decl) == null) {
decl.analysis = .sema_failure;
const msg = try ErrorMsg.create(mod.gpa, .{
.container = .{ .file_scope = namespace.file_scope },
.file_scope = namespace.file_scope,
.parent_decl_node = 0,
.lazy = .{ .token_abs = name_token },
}, "redeclaration of '{s}'", .{decl.name});
errdefer msg.destroy(mod.gpa);
const other_src_loc: SrcLoc = .{
.container = .{ .file_scope = decl.namespace.file_scope },
.file_scope = namespace.file_scope,
.parent_decl_node = 0,
.lazy = .{ .node_abs = prev_src_node },
};
try mod.errNoteNonLazy(other_src_loc, msg, "previously declared here", .{});
@@ -3040,12 +3000,14 @@ fn semaContainerVar(
if (deleted_decls.swapRemove(decl) == null) {
decl.analysis = .sema_failure;
const msg = try ErrorMsg.create(mod.gpa, .{
.container = .{ .file_scope = namespace.file_scope },
.file_scope = namespace.file_scope,
.parent_decl_node = 0,
.lazy = .{ .token_abs = name_token },
}, "redeclaration of '{s}'", .{decl.name});
errdefer msg.destroy(mod.gpa);
const other_src_loc: SrcLoc = .{
.container = .{ .file_scope = decl.namespace.file_scope },
.file_scope = decl.namespace.file_scope,
.parent_decl_node = 0,
.lazy = .{ .node_abs = prev_src_node },
};
try mod.errNoteNonLazy(other_src_loc, msg, "previously declared here", .{});

src/Sema.zig | 41
@@ -170,9 +170,7 @@ pub fn analyzeBody(
.cmp_lte => try sema.zirCmp(block, inst, .lte),
.cmp_neq => try sema.zirCmp(block, inst, .neq),
.coerce_result_ptr => try sema.zirCoerceResultPtr(block, inst),
.decl_ref => try sema.zirDeclRef(block, inst),
.decl_ref_named => try sema.zirDeclRefNamed(block, inst),
.decl_val => try sema.zirDeclVal(block, inst),
.decl_val_named => try sema.zirDeclValNamed(block, inst),
.load => try sema.zirLoad(block, inst),
.div => try sema.zirArithmetic(block, inst),
@@ -266,6 +264,10 @@ pub fn analyzeBody(
.type_info => try sema.zirTypeInfo(block, inst),
.size_of => try sema.zirSizeOf(block, inst),
.bit_size_of => try sema.zirBitSizeOf(block, inst),
.this => try sema.zirThis(block, inst),
.fence => try sema.zirFence(block, inst),
.ret_addr => try sema.zirRetAddr(block, inst),
.builtin_src => try sema.zirBuiltinSrc(block, inst),
.typeof => try sema.zirTypeof(block, inst),
.typeof_elem => try sema.zirTypeofElem(block, inst),
.typeof_peer => try sema.zirTypeofPeer(block, inst),
@@ -1656,20 +1658,6 @@ fn zirDbgStmtNode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerE
_ = try block.addDbgStmt(src, abs_byte_off);
}

fn zirDeclRef(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const decl = sema.owner_decl.dependencies.entries.items[inst_data.payload_index].key;
return sema.analyzeDeclRef(block, src, decl);
}

fn zirDeclVal(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const decl = sema.owner_decl.dependencies.entries.items[inst_data.payload_index].key;
return sema.analyzeDeclVal(block, src, decl);
}

fn zirDeclRefNamed(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].str_tok;
const src = inst_data.src();
@@ -4373,6 +4361,27 @@ fn zirBitSizeOf(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerErr
return sema.mod.constIntUnsigned(sema.arena, src, Type.initTag(.comptime_int), bit_size);
}

fn zirThis(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirThis", .{});
}
fn zirFence(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirFence", .{});
}
fn zirRetAddr(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirRetAddr", .{});
}
fn zirBuiltinSrc(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirBuiltinSrc", .{});
}

fn zirTypeInfo(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();

src/Zir.zig | 222
@@ -42,6 +42,9 @@ string_bytes: []u8,
/// payload at this index.
extra: []u32,

pub const main_struct_extra_index = 0;
pub const compile_error_extra_index = 1;

/// Returns the requested data, as well as the new index which is at the start of the
/// trailers for the object.
pub fn extraData(code: Zir, comptime T: type, index: usize) struct { data: T, end: usize } {
@@ -76,6 +79,10 @@ pub fn refSlice(code: Zir, start: usize, len: usize) []Inst.Ref {
return @bitCast([]Inst.Ref, raw_slice);
}

pub fn hasCompileErrors(code: Zir) bool {
return code.extra[compile_error_extra_index] != 0;
}

pub fn deinit(code: *Zir, gpa: *Allocator) void {
code.instructions.deinit(gpa);
gpa.free(code.string_bytes);
@@ -83,13 +90,11 @@ pub fn deinit(code: *Zir, gpa: *Allocator) void {
code.* = undefined;
}

/// For debugging purposes, like dumpFn but for unanalyzed zir blocks
pub fn dump(
code: Zir,
/// Write human-readable, debug formatted ZIR code to a file.
pub fn renderAsTextToFile(
gpa: *Allocator,
kind: []const u8,
scope: *Module.Scope,
param_count: usize,
scope_file: *Module.Scope.File,
fs_file: std.fs.File,
) !void {
var arena = std.heap.ArenaAllocator.init(gpa);
defer arena.deinit();
@@ -97,17 +102,17 @@ pub fn dump(
var writer: Writer = .{
.gpa = gpa,
.arena = &arena.allocator,
.scope = scope,
.code = code,
.file = scope_file,
.code = scope_file.zir,
.indent = 0,
.param_count = param_count,
.parent_decl_node = 0,
.param_count = 0,
};

const decl_name = scope.srcDecl().?.name;
const stderr = std.io.getStdErr().writer();
try stderr.print("ZIR {s} {s} %0 ", .{ kind, decl_name });
try writer.writeInstToStream(stderr, 0);
try stderr.print(" // end ZIR {s} {s}\n\n", .{ kind, decl_name });
const main_struct_inst = scope_file.zir.extra[0] - @intCast(u32, Inst.Ref.typed_value_map.len);
try fs_file.writer().print("%{d} ", .{main_struct_inst});
try writer.writeInstToStream(fs_file.writer(), main_struct_inst);
try fs_file.writeAll("\n");
}

/// These are untyped instructions generated from an Abstract Syntax Tree.
@@ -291,12 +296,6 @@ pub const Inst = struct {
/// Declares the beginning of a statement. Used for debug info.
/// Uses the `node` union field.
dbg_stmt_node,
/// Represents a pointer to a global decl.
/// Uses the `pl_node` union field. `payload_index` is into `decls`.
decl_ref,
/// Equivalent to a decl_ref followed by load.
/// Uses the `pl_node` union field. `payload_index` is into `decls`.
decl_val,
/// Same as `decl_ref` except instead of indexing into decls, uses
/// a name to identify the Decl. Uses the `str_tok` union field.
decl_ref_named,
@@ -705,6 +704,14 @@ pub const Inst = struct {
size_of,
/// Implements the `@bitSizeOf` builtin. Uses `un_node`.
bit_size_of,
/// Implements the `@This` builtin. Uses `node`.
this,
/// Implements the `@fence` builtin. Uses `un_node`.
fence,
/// Implements the `@returnAddress` builtin. Uses `un_node`.
ret_addr,
/// Implements the `@src` builtin. Uses `un_node`.
builtin_src,

/// Returns whether the instruction is one of the control flow "noreturn" types.
/// Function calls do not count.
@@ -758,8 +765,6 @@ pub const Inst = struct {
.enum_decl_nonexhaustive,
.opaque_decl,
.dbg_stmt_node,
.decl_ref,
.decl_val,
.decl_ref_named,
.decl_val_named,
.load,
@@ -873,6 +878,10 @@ pub const Inst = struct {
.type_info,
.size_of,
.bit_size_of,
.this,
.fence,
.ret_addr,
.builtin_src,
=> false,

.@"break",
@@ -1647,11 +1656,16 @@ pub const SpecialProng = enum { none, @"else", under };
const Writer = struct {
gpa: *Allocator,
arena: *Allocator,
scope: *Module.Scope,
file: *Module.Scope.File,
code: Zir,
indent: usize,
indent: u32,
parent_decl_node: u32,
param_count: usize,

fn relativeToNodeIndex(self: *Writer, offset: i32) ast.Node.Index {
return @bitCast(ast.Node.Index, offset + @bitCast(i32, self.parent_decl_node));
}

fn writeInstToStream(
self: *Writer,
stream: anytype,
@@ -1832,10 +1846,6 @@ const Writer = struct {
.typeof_peer,
=> try self.writePlNodeMultiOp(stream, inst),

.decl_ref,
.decl_val,
=> try self.writePlNodeDecl(stream, inst),

.field_ptr,
.field_val,
=> try self.writePlNodeField(stream, inst),
@@ -1851,6 +1861,10 @@ const Writer = struct {
.repeat_inline,
.alloc_inferred,
.alloc_inferred_mut,
.this,
.fence,
.ret_addr,
.builtin_src,
=> try self.writeNode(stream, inst),

.error_value,
@@ -2067,68 +2081,114 @@ const Writer = struct {
const extra = self.code.extraData(Inst.StructDecl, inst_data.payload_index);
const body = self.code.extra[extra.end..][0..extra.data.body_len];
const fields_len = extra.data.fields_len;
const decls_len = extra.data.decls_len;

const prev_parent_decl_node = self.parent_decl_node;
self.parent_decl_node = self.relativeToNodeIndex(inst_data.src_node);

var extra_index: usize = undefined;

if (fields_len == 0) {
assert(body.len == 0);
try stream.writeAll("{}, {}) ");
try self.writeSrc(stream, inst_data.src());
return;
try stream.writeAll("{}, {}, {");
extra_index = extra.end;
} else {
try stream.writeAll("{\n");
self.indent += 2;
try self.writeBody(stream, body);

try stream.writeByteNTimes(' ', self.indent - 2);
try stream.writeAll("}, {\n");

const bit_bags_count = std.math.divCeil(usize, fields_len, 16) catch unreachable;
const body_end = extra.end + body.len;
extra_index = body_end + bit_bags_count;
var bit_bag_index: usize = body_end;
var cur_bit_bag: u32 = undefined;
var field_i: u32 = 0;
while (field_i < fields_len) : (field_i += 1) {
if (field_i % 16 == 0) {
cur_bit_bag = self.code.extra[bit_bag_index];
bit_bag_index += 1;
}
const has_align = @truncate(u1, cur_bit_bag) != 0;
cur_bit_bag >>= 1;
const has_default = @truncate(u1, cur_bit_bag) != 0;
cur_bit_bag >>= 1;

const field_name = self.code.nullTerminatedString(self.code.extra[extra_index]);
extra_index += 1;
const field_type = @intToEnum(Inst.Ref, self.code.extra[extra_index]);
extra_index += 1;

try stream.writeByteNTimes(' ', self.indent);
try stream.print("{}: ", .{std.zig.fmtId(field_name)});
try self.writeInstRef(stream, field_type);

if (has_align) {
const align_ref = @intToEnum(Inst.Ref, self.code.extra[extra_index]);
extra_index += 1;

try stream.writeAll(" align(");
try self.writeInstRef(stream, align_ref);
try stream.writeAll(")");
}
if (has_default) {
const default_ref = @intToEnum(Inst.Ref, self.code.extra[extra_index]);
extra_index += 1;

try stream.writeAll(" = ");
try self.writeInstRef(stream, default_ref);
}
try stream.writeAll(",\n");
}

self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}, {");
}
if (decls_len == 0) {
try stream.writeAll("}) ");
} else {
try stream.writeAll("\n");
self.indent += 2;
try self.writeDecls(stream, decls_len, extra_index);
self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
}
self.parent_decl_node = prev_parent_decl_node;
try self.writeSrc(stream, inst_data.src());
}

try stream.writeAll("{\n");
self.indent += 2;
try self.writeBody(stream, body);

try stream.writeByteNTimes(' ', self.indent - 2);
try stream.writeAll("}, {\n");

const bit_bags_count = std.math.divCeil(usize, fields_len, 16) catch unreachable;
const body_end = extra.end + body.len;
var extra_index: usize = body_end + bit_bags_count;
var bit_bag_index: usize = body_end;
fn writeDecls(self: *Writer, stream: anytype, decls_len: u32, extra_start: usize) !void {
const bit_bags_count = std.math.divCeil(usize, decls_len, 16) catch unreachable;
var extra_index = extra_start + bit_bags_count;
var bit_bag_index: usize = extra_start;
var cur_bit_bag: u32 = undefined;
var field_i: u32 = 0;
while (field_i < fields_len) : (field_i += 1) {
if (field_i % 16 == 0) {
var decl_i: u32 = 0;
while (decl_i < decls_len) : (decl_i += 1) {
if (decl_i % 16 == 0) {
cur_bit_bag = self.code.extra[bit_bag_index];
bit_bag_index += 1;
}
const has_align = @truncate(u1, cur_bit_bag) != 0;
const is_pub = @truncate(u1, cur_bit_bag) != 0;
cur_bit_bag >>= 1;
const has_default = @truncate(u1, cur_bit_bag) != 0;
const is_exported = @truncate(u1, cur_bit_bag) != 0;
cur_bit_bag >>= 1;

const field_name = self.code.nullTerminatedString(self.code.extra[extra_index]);
const decl_name = self.code.nullTerminatedString(self.code.extra[extra_index]);
extra_index += 1;
const field_type = @intToEnum(Inst.Ref, self.code.extra[extra_index]);
const decl_value = @intToEnum(Inst.Ref, self.code.extra[extra_index]);
extra_index += 1;

const pub_str = if (is_pub) "pub " else "";
const export_str = if (is_exported) "export " else "";
try stream.writeByteNTimes(' ', self.indent);
try stream.print("{}: ", .{std.zig.fmtId(field_name)});
try self.writeInstRef(stream, field_type);

if (has_align) {
const align_ref = @intToEnum(Inst.Ref, self.code.extra[extra_index]);
extra_index += 1;

try stream.writeAll(" align(");
try self.writeInstRef(stream, align_ref);
try stream.writeAll(")");
}
if (has_default) {
const default_ref = @intToEnum(Inst.Ref, self.code.extra[extra_index]);
extra_index += 1;

try stream.writeAll(" = ");
try self.writeInstRef(stream, default_ref);
}
try stream.writeAll(",\n");
try stream.print("{s}{s}{} = ", .{ pub_str, export_str, std.zig.fmtId(decl_name) });
try self.writeInstRef(stream, decl_value);
try stream.writeAll("\n");
}

self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
try self.writeSrc(stream, inst_data.src());
}

fn writeEnumDecl(self: *Writer, stream: anytype, inst: Inst.Index) !void {
@@ -2374,14 +2434,6 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}

fn writePlNodeDecl(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const owner_decl = self.scope.ownerDecl().?;
const decl = owner_decl.dependencies.entries.items[inst_data.payload_index].key;
try stream.print("{s}) ", .{decl.name});
try self.writeSrc(stream, inst_data.src());
}

fn writePlNodeField(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Inst.Field, inst_data.payload_index).data;
@@ -2593,8 +2645,12 @@ const Writer = struct {
}

fn writeSrc(self: *Writer, stream: anytype, src: LazySrcLoc) !void {
const tree = self.scope.tree();
const src_loc = src.toSrcLoc(self.scope);
const tree = self.file.tree;
const src_loc: Module.SrcLoc = .{
.file_scope = self.file,
.parent_decl_node = self.parent_decl_node,
.lazy = src,
};
const abs_byte_off = try src_loc.byteOffset();
const delta_line = std.zig.findLineColumn(tree.source, abs_byte_off);
try stream.print("{s}:{d}:{d}", .{

src/main.zig | 99
@@ -25,7 +25,11 @@ pub fn fatal(comptime format: []const u8, args: anytype) noreturn {
process.exit(1);
}

pub const max_src_size = 2 * 1024 * 1024 * 1024; // 2 GiB
/// There are many assumptions in the entire codebase that Zig source files can
/// be byte-indexed with a u32 integer.
pub const max_src_size = std.math.maxInt(u32);

pub const debug_extensions_enabled = std.builtin.mode == .Debug;

pub const Color = enum {
auto,
@@ -33,7 +37,7 @@ pub const Color = enum {
on,
};

const usage =
const normal_usage =
\\Usage: zig [command] [options]
\\
\\Commands:
@@ -63,6 +67,16 @@ const usage =
\\
;

const debug_usage = normal_usage ++
\\
\\Debug Commands:
\\
\\ astgen Print ZIR code for a .zig source file
\\
;

const usage = if (debug_extensions_enabled) debug_usage else normal_usage;

pub const log_level: std.log.Level = switch (std.builtin.mode) {
.Debug => .debug,
.ReleaseSafe, .ReleaseFast => .info,
@@ -206,13 +220,15 @@ pub fn mainArgs(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
const stdout = io.getStdOut().writer();
return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, info.target);
} else if (mem.eql(u8, cmd, "version")) {
try std.io.getStdOut().writeAll(build_options.version ++ "\n");
return std.io.getStdOut().writeAll(build_options.version ++ "\n");
} else if (mem.eql(u8, cmd, "env")) {
try @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer());
return @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer());
} else if (mem.eql(u8, cmd, "zen")) {
try io.getStdOut().writeAll(info_zen);
return io.getStdOut().writeAll(info_zen);
} else if (mem.eql(u8, cmd, "help") or mem.eql(u8, cmd, "-h") or mem.eql(u8, cmd, "--help")) {
try io.getStdOut().writeAll(usage);
return io.getStdOut().writeAll(usage);
} else if (debug_extensions_enabled and mem.eql(u8, cmd, "astgen")) {
return cmdAstgen(gpa, arena, cmd_args);
} else {
std.log.info("{s}", .{usage});
fatal("unknown command: {s}", .{args[1]});
@@ -3485,3 +3501,74 @@ pub fn cleanExit() void {
process.exit(0);
}
}

/// This is only enabled for debug builds.
pub fn cmdAstgen(
gpa: *Allocator,
arena: *Allocator,
args: []const []const u8,
) !void {
const Module = @import("Module.zig");
const AstGen = @import("AstGen.zig");
const Zir = @import("Zir.zig");

const zig_source_file = args[0];

var f = try fs.cwd().openFile(zig_source_file, .{});
defer f.close();

const stat = try f.stat();

if (stat.size > max_src_size)
return error.FileTooBig;

var file: Module.Scope.File = .{
.status = .never_loaded,
.source_loaded = false,
.tree_loaded = false,
.zir_loaded = false,
.sub_file_path = zig_source_file,
.source = undefined,
.stat_size = stat.size,
.stat_inode = stat.inode,
.stat_mtime = stat.mtime,
.tree = undefined,
.zir = undefined,
.pkg = undefined,
.namespace = undefined,
};

const source = try arena.allocSentinel(u8, stat.size, 0);
const amt = try f.readAll(source);
if (amt != stat.size)
return error.UnexpectedEndOfFile;
file.source = source;
file.source_loaded = true;

file.tree = try std.zig.parse(gpa, file.source);
file.tree_loaded = true;
defer file.tree.deinit(gpa);

for (file.tree.errors) |parse_error| {
try printErrMsgToFile(gpa, parse_error, file.tree, zig_source_file, io.getStdErr(), .auto);
}
if (file.tree.errors.len != 0) {
process.exit(1);
}

file.zir = try AstGen.generate(gpa, &file);
file.zir_loaded = true;
defer file.zir.deinit(gpa);

if (file.zir.hasCompileErrors()) {
var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena);
try Compilation.AllErrors.addZir(arena, &errors, &file, source);
const ttyconf = std.debug.detectTTYConfig();
for (errors.items) |full_err_msg| {
full_err_msg.renderToStdErr(ttyconf);
}
process.exit(1);
}

return Zir.renderAsTextToFile(gpa, &file, io.getStdOut());
}
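For reference, in a debug build of the compiler the new command is invoked as `zig astgen file.zig`: per cmdAstgen above, it parses the file, runs AstGen, prints any ZIR compile errors to stderr and exits with failure, and otherwise renders the generated ZIR as text to stdout.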