mirror of https://github.com/ziglang/zig.git
Merge pull request #12121 from Vexu/span

Stage2: point to error locations using spans

Commit b2486fbc5e
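Overview: before this commit each compile error carried a single byte_offset
into the source, so stage2 could only print a lone caret under the error
location. This change threads a Span { start, end, main } through
Module.SrcLoc, Compilation.AllErrors, Sema, and the CLI, so the full
offending range gets underlined while the caret stays on the main byte.
Below is a minimal sketch of the new underline arithmetic, separate from
the diff (renderUnderline is an illustrative name, not the compiler's API):

    const std = @import("std");

    const Span = struct {
        start: u32, // first byte of the offending range
        end: u32, // one past the last byte of the range
        main: u32, // the byte the caret points at
    };

    // Mirrors the arithmetic this commit adds to Compilation.zig.
    fn renderUnderline(writer: anytype, caret_column: u32, span: Span) !void {
        const before_caret = span.main - span.start;
        // -1 since span.main includes the caret position itself.
        const after_caret = span.end - span.main -| 1;
        try writer.writeByteNTimes(' ', caret_column - before_caret);
        try writer.writeByteNTimes('~', before_caret);
        try writer.writeByte('^');
        try writer.writeByteNTimes('~', after_caret);
        try writer.writeByte('\n');
    }

    pub fn main() !void {
        const stdout = std.io.getStdOut().writer();
        // For `a + b` starting at column 4, with `main` on the `+` at column 6:
        try stdout.writeAll("    a + b\n");
        try renderUnderline(stdout, 6, .{ .start = 4, .end = 9, .main = 6 });
        // prints: "    ~~^~~"
    }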
src/Compilation.zig

@@ -338,7 +338,7 @@ pub const AllErrors = struct {
         src_path: []const u8,
         line: u32,
         column: u32,
-        byte_offset: u32,
+        span: Module.SrcLoc.Span,
         /// Usually one, but incremented for redundant messages.
         count: u32 = 1,
         /// Does not include the trailing newline.
@@ -427,9 +427,16 @@ pub const AllErrors = struct {
             else => try stderr.writeByte(b),
         };
         try stderr.writeByte('\n');
-        try stderr.writeByteNTimes(' ', src.column);
+        // TODO basic unicode code point monospace width
+        const before_caret = src.span.main - src.span.start;
+        // -1 since span.main includes the caret
+        const after_caret = src.span.end - src.span.main -| 1;
+        try stderr.writeByteNTimes(' ', src.column - before_caret);
         ttyconf.setColor(stderr, .Green);
-        try stderr.writeAll("^\n");
+        try stderr.writeByteNTimes('~', before_caret);
+        try stderr.writeByte('^');
+        try stderr.writeByteNTimes('~', after_caret);
+        try stderr.writeByte('\n');
         ttyconf.setColor(stderr, .Reset);
     }
 }
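For illustration (not part of the diff): where the old code printed a single
caret under the error's main token, the new code above underlines the whole
span, keeping the caret on span.main and bounding the tildes with span.start
and span.end:

    a + b
    ~~^~~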
@@ -469,7 +476,7 @@ pub const AllErrors = struct {
         hasher.update(src.src_path);
         std.hash.autoHash(&hasher, src.line);
         std.hash.autoHash(&hasher, src.column);
-        std.hash.autoHash(&hasher, src.byte_offset);
+        std.hash.autoHash(&hasher, src.span.main);
     },
     .plain => |plain| {
         hasher.update(plain.msg);
@@ -488,7 +495,7 @@ pub const AllErrors = struct {
         mem.eql(u8, a_src.src_path, b_src.src_path) and
             a_src.line == b_src.line and
             a_src.column == b_src.column and
-            a_src.byte_offset == b_src.byte_offset;
+            a_src.span.main == b_src.span.main;
     },
     .plain => return false,
 },
@@ -527,20 +534,20 @@ pub const AllErrors = struct {
         std.hash_map.default_max_load_percentage,
     ).init(allocator);
     const err_source = try module_err_msg.src_loc.file_scope.getSource(module.gpa);
-    const err_byte_offset = try module_err_msg.src_loc.byteOffset(module.gpa);
-    const err_loc = std.zig.findLineColumn(err_source.bytes, err_byte_offset);
+    const err_span = try module_err_msg.src_loc.span(module.gpa);
+    const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.main);

     for (module_err_msg.notes) |module_note| {
         const source = try module_note.src_loc.file_scope.getSource(module.gpa);
-        const byte_offset = try module_note.src_loc.byteOffset(module.gpa);
-        const loc = std.zig.findLineColumn(source.bytes, byte_offset);
+        const span = try module_note.src_loc.span(module.gpa);
+        const loc = std.zig.findLineColumn(source.bytes, span.main);
         const file_path = try module_note.src_loc.file_scope.fullPath(allocator);
         const note = &notes_buf[note_i];
         note.* = .{
             .src = .{
                 .src_path = file_path,
                 .msg = try allocator.dupe(u8, module_note.msg),
-                .byte_offset = byte_offset,
+                .span = span,
                 .line = @intCast(u32, loc.line),
                 .column = @intCast(u32, loc.column),
                 .source_line = if (err_loc.eql(loc)) null else try allocator.dupe(u8, loc.source_line),
@@ -566,7 +573,7 @@ pub const AllErrors = struct {
         .src = .{
             .src_path = file_path,
             .msg = try allocator.dupe(u8, module_err_msg.msg),
-            .byte_offset = err_byte_offset,
+            .span = err_span,
             .line = @intCast(u32, err_loc.line),
             .column = @intCast(u32, err_loc.column),
             .notes = notes_buf[0..note_i],
@@ -593,16 +600,16 @@ pub const AllErrors = struct {
     while (item_i < items_len) : (item_i += 1) {
         const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
         extra_index = item.end;
-        const err_byte_offset = blk: {
-            const token_starts = file.tree.tokens.items(.start);
+        const err_span = blk: {
             if (item.data.node != 0) {
-                const main_tokens = file.tree.nodes.items(.main_token);
-                const main_token = main_tokens[item.data.node];
-                break :blk token_starts[main_token];
+                break :blk Module.SrcLoc.nodeToSpan(&file.tree, item.data.node);
             }
-            break :blk token_starts[item.data.token] + item.data.byte_offset;
+            const token_starts = file.tree.tokens.items(.start);
+            const start = token_starts[item.data.token] + item.data.byte_offset;
+            const end = start + @intCast(u32, file.tree.tokenSlice(item.data.token).len);
+            break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
         };
-        const err_loc = std.zig.findLineColumn(file.source, err_byte_offset);
+        const err_loc = std.zig.findLineColumn(file.source, err_span.main);

         var notes: []Message = &[0]Message{};
         if (item.data.notes != 0) {
@@ -612,22 +619,22 @@ pub const AllErrors = struct {
             for (notes) |*note, i| {
                 const note_item = file.zir.extraData(Zir.Inst.CompileErrors.Item, body[i]);
                 const msg = file.zir.nullTerminatedString(note_item.data.msg);
-                const byte_offset = blk: {
-                    const token_starts = file.tree.tokens.items(.start);
+                const span = blk: {
                     if (note_item.data.node != 0) {
-                        const main_tokens = file.tree.nodes.items(.main_token);
-                        const main_token = main_tokens[note_item.data.node];
-                        break :blk token_starts[main_token];
+                        break :blk Module.SrcLoc.nodeToSpan(&file.tree, note_item.data.node);
                     }
-                    break :blk token_starts[note_item.data.token] + note_item.data.byte_offset;
+                    const token_starts = file.tree.tokens.items(.start);
+                    const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
+                    const end = start + @intCast(u32, file.tree.tokenSlice(note_item.data.token).len);
+                    break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
                 };
-                const loc = std.zig.findLineColumn(file.source, byte_offset);
+                const loc = std.zig.findLineColumn(file.source, span.main);

                 note.* = .{
                     .src = .{
                         .src_path = try file.fullPath(arena),
                         .msg = try arena.dupe(u8, msg),
-                        .byte_offset = byte_offset,
+                        .span = span,
                         .line = @intCast(u32, loc.line),
                         .column = @intCast(u32, loc.column),
                         .notes = &.{}, // TODO rework this function to be recursive
@@ -642,7 +649,7 @@ pub const AllErrors = struct {
         .src = .{
             .src_path = try file.fullPath(arena),
             .msg = try arena.dupe(u8, msg),
-            .byte_offset = err_byte_offset,
+            .span = err_span,
             .line = @intCast(u32, err_loc.line),
             .column = @intCast(u32, err_loc.column),
             .notes = notes,
@@ -688,7 +695,7 @@ pub const AllErrors = struct {
         .src_path = try arena.dupe(u8, src.src_path),
         .line = src.line,
         .column = src.column,
-        .byte_offset = src.byte_offset,
+        .span = src.span,
         .source_line = if (src.source_line) |s| try arena.dupe(u8, s) else null,
         .notes = try dupeList(src.notes, arena),
     } },
@@ -2662,7 +2669,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
         .msg = try std.fmt.allocPrint(arena_allocator, "unable to build C object: {s}", .{
             err_msg.msg,
         }),
-        .byte_offset = 0,
+        .span = .{ .start = 0, .end = 1, .main = 0 },
         .line = err_msg.line,
        .column = err_msg.column,
        .source_line = null, // TODO
src/Module.zig (243 changed lines)
@@ -2082,60 +2082,65 @@ pub const SrcLoc = struct {
         return @bitCast(Ast.Node.Index, offset + @bitCast(i32, src_loc.parent_decl_node));
     }

-    pub fn byteOffset(src_loc: SrcLoc, gpa: Allocator) !u32 {
+    pub const Span = struct {
+        start: u32,
+        end: u32,
+        main: u32,
+    };
+
+    pub fn span(src_loc: SrcLoc, gpa: Allocator) !Span {
         switch (src_loc.lazy) {
             .unneeded => unreachable,
-            .entire_file => return 0,
+            .entire_file => return Span{ .start = 0, .end = 1, .main = 0 },

-            .byte_abs => |byte_index| return byte_index,
+            .byte_abs => |byte_index| return Span{ .start = byte_index, .end = byte_index + 1, .main = byte_index },

             .token_abs => |tok_index| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end, .main = start };
             },
             .node_abs => |node| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const token_starts = tree.tokens.items(.start);
-                const tok_index = tree.firstToken(node);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
             .byte_offset => |byte_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[src_loc.declSrcToken()] + byte_off;
+                const tok_index = src_loc.declSrcToken();
+                const start = tree.tokens.items(.start)[tok_index] + byte_off;
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end, .main = start };
             },
             .token_offset => |tok_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const tok_index = src_loc.declSrcToken() + tok_off;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end, .main = start };
             },
             .node_offset => |traced_off| {
                 const node_off = traced_off.x;
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 assert(src_loc.file_scope.tree_loaded);
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
             .node_offset_bin_op => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 assert(src_loc.file_scope.tree_loaded);
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
-            .node_offset_back2tok => |node_off| {
+            .node_offset_initializer => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                const tok_index = tree.firstToken(node) - 2;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return tokensToSpan(
+                    tree,
+                    tree.firstToken(node) - 3,
+                    tree.lastToken(node),
+                    tree.nodes.items(.main_token)[node] - 2,
+                );
             },
             .node_offset_var_decl_ty => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2148,14 +2153,13 @@ pub const SrcLoc = struct {
                     .aligned_var_decl => tree.alignedVarDecl(node),
                     else => unreachable,
                 };
-                const tok_index = if (full.ast.type_node != 0) blk: {
-                    const main_tokens = tree.nodes.items(.main_token);
-                    break :blk main_tokens[full.ast.type_node];
-                } else blk: {
-                    break :blk full.ast.mut_token + 1; // the name token
-                };
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                if (full.ast.type_node != 0) {
+                    return nodeToSpan(tree, full.ast.type_node);
+                }
+                const tok_index = full.ast.mut_token + 1; // the name token
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end, .main = start };
             },
             .node_offset_builtin_call_arg0 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 0),
             .node_offset_builtin_call_arg1 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 1),
@@ -2167,10 +2171,7 @@ pub const SrcLoc = struct {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_datas = tree.nodes.items(.data);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node_datas[node].rhs];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].rhs);
             },
             .node_offset_slice_ptr,
             .node_offset_slice_start,
@@ -2186,18 +2187,14 @@ pub const SrcLoc = struct {
                     .slice_sentinel => tree.sliceSentinel(node),
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[
-                    switch (src_loc.lazy) {
-                        .node_offset_slice_ptr => full.ast.sliced,
-                        .node_offset_slice_start => full.ast.start,
-                        .node_offset_slice_end => full.ast.end,
-                        .node_offset_slice_sentinel => full.ast.sentinel,
-                        else => unreachable,
-                    }
-                ];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const part_node = switch (src_loc.lazy) {
+                    .node_offset_slice_ptr => full.ast.sliced,
+                    .node_offset_slice_start => full.ast.start,
+                    .node_offset_slice_end => full.ast.end,
+                    .node_offset_slice_sentinel => full.ast.sentinel,
+                    else => unreachable,
+                };
+                return nodeToSpan(tree, part_node);
             },
             .node_offset_call_func => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2219,10 +2216,7 @@ pub const SrcLoc = struct {

                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.fn_expr];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.fn_expr);
             },
             .node_offset_field_name => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2233,16 +2227,14 @@ pub const SrcLoc = struct {
                     .field_access => node_datas[node].rhs,
                     else => tree.firstToken(node) - 2,
                 };
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end, .main = start };
             },
             .node_offset_deref_ptr => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
-                const node_datas = tree.nodes.items(.data);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                const tok_index = node_datas[node].lhs;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node);
             },
             .node_offset_asm_source => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2253,10 +2245,7 @@ pub const SrcLoc = struct {
                     .@"asm" => tree.asmFull(node),
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.template];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.template);
             },
             .node_offset_asm_ret_ty => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2269,11 +2258,7 @@ pub const SrcLoc = struct {
                 };
                 const asm_output = full.outputs[0];
                 const node_datas = tree.nodes.items(.data);
-                const ret_ty_node = node_datas[asm_output].lhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[ret_ty_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[asm_output].lhs);
             },

             .node_offset_for_cond, .node_offset_if_cond => |node_off| {
@@ -2290,41 +2275,26 @@ pub const SrcLoc = struct {
                     .@"for" => tree.forFull(node).ast.cond_expr,
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, src_node);
             },
             .node_offset_bin_lhs => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 const node_datas = tree.nodes.items(.data);
-                const src_node = node_datas[node].lhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].lhs);
             },
             .node_offset_bin_rhs => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 const node_datas = tree.nodes.items(.data);
-                const src_node = node_datas[node].rhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].rhs);
             },

             .node_offset_switch_operand => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
                 const node_datas = tree.nodes.items(.data);
-                const src_node = node_datas[node].lhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[src_node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].lhs);
             },

             .node_offset_switch_special_prong => |node_off| {
@@ -2347,9 +2317,7 @@ pub const SrcLoc = struct {
                         mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
                     if (!is_special) continue;

-                    const tok_index = main_tokens[case_node];
-                    const token_starts = tree.tokens.items(.start);
-                    return token_starts[tok_index];
+                    return nodeToSpan(tree, case_node);
                 } else unreachable;
             },

@@ -2375,9 +2343,7 @@ pub const SrcLoc = struct {

                     for (case.ast.values) |item_node| {
                         if (node_tags[item_node] == .switch_range) {
-                            const tok_index = main_tokens[item_node];
-                            const token_starts = tree.tokens.items(.start);
-                            return token_starts[tok_index];
+                            return nodeToSpan(tree, item_node);
                         }
                     }
                 } else unreachable;
@@ -2403,10 +2369,7 @@ pub const SrcLoc = struct {
                     },
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.callconv_expr];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.callconv_expr);
             },

             .node_offset_fn_type_ret_ty => |node_off| {
@@ -2421,21 +2384,14 @@ pub const SrcLoc = struct {
                     .fn_proto => tree.fnProto(node),
                     else => unreachable,
                 };
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[full.ast.return_type];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.return_type);
             },

             .node_offset_anyframe_type => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_datas = tree.nodes.items(.data);
                 const parent_node = src_loc.declRelativeToNodeIndex(node_off);
-                const node = node_datas[parent_node].rhs;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[parent_node].rhs);
             },

             .node_offset_lib_name => |node_off| {
@@ -2462,8 +2418,9 @@ pub const SrcLoc = struct {
                     else => unreachable,
                 };
                 const tok_index = full.lib_name.?;
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                const start = tree.tokens.items(.start)[tok_index];
+                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
+                return Span{ .start = start, .end = end, .main = start };
             },

             .node_offset_array_type_len => |node_off| {
@@ -2476,11 +2433,7 @@ pub const SrcLoc = struct {
                     .array_type_sentinel => tree.arrayTypeSentinel(parent_node),
                     else => unreachable,
                 };
-                const node = full.ast.elem_count;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.elem_count);
             },
             .node_offset_array_type_sentinel => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2492,11 +2445,7 @@ pub const SrcLoc = struct {
                     .array_type_sentinel => tree.arrayTypeSentinel(parent_node),
                     else => unreachable,
                 };
-                const node = full.ast.sentinel;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.sentinel);
             },
             .node_offset_array_type_elem => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2508,21 +2457,14 @@ pub const SrcLoc = struct {
                     .array_type_sentinel => tree.arrayTypeSentinel(parent_node),
                     else => unreachable,
                 };
-                const node = full.ast.elem_type;
-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, full.ast.elem_type);
             },
             .node_offset_un_op => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_datas = tree.nodes.items(.data);
                 const node = src_loc.declRelativeToNodeIndex(node_off);

-                const main_tokens = tree.nodes.items(.main_token);
-                const tok_index = main_tokens[node_datas[node].lhs];
-                const token_starts = tree.tokens.items(.start);
-                return token_starts[tok_index];
+                return nodeToSpan(tree, node_datas[node].lhs);
             },
         }
     }
@@ -2532,7 +2474,7 @@ pub const SrcLoc = struct {
         gpa: Allocator,
         node_off: i32,
         arg_index: u32,
-    ) !u32 {
+    ) !Span {
         const tree = try src_loc.file_scope.getTree(gpa);
         const node_datas = tree.nodes.items(.data);
         const node_tags = tree.nodes.items(.tag);
@@ -2546,10 +2488,36 @@ pub const SrcLoc = struct {
             .builtin_call, .builtin_call_comma => tree.extra_data[node_datas[node].lhs + arg_index],
             else => unreachable,
         };
-        const main_tokens = tree.nodes.items(.main_token);
-        const tok_index = main_tokens[param];
+        return nodeToSpan(tree, param);
+    }
+
+    pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
+        return tokensToSpan(
+            tree,
+            tree.firstToken(node),
+            tree.lastToken(node),
+            tree.nodes.items(.main_token)[node],
+        );
+    }
+
+    fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
         const token_starts = tree.tokens.items(.start);
-        return token_starts[tok_index];
+        var start_tok = start;
+        var end_tok = end;
+
+        if (tree.tokensOnSameLine(start, end)) {
+            // do nothing
+        } else if (tree.tokensOnSameLine(start, main)) {
+            end_tok = main;
+        } else if (tree.tokensOnSameLine(main, end)) {
+            start_tok = main;
+        } else {
+            start_tok = main;
+            end_tok = main;
+        }
+        const start_off = token_starts[start_tok];
+        const end_off = token_starts[end_tok] + @intCast(u32, tree.tokenSlice(end_tok).len);
+        return Span{ .start = start_off, .end = end_off, .main = token_starts[main] };
     }
 };
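A summary of the tokensToSpan helper introduced above (restating the diff,
not new behavior): when the span's start and end tokens do not fit on one
line, the underlined range is clamped to whichever side shares a line with
the main token, and collapses to the main token alone when neither does:

    start..end on one line   -> underline start..end
    start on main's line     -> underline start..main
    end on main's line       -> underline main..end
    neither on main's line   -> underline main only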
@@ -2603,10 +2571,9 @@ pub const LazySrcLoc = union(enum) {
     /// from its containing Decl node AST index.
     /// The Decl is determined contextually.
     node_offset: TracedOffset,
-    /// The source location points to two tokens left of the first token of an AST node,
-    /// which is this value offset from its containing Decl node AST index.
+    /// The source location points to the beginning of a struct initializer.
     /// The Decl is determined contextually.
-    node_offset_back2tok: i32,
+    node_offset_initializer: i32,
     /// The source location points to a variable declaration type expression,
     /// found by taking this AST node index offset from the containing
     /// Decl AST node, which points to a variable declaration AST node. Next, navigate
@@ -2802,7 +2769,7 @@ pub const LazySrcLoc = union(enum) {
             .byte_offset,
             .token_offset,
             .node_offset,
-            .node_offset_back2tok,
+            .node_offset_initializer,
             .node_offset_var_decl_ty,
             .node_offset_for_cond,
             .node_offset_builtin_call_arg0,
@@ -3313,7 +3280,11 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
         .src_loc = .{
             .file_scope = file,
             .parent_decl_node = 0,
-            .lazy = .{ .byte_abs = token_starts[parse_err.token] + extra_offset },
+            .lazy = if (extra_offset == 0) .{
+                .token_abs = parse_err.token,
+            } else .{
+                .byte_abs = token_starts[parse_err.token] + extra_offset,
+            },
         },
         .msg = msg.toOwnedSlice(),
     };
@@ -3336,7 +3307,7 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
         .src_loc = .{
             .file_scope = file,
             .parent_decl_node = 0,
-            .lazy = .{ .byte_abs = token_starts[note.token] },
+            .lazy = .{ .token_abs = note.token },
         },
         .msg = msg.toOwnedSlice(),
     };
src/Sema.zig (16 changed lines)
@@ -3497,7 +3497,7 @@ fn validateUnionInit(

             for (instrs[1..]) |inst| {
                 const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
-                const inst_src: LazySrcLoc = .{ .node_offset_back2tok = inst_data.src_node };
+                const inst_src: LazySrcLoc = .{ .node_offset_initializer = inst_data.src_node };
                 try sema.errNote(block, inst_src, msg, "additional initializer here", .{});
             }
             try sema.addDeclaredHereNote(msg, union_ty);
@@ -3515,7 +3515,7 @@ fn validateUnionInit(

     const field_ptr = instrs[0];
     const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
-    const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
+    const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
     const field_ptr_extra = sema.code.extraData(Zir.Inst.Field, field_ptr_data.payload_index).data;
     const field_name = sema.code.nullTerminatedString(field_ptr_extra.field_name_start);
     const field_index = try sema.unionFieldIndex(block, union_ty, field_name, field_src);
@@ -3617,7 +3617,7 @@ fn validateStructInit(

     for (instrs) |field_ptr| {
         const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
-        const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
+        const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
         const field_ptr_extra = sema.code.extraData(Zir.Inst.Field, field_ptr_data.payload_index).data;
         struct_ptr_zir_ref = field_ptr_extra.lhs;
         const field_name = sema.code.nullTerminatedString(field_ptr_extra.field_name_start);
@@ -3625,7 +3625,7 @@ fn validateStructInit(
         if (found_fields[field_index] != 0) {
             const other_field_ptr = found_fields[field_index];
             const other_field_ptr_data = sema.code.instructions.items(.data)[other_field_ptr].pl_node;
-            const other_field_src: LazySrcLoc = .{ .node_offset_back2tok = other_field_ptr_data.src_node };
+            const other_field_src: LazySrcLoc = .{ .node_offset_initializer = other_field_ptr_data.src_node };
             const msg = msg: {
                 const msg = try sema.errMsg(block, field_src, "duplicate field", .{});
                 errdefer msg.destroy(gpa);
@@ -3700,7 +3700,7 @@ fn validateStructInit(
     field: for (found_fields) |field_ptr, i| {
         if (field_ptr != 0) {
             const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
-            const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
+            const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };

             // Determine whether the value stored to this pointer is comptime-known.
             const field_ty = struct_ty.structFieldType(i);
@@ -14096,14 +14096,14 @@ fn zirStructInit(
         extra_index = item.end;

         const field_type_data = zir_datas[item.data.field_type].pl_node;
-        const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_type_data.src_node };
+        const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
         const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
         const field_name = sema.code.nullTerminatedString(field_type_extra.name_start);
         const field_index = try sema.structFieldIndex(block, resolved_ty, field_name, field_src);
         if (field_inits[field_index] != .none) {
             const other_field_type = found_fields[field_index];
             const other_field_type_data = zir_datas[other_field_type].pl_node;
-            const other_field_src: LazySrcLoc = .{ .node_offset_back2tok = other_field_type_data.src_node };
+            const other_field_src: LazySrcLoc = .{ .node_offset_initializer = other_field_type_data.src_node };
             const msg = msg: {
                 const msg = try sema.errMsg(block, field_src, "duplicate field", .{});
                 errdefer msg.destroy(gpa);
@@ -14125,7 +14125,7 @@ fn zirStructInit(
         const item = sema.code.extraData(Zir.Inst.StructInit.Item, extra.end);

         const field_type_data = zir_datas[item.data.field_type].pl_node;
-        const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_type_data.src_node };
+        const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
         const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
         const field_name = sema.code.nullTerminatedString(field_type_extra.name_start);
         const field_index = try sema.unionFieldIndex(block, resolved_ty, field_name, field_src);
src/main.zig (16 changed lines)
@@ -4387,7 +4387,7 @@ fn printErrsMsgToStdErr(
             .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
                 std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
             }),
-            .byte_offset = byte_offset,
+            .span = .{ .start = byte_offset, .end = byte_offset + 1, .main = byte_offset },
             .line = @intCast(u32, start_loc.line),
             .column = @intCast(u32, start_loc.column) + bad_off,
             .source_line = source_line,
@@ -4402,11 +4402,16 @@ fn printErrsMsgToStdErr(
         text_buf.items.len = 0;
         try tree.renderError(note, writer);
         const note_loc = tree.tokenLocation(0, note.token);
+        const byte_offset = @intCast(u32, note_loc.line_start);
         notes_buffer[notes_len] = .{
             .src = .{
                 .src_path = path,
                 .msg = try arena.dupe(u8, text_buf.items),
-                .byte_offset = @intCast(u32, note_loc.line_start),
+                .span = .{
+                    .start = byte_offset,
+                    .end = byte_offset + @intCast(u32, tree.tokenSlice(note.token).len),
+                    .main = byte_offset,
+                },
                 .line = @intCast(u32, note_loc.line),
                 .column = @intCast(u32, note_loc.column),
                 .source_line = tree.source[note_loc.line_start..note_loc.line_end],
@@ -4417,11 +4422,16 @@ fn printErrsMsgToStdErr(
     }

     const extra_offset = tree.errorOffset(parse_error);
+    const byte_offset = @intCast(u32, start_loc.line_start) + extra_offset;
     const message: Compilation.AllErrors.Message = .{
         .src = .{
             .src_path = path,
             .msg = text,
-            .byte_offset = @intCast(u32, start_loc.line_start) + extra_offset,
+            .span = .{
+                .start = byte_offset,
+                .end = byte_offset + @intCast(u32, tree.tokenSlice(lok_token).len),
+                .main = byte_offset,
+            },
             .line = @intCast(u32, start_loc.line),
             .column = @intCast(u32, start_loc.column) + extra_offset,
             .source_line = source_line,
@@ -2381,10 +2381,12 @@ const Writer = struct {
             .parent_decl_node = self.parent_decl_node,
             .lazy = src,
         };
-        const abs_byte_off = src_loc.byteOffset(self.gpa) catch unreachable;
-        const delta_line = std.zig.findLineColumn(tree.source, abs_byte_off);
-        try stream.print("{s}:{d}:{d}", .{
-            @tagName(src), delta_line.line + 1, delta_line.column + 1,
+        const src_span = src_loc.span(self.gpa) catch unreachable;
+        const start = std.zig.findLineColumn(tree.source, src_span.start);
+        const end = std.zig.findLineColumn(tree.source, src_span.end);
+        try stream.print("{s}:{d}:{d} to :{d}:{d}", .{
+            @tagName(src), start.line + 1, start.column + 1,
+            end.line + 1, end.column + 1,
         });
     }
 }
@@ -174,6 +174,15 @@ pub fn addCases(ctx: *TestContext) !void {
         });
     }

+    {
+        const case = ctx.obj("missing semicolon at EOF", .{});
+        case.addError(
+            \\const foo = 1
+        , &[_][]const u8{
+            \\:1:14: error: expected ';' after declaration
+        });
+    }
+
     // TODO test this in stage2, but we won't even try in stage1
     //ctx.objErrStage1("inline fn calls itself indirectly",
     //    \\export fn foo() void {