Module: improve source spans for initializers and var types
```zig
const U = union { foo: u32, bar: u32 };
test {
    var a = U{ .foo = 1213, .bar = 1123 };
    _ = a;
}
test {
    var a: (123 + 5238094) = 0;
    _ = a;
}
```
before:
```
:30: note: additional initializer here
    var a = U{ .foo = 1213, .bar = 1123 };
                             ^~~
:12: error: expected type 'type', found 'comptime_int'
    var a: (123 + 5238094) = 0;
           ^
```
after:
```
:30: note: additional initializer here
    var a = U{ .foo = 1213, .bar = 1123 };
                            ~^~~~~~~~~~
:12: error: expected type 'type', found 'comptime_int'
    var a: (123 + 5238094) = 0;
           ^~~~~~~~~~~~~~~
```
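Reading the carets: a `Span` is a byte-offset triple (`start`, `end`, `main`), and the error renderer appears to draw `^` at the main offset and `~` over the rest of the range. The following standalone sketch is not the compiler's rendering code; the renderer and the hand-picked offsets (for the `.bar = 1123` line above) are illustrative only.

```zig
// A minimal sketch, not compiler code: how a byte-offset Span
// (.start/.end/.main, as in the Span type used below) turns into the
// underline printed in the error output above. Offsets are hand-picked
// for the `.bar = 1123` initializer and are illustrative only.
const std = @import("std");

const Span = struct { start: u32, end: u32, main: u32 };

fn writeUnderline(writer: anytype, line: []const u8, span: Span) !void {
    try writer.print("{s}\n", .{line});
    var i: u32 = 0;
    while (i < span.end) : (i += 1) {
        // spaces up to the span, `^` at the main offset, `~` elsewhere
        const c: u8 = if (i < span.start) ' ' else if (i == span.main) '^' else '~';
        try writer.writeByte(c);
    }
    try writer.writeByte('\n');
}

pub fn main() !void {
    const out = std.io.getStdOut().writer();
    const line = "    var a = U{ .foo = 1213, .bar = 1123 };";
    // old behavior: span covers only the field name token
    try writeUnderline(out, line, .{ .start = 29, .end = 32, .main = 29 });
    // new behavior: span covers the whole `.bar = 1123` initializer
    try writeUnderline(out, line, .{ .start = 28, .end = 39, .main = 29 });
}
```

Running the sketch prints the `^~~` and `~^~~~~~~~~~` underlines from the before/after output above.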
parent 1463144fc8
commit cf207df592
```diff
@@ -2132,13 +2132,15 @@ pub const SrcLoc = struct {
                 assert(src_loc.file_scope.tree_loaded);
                 return nodeToSpan(tree, node);
             },
-            .node_offset_back2tok => |node_off| {
+            .node_offset_initializer => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                const tok_index = tree.firstToken(node) - 2;
-                const start = tree.tokens.items(.start)[tok_index];
-                const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
-                return Span{ .start = start, .end = end, .main = start };
+                return tokensToSpan(
+                    tree,
+                    tree.firstToken(node) - 3,
+                    tree.lastToken(node),
+                    tree.nodes.items(.main_token)[node] - 2,
+                );
             },
             .node_offset_var_decl_ty => |node_off| {
                 const tree = try src_loc.file_scope.getTree(gpa);
@@ -2151,12 +2153,10 @@ pub const SrcLoc = struct {
                     .aligned_var_decl => tree.alignedVarDecl(node),
                     else => unreachable,
                 };
-                const tok_index = if (full.ast.type_node != 0) blk: {
-                    const main_tokens = tree.nodes.items(.main_token);
-                    break :blk main_tokens[full.ast.type_node];
-                } else blk: {
-                    break :blk full.ast.mut_token + 1; // the name token
-                };
+                if (full.ast.type_node != 0) {
+                    return nodeToSpan(tree, full.ast.type_node);
+                }
+                const tok_index = full.ast.mut_token + 1; // the name token
                 const start = tree.tokens.items(.start)[tok_index];
                 const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
                 return Span{ .start = start, .end = end, .main = start };
@@ -2492,26 +2492,32 @@ pub const SrcLoc = struct {
     }

     pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
+        return tokensToSpan(
+            tree,
+            tree.firstToken(node),
+            tree.lastToken(node),
+            tree.nodes.items(.main_token)[node],
+        );
+    }
+
+    fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
         const token_starts = tree.tokens.items(.start);
-        const main_token = tree.nodes.items(.main_token)[node];
-        const start = tree.firstToken(node);
-        const end = tree.lastToken(node);
         var start_tok = start;
         var end_tok = end;

         if (tree.tokensOnSameLine(start, end)) {
             // do nothing
-        } else if (tree.tokensOnSameLine(start, main_token)) {
-            end_tok = main_token;
-        } else if (tree.tokensOnSameLine(main_token, end)) {
-            start_tok = main_token;
+        } else if (tree.tokensOnSameLine(start, main)) {
+            end_tok = main;
+        } else if (tree.tokensOnSameLine(main, end)) {
+            start_tok = main;
         } else {
-            start_tok = main_token;
-            end_tok = main_token;
+            start_tok = main;
+            end_tok = main;
         }
         const start_off = token_starts[start_tok];
         const end_off = token_starts[end_tok] + @intCast(u32, tree.tokenSlice(end_tok).len);
-        return Span{ .start = start_off, .end = end_off, .main = token_starts[main_token] };
+        return Span{ .start = start_off, .end = end_off, .main = token_starts[main] };
     }
 };

@@ -2565,10 +2571,9 @@ pub const LazySrcLoc = union(enum) {
     /// from its containing Decl node AST index.
     /// The Decl is determined contextually.
     node_offset: TracedOffset,
-    /// The source location points to two tokens left of the first token of an AST node,
-    /// which is this value offset from its containing Decl node AST index.
+    /// The source location points to the beginning of a struct initializer.
     /// The Decl is determined contextually.
-    node_offset_back2tok: i32,
+    node_offset_initializer: i32,
     /// The source location points to a variable declaration type expression,
     /// found by taking this AST node index offset from the containing
     /// Decl AST node, which points to a variable declaration AST node. Next, navigate
@@ -2764,7 +2769,7 @@ pub const LazySrcLoc = union(enum) {
         .byte_offset,
         .token_offset,
         .node_offset,
-        .node_offset_back2tok,
+        .node_offset_initializer,
         .node_offset_var_decl_ty,
         .node_offset_for_cond,
         .node_offset_builtin_call_arg0,
```
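The new `tokensToSpan` helper is where the multi-line clamping lives: when a node spans several lines, the reported range is shrunk toward the line containing the main token. Below is a standalone sketch of that rule over fabricated token data; the `Tok` struct and offsets are illustrative, not the real `Ast` API.

```zig
// A standalone sketch of the clamping rule factored out into `tokensToSpan`
// above: if a node spans several lines, shrink the reported range toward
// the line containing the main token, falling back to just the main token.
// `Tok` and the sample data are made up for illustration.
const std = @import("std");

const Tok = struct { start: u32, len: u32, line: u32 };
const Span = struct { start: u32, end: u32, main: u32 };

fn tokensToSpanSketch(toks: []const Tok, start_i: usize, end_i: usize, main_i: usize) Span {
    var s = start_i;
    var e = end_i;
    if (toks[s].line == toks[e].line) {
        // whole range already on one line: keep it
    } else if (toks[s].line == toks[main_i].line) {
        e = main_i; // drop the part after the main token's line
    } else if (toks[main_i].line == toks[e].line) {
        s = main_i; // drop the part before the main token's line
    } else {
        s = main_i; // worst case: point at the main token only
        e = main_i;
    }
    return .{
        .start = toks[s].start,
        .end = toks[e].start + toks[e].len,
        .main = toks[main_i].start,
    };
}

pub fn main() void {
    // three tokens: `{` on line 1, `.foo` on line 2, `}` on line 3
    const toks = [_]Tok{
        .{ .start = 10, .len = 1, .line = 1 },
        .{ .start = 16, .len = 4, .line = 2 },
        .{ .start = 25, .len = 1, .line = 3 },
    };
    const span = tokensToSpanSketch(&toks, 0, 2, 1);
    std.debug.print("start={d} end={d} main={d}\n", .{ span.start, span.end, span.main });
}
```

With this sample data the node spans three lines and shares no line with the main token, so the span collapses to just `.foo`: start=16, end=20, main=16.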
src/Sema.zig (16 changed lines)
```diff
@@ -3403,7 +3403,7 @@ fn validateUnionInit(

         for (instrs[1..]) |inst| {
             const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
-            const inst_src: LazySrcLoc = .{ .node_offset_back2tok = inst_data.src_node };
+            const inst_src: LazySrcLoc = .{ .node_offset_initializer = inst_data.src_node };
             try sema.errNote(block, inst_src, msg, "additional initializer here", .{});
         }
         try sema.addDeclaredHereNote(msg, union_ty);
@@ -3421,7 +3421,7 @@ fn validateUnionInit(

     const field_ptr = instrs[0];
     const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
-    const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
+    const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
     const field_ptr_extra = sema.code.extraData(Zir.Inst.Field, field_ptr_data.payload_index).data;
     const field_name = sema.code.nullTerminatedString(field_ptr_extra.field_name_start);
     const field_index = try sema.unionFieldIndex(block, union_ty, field_name, field_src);
@@ -3523,7 +3523,7 @@ fn validateStructInit(

     for (instrs) |field_ptr| {
         const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
-        const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
+        const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
         const field_ptr_extra = sema.code.extraData(Zir.Inst.Field, field_ptr_data.payload_index).data;
         struct_ptr_zir_ref = field_ptr_extra.lhs;
         const field_name = sema.code.nullTerminatedString(field_ptr_extra.field_name_start);
@@ -3531,7 +3531,7 @@ fn validateStructInit(
         if (found_fields[field_index] != 0) {
             const other_field_ptr = found_fields[field_index];
             const other_field_ptr_data = sema.code.instructions.items(.data)[other_field_ptr].pl_node;
-            const other_field_src: LazySrcLoc = .{ .node_offset_back2tok = other_field_ptr_data.src_node };
+            const other_field_src: LazySrcLoc = .{ .node_offset_initializer = other_field_ptr_data.src_node };
             const msg = msg: {
                 const msg = try sema.errMsg(block, field_src, "duplicate field", .{});
                 errdefer msg.destroy(gpa);
@@ -3606,7 +3606,7 @@ fn validateStructInit(
     field: for (found_fields) |field_ptr, i| {
         if (field_ptr != 0) {
             const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
-            const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
+            const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };

             // Determine whether the value stored to this pointer is comptime-known.
             const field_ty = struct_ty.structFieldType(i);
@@ -13999,14 +13999,14 @@ fn zirStructInit(
         extra_index = item.end;

         const field_type_data = zir_datas[item.data.field_type].pl_node;
-        const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_type_data.src_node };
+        const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
         const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
         const field_name = sema.code.nullTerminatedString(field_type_extra.name_start);
         const field_index = try sema.structFieldIndex(block, resolved_ty, field_name, field_src);
         if (field_inits[field_index] != .none) {
             const other_field_type = found_fields[field_index];
             const other_field_type_data = zir_datas[other_field_type].pl_node;
-            const other_field_src: LazySrcLoc = .{ .node_offset_back2tok = other_field_type_data.src_node };
+            const other_field_src: LazySrcLoc = .{ .node_offset_initializer = other_field_type_data.src_node };
             const msg = msg: {
                 const msg = try sema.errMsg(block, field_src, "duplicate field", .{});
                 errdefer msg.destroy(gpa);
@@ -14028,7 +14028,7 @@ fn zirStructInit(
         const item = sema.code.extraData(Zir.Inst.StructInit.Item, extra.end);

         const field_type_data = zir_datas[item.data.field_type].pl_node;
-        const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_type_data.src_node };
+        const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
         const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
         const field_name = sema.code.nullTerminatedString(field_type_extra.name_start);
         const field_index = try sema.unionFieldIndex(block, resolved_ty, field_name, field_src);
```
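Every Sema.zig hunk is the same mechanical substitution: the `LazySrcLoc` variant built from an instruction's `src_node` changes from `.node_offset_back2tok` to `.node_offset_initializer`. A toy sketch of that tagged-union pattern follows; the types are simplified stand-ins, not the real compiler API.

```zig
// A toy sketch of the LazySrcLoc pattern used above: a ZIR instruction carries
// only a small src_node offset, Sema wraps it in a LazySrcLoc variant, and the
// absolute byte span is computed later (SrcLoc.span above) only if a diagnostic
// is actually emitted. The union here is a trimmed-down stand-in.
const std = @import("std");

const LazySrcLoc = union(enum) {
    node_offset: i32,
    node_offset_initializer: i32,
    node_offset_var_decl_ty: i32,
};

fn describe(src: LazySrcLoc) []const u8 {
    return switch (src) {
        .node_offset => "some AST node",
        .node_offset_initializer => "beginning of a struct/union initializer",
        .node_offset_var_decl_ty => "variable declaration type expression",
    };
}

pub fn main() void {
    // mirrors `.{ .node_offset_initializer = field_ptr_data.src_node }` above
    const src_node: i32 = 2;
    const field_src: LazySrcLoc = .{ .node_offset_initializer = src_node };
    std.debug.print("error would point at: {s} (node offset {d})\n", .{
        describe(field_src),
        field_src.node_offset_initializer,
    });
}
```

In the real compiler these offsets are node indexes relative to the containing Decl, which is why `SrcLoc.span` above calls `declRelativeToNodeIndex` before computing byte offsets.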