Mirror of https://github.com/ziglang/zig.git
parser: make missing semicolon error point to the end of the previous token
parent 0b7347fd18
commit ddd6de86f7
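
For a declaration or statement that is missing its trailing ';', the parser now attaches the error to the last token before the gap, and the new Ast.errorOffset helper lets callers shift the reported column and byte offset to just past that token. A hypothetical before/after illustration (the input and caret placement are invented for this description, not taken from the commit or its tests):

    const x = 1
               ^ with this change: error: expected ';' after declaration
    const y = 2;
    ^ previously the error pointed here, at the token following the missing ';'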
@@ -64,6 +64,17 @@ pub fn renderToArrayList(tree: Ast, buffer: *std.ArrayList(u8)) RenderError!void
     return @import("./render.zig").renderTree(buffer, tree);
 }
 
+/// Returns an extra offset for column and byte offset of errors that
+/// should point after the token in the error message.
+pub fn errorOffset(tree: Ast, error_tag: Error.Tag, token: TokenIndex) u32 {
+    return switch (error_tag) {
+        .expected_semi_after_decl,
+        .expected_semi_after_stmt,
+        => @intCast(u32, tree.tokenSlice(token).len),
+        else => 0,
+    };
+}
+
 pub fn tokenLocation(self: Ast, start_offset: ByteOffset, token_index: TokenIndex) Location {
     var loc = Location{
         .line = 0,
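
For the two semicolon tags, errorOffset returns the byte length of the flagged token, so adding it to that token's start offset moves the reported location from the token's first byte to just past its last byte; every other tag keeps an offset of 0. A minimal usage fragment, assuming `tree` is a parsed Ast and `err` is one of `tree.errors` (the same pattern the astGenFile and printErrMsgToStdErr hunks below apply):

    // `err.token` is the token the error is attached to. For the semicolon
    // errors that is the last token of the declaration or statement, so
    // adding its length places the byte offset immediately after it.
    const token_starts = tree.tokens.items(.start);
    const adjusted_byte = token_starts[err.token] + tree.errorOffset(err.tag, err.token);
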
@@ -306,6 +317,13 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
            return stream.writeAll("function prototype has parameter after varargs");
        },
 
+        .expected_semi_after_decl => {
+            return stream.writeAll("expected ';' after declaration");
+        },
+        .expected_semi_after_stmt => {
+            return stream.writeAll("expected ';' after statement");
+        },
+
        .expected_token => {
            const found_tag = token_tags[parse_error.token];
            const expected_symbol = parse_error.extra.expected_tag.symbol();
@@ -2495,6 +2513,10 @@ pub const Error = struct {
        unattached_doc_comment,
        varargs_nonfinal,
 
+        // these have `token` set to token after which a semicolon was expected
+        expected_semi_after_decl,
+        expected_semi_after_stmt,
+
        /// `expected_tag` is populated.
        expected_token,
    };

@@ -586,7 +586,7 @@ const Parser = struct {
        const thread_local_token = p.eatToken(.keyword_threadlocal);
        const var_decl = try p.parseVarDecl();
        if (var_decl != 0) {
-            _ = try p.expectToken(.semicolon);
+            try p.expectSemicolon(.expected_semi_after_decl, false);
            return var_decl;
        }
        if (thread_local_token != null) {
@@ -614,7 +614,7 @@ const Parser = struct {
    fn expectUsingNamespace(p: *Parser) !Node.Index {
        const usingnamespace_token = p.assertToken(.keyword_usingnamespace);
        const expr = try p.expectExpr();
-        _ = try p.expectToken(.semicolon);
+        try p.expectSemicolon(.expected_semi_after_decl, false);
        return p.addNode(.{
            .tag = .@"usingnamespace",
            .main_token = usingnamespace_token,
@@ -851,7 +851,7 @@ const Parser = struct {
 
        const var_decl = try p.parseVarDecl();
        if (var_decl != 0) {
-            _ = try p.expectTokenRecoverable(.semicolon);
+            try p.expectSemicolon(.expected_semi_after_decl, true);
            return var_decl;
        }
 
@@ -915,7 +915,7 @@ const Parser = struct {
 
        const assign_expr = try p.parseAssignExpr();
        if (assign_expr != 0) {
-            _ = try p.expectTokenRecoverable(.semicolon);
+            try p.expectSemicolon(.expected_semi_after_stmt, true);
            return assign_expr;
        }
 
@@ -1205,7 +1205,7 @@ const Parser = struct {
        }
        const assign_expr = try p.parseAssignExpr();
        if (assign_expr != 0) {
-            _ = try p.expectTokenRecoverable(.semicolon);
+            try p.expectSemicolon(.expected_semi_after_stmt, true);
            return assign_expr;
        }
        return null_node;
@@ -3664,6 +3664,15 @@ const Parser = struct {
        }
    }
 
+    fn expectSemicolon(p: *Parser, tag: AstError.Tag, recoverable: bool) Error!void {
+        if (p.token_tags[p.tok_i] == .semicolon) {
+            _ = p.nextToken();
+            return;
+        }
+        try p.warnMsg(.{ .tag = tag, .token = p.tok_i - 1 });
+        if (!recoverable) return error.ParseError;
+    }
+
    fn nextToken(p: *Parser) TokenIndex {
        const result = p.tok_i;
        p.tok_i += 1;

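The new expectSemicolon helper consumes the ';' when it is present; otherwise it warns on `p.tok_i - 1`, the last token the parser already consumed, and aborts with error.ParseError only when the caller passes recoverable == false. A worked example on a hypothetical input, following the token bookkeeping above:

    // Hypothetical input: "const x = 1 var y = 2;"
    // token indices:      0:const  1:x  2:=  3:1  4:var  5:y  ...
    // After parseVarDecl finishes the first declaration, p.tok_i is 4 (at
    // `var`), so the warning is attached to token 3 (`1`). errorOffset then
    // adds tokenSlice(3).len == 1, placing the reported location on the byte
    // right after the `1` instead of at the start of `var`.
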
@@ -2995,13 +2995,14 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
        const token_starts = file.tree.tokens.items(.start);
        const token_tags = file.tree.tokens.items(.tag);
 
+        const extra_offset = file.tree.errorOffset(parse_err.tag, parse_err.token);
        try file.tree.renderError(parse_err, msg.writer());
        const err_msg = try gpa.create(ErrorMsg);
        err_msg.* = .{
            .src_loc = .{
                .file_scope = file,
                .parent_decl_node = 0,
-                .lazy = .{ .byte_abs = token_starts[parse_err.token] },
+                .lazy = .{ .byte_abs = token_starts[parse_err.token] + extra_offset },
            },
            .msg = msg.toOwnedSlice(),
        };

@@ -4040,13 +4040,14 @@ fn printErrMsgToStdErr(
        notes_len += 1;
    }
 
+    const extra_offset = tree.errorOffset(parse_error.tag, parse_error.token);
    const message: Compilation.AllErrors.Message = .{
        .src = .{
            .src_path = path,
            .msg = text,
-            .byte_offset = @intCast(u32, start_loc.line_start),
+            .byte_offset = @intCast(u32, start_loc.line_start) + extra_offset,
            .line = @intCast(u32, start_loc.line),
-            .column = @intCast(u32, start_loc.column),
+            .column = @intCast(u32, start_loc.column) + extra_offset,
            .source_line = source_line,
            .notes = notes_buffer[0..notes_len],
        },