Mirror of https://github.com/ziglang/zig.git (synced 2026-02-13 12:59:04 +00:00)
stage2: handle parser notes in a more general way
parent 5d22204d2d
commit 2a3f3766a4
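Instead of hard-coding which parse errors carry companion messages (previously only `.decl_between_fields` and `.c_style_container`), the parser now marks the companion entries itself with `.is_note = true`, and both consumers of the error list (`astGenFile` and `printErrsMsgToStdErr` in src/main.zig) simply collect every entry that trails an error with that flag set. A minimal sketch of the convention, using a hypothetical `ParseError` type and `noteCount` helper rather than the real compiler types:

    const ParseError = struct { is_note: bool, token: u32 };

    /// Notes are stored immediately after the error they annotate, so collecting
    /// (or here, counting) them is a single scan that stops at the next real error.
    /// Assumes `i` is the index of a non-note error in `errors`.
    fn noteCount(errors: []const ParseError, i: usize) usize {
        var n: usize = 0;
        for (errors[i + 1 ..]) |e| {
            if (!e.is_note) break;
            n += 1;
        }
        return n;
    }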
@@ -178,7 +178,6 @@ const Parser = struct {
        .expected_block_or_assignment,
        .expected_block_or_expr,
        .expected_block_or_field,
        .expected_container_members,
        .expected_expr,
        .expected_expr_or_assignment,
        .expected_fn,
@@ -401,10 +400,12 @@ const Parser = struct {
             });
             try p.warnMsg(.{
                 .tag = .previous_field,
+                .is_note = true,
                 .token = last_field,
             });
             try p.warnMsg(.{
                 .tag = .next_field,
+                .is_note = true,
                 .token = identifier,
             });
             // Continue parsing; error will be reported later.
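On the parser side, the only change needed to give an error a note is the `.is_note = true` flag on the companion `warnMsg` call, as above. As far as this diff exercises it, the queued error record looks roughly like the following sketch (field names taken from the diff, types assumed):

    const Error = struct {
        tag: Tag, // e.g. .previous_field, .next_field
        is_note: bool, // true for entries that annotate the error stored just before them
        token: u32, // index of the token the message points at
        // The real record also carries an `extra` payload, e.g. `expected_tag` for .c_style_container.

        const Tag = enum { previous_field, next_field, decl_between_fields, c_style_container };
    };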
@@ -985,7 +986,7 @@ const Parser = struct {
             .keyword_switch => return p.expectSwitchExpr(),
             .keyword_if => return p.expectIfStatement(),
             .keyword_enum, .keyword_struct, .keyword_union => {
-                const identifier = p.tok_i + 2;
+                const identifier = p.tok_i + 1;
                 if (try p.parseCStyleContainer()) {
                     // Return something so that `expectStatement` is happy.
                     return p.addNode(.{
@@ -3324,26 +3324,21 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
                 .parent_decl_node = 0,
                 .lazy = .{ .byte_abs = byte_abs },
             }, err_msg, "invalid byte: '{'}'", .{std.zig.fmtEscapes(source[byte_abs..][0..1])});
-        } else if (parse_err.tag == .decl_between_fields) {
-            try mod.errNoteNonLazy(.{
-                .file_scope = file,
-                .parent_decl_node = 0,
-                .lazy = .{ .byte_abs = token_starts[file.tree.errors[1].token] },
-            }, err_msg, "field before declarations here", .{});
-            try mod.errNoteNonLazy(.{
-                .file_scope = file,
-                .parent_decl_node = 0,
-                .lazy = .{ .byte_abs = token_starts[file.tree.errors[2].token] },
-            }, err_msg, "field after declarations here", .{});
-        } else if (parse_err.tag == .c_style_container) {
-            const note = file.tree.errors[1];
-            try mod.errNoteNonLazy(.{
-                .file_scope = file,
-                .parent_decl_node = 0,
-                .lazy = .{ .byte_abs = token_starts[note.token] },
-            }, err_msg, "to declare a container do 'const {s} = {s}'", .{
-                file.tree.tokenSlice(note.token), note.extra.expected_tag.symbol(),
-            });
         }
 
+        for (file.tree.errors[1..]) |note| {
+            if (!note.is_note) break;
+
+            try file.tree.renderError(note, msg.writer());
+            err_msg.notes = try mod.gpa.realloc(err_msg.notes, err_msg.notes.len + 1);
+            err_msg.notes[err_msg.notes.len - 1] = .{
+                .src_loc = .{
+                    .file_scope = file,
+                    .parent_decl_node = 0,
+                    .lazy = .{ .byte_abs = token_starts[note.token] },
+                },
+                .msg = msg.toOwnedSlice(),
+            };
+        }
+
         {
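In `astGenFile` above, the same `msg` buffer is reused for every note: each trailing note is rendered into it through `msg.writer()`, and the accumulated text is then detached with `msg.toOwnedSlice()`, which leaves the buffer empty for the next iteration and gives each note ownership of its own string. The `err_msg.notes` slice grows by one `gpa.realloc` per note rather than being pre-sized, since the number of trailing notes is no longer known up front. Annotated excerpt (comments added here, not present in the diff):

    try file.tree.renderError(note, msg.writer()); // render this note's text into the shared buffer
    // ... grow err_msg.notes by one and fill in the source location ...
    .msg = msg.toOwnedSlice(), // detach the text; `msg` is empty again for the next note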
53 src/main.zig
@@ -4367,7 +4367,7 @@ fn printErrsMsgToStdErr(
         defer text_buf.deinit();
         const writer = text_buf.writer();
         try tree.renderError(parse_error, writer);
-        const text = text_buf.items;
+        const text = try arena.dupe(u8, text_buf.items);
 
         var notes_buffer: [2]Compilation.AllErrors.Message = undefined;
         var notes_len: usize = 0;
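Why the `arena.dupe` above: previously `text` aliased `text_buf.items`, which was fine because the buffer was not written to again while the message was alive. The note loop added in the next hunk reuses the same buffer for each note (resetting `text_buf.items.len` before rendering), so the primary message must be copied out first or its text would be clobbered, and could dangle if the buffer reallocates. The relevant lines together, with the rationale as comments (inferred from the change; the commit message does not spell it out):

    const text = try arena.dupe(u8, text_buf.items); // snapshot the primary message before the buffer is reused
    // ... then, once per trailing note:
    text_buf.items.len = 0; // reset the shared buffer
    try tree.renderError(note, writer); // render the note's text into it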
@@ -4388,49 +4388,26 @@ fn printErrsMsgToStdErr(
                 },
             };
             notes_len += 1;
-        } else if (parse_error.tag == .decl_between_fields) {
-            const prev_loc = tree.tokenLocation(0, parse_errors[i + 1].token);
-            notes_buffer[0] = .{
-                .src = .{
-                    .src_path = path,
-                    .msg = "field before declarations here",
-                    .byte_offset = @intCast(u32, prev_loc.line_start),
-                    .line = @intCast(u32, prev_loc.line),
-                    .column = @intCast(u32, prev_loc.column),
-                    .source_line = tree.source[prev_loc.line_start..prev_loc.line_end],
-                },
-            };
-            const next_loc = tree.tokenLocation(0, parse_errors[i + 2].token);
-            notes_buffer[1] = .{
-                .src = .{
-                    .src_path = path,
-                    .msg = "field after declarations here",
-                    .byte_offset = @intCast(u32, next_loc.line_start),
-                    .line = @intCast(u32, next_loc.line),
-                    .column = @intCast(u32, next_loc.column),
-                    .source_line = tree.source[next_loc.line_start..next_loc.line_end],
-                },
-            };
-            notes_len = 2;
-            i += 2;
-        } else if (parse_error.tag == .c_style_container) {
-            const note = tree.errors[i + 1];
-            const prev_loc = tree.tokenLocation(0, parse_errors[i + 1].token);
-            notes_buffer[0] = .{
+        }
+
+        for (parse_errors[i + 1 ..]) |note| {
+            if (!note.is_note) break;
+
+            text_buf.items.len = 0;
+            try tree.renderError(note, writer);
+            const note_loc = tree.tokenLocation(0, note.token);
+            notes_buffer[notes_len] = .{
                 .src = .{
                     .src_path = path,
-                    .msg = try std.fmt.allocPrint(arena, "to declare a container do 'const {s} = {s}'", .{
-                        tree.tokenSlice(note.token), note.extra.expected_tag.symbol(),
-                    }),
-                    .byte_offset = @intCast(u32, prev_loc.line_start),
-                    .line = @intCast(u32, prev_loc.line),
-                    .column = @intCast(u32, prev_loc.column),
-                    .source_line = tree.source[prev_loc.line_start..prev_loc.line_end],
+                    .msg = try arena.dupe(u8, text_buf.items),
+                    .byte_offset = @intCast(u32, note_loc.line_start),
+                    .line = @intCast(u32, note_loc.line),
+                    .column = @intCast(u32, note_loc.column),
+                    .source_line = tree.source[note_loc.line_start..note_loc.line_end],
                 },
             };
-            notes_len = 1;
-            i += 1;
+            notes_len += 1;
         }
 
         const extra_offset = tree.errorOffset(parse_error);
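Note that the collected notes still land in the fixed `notes_buffer: [2]Compilation.AllErrors.Message` declared earlier, and the loop writes `notes_buffer[notes_len]` for every trailing note, so it relies on the parser emitting at most two notes per error. That holds for the notes touched in this diff (the `.previous_field`/`.next_field` pair and the single `.c_style_container` note). A hypothetical bounds-guarded variant, for illustration only:

    for (parse_errors[i + 1 ..]) |note| {
        if (!note.is_note) break;
        if (notes_len == notes_buffer.len) break; // cap at the buffer size instead of writing past it
        // ... render and record the note as in the hunk above ...
    }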