mirror of https://github.com/ziglang/zig.git
synced 2025-12-29 09:33:18 +00:00

Merge pull request #5336 from Vexu/parser

Make self-hosted parser more error tolerant

commit f8b99331a2
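
The core of the change: a parser-level failure no longer aborts the whole parse. Call sites catch error.ParseError, leave in place the diagnostic the failing function already pushed onto tree.errors, resync the token stream to a plausible boundary, and keep parsing. Reduced to its skeleton, the pattern repeated throughout the parser changes below looks like this (a minimal sketch; list, arena, it, and tree stand in for the surrounding function's locals):

    // Sketch of the recovery loop used by parseContainerMembers and
    // parseBlock: the failing parse function has already recorded an
    // error on tree.errors, so recovery only resyncs and continues.
    while (true) {
        const node = (parseTopLevelDecl(arena, it, tree) catch |err| switch (err) {
            // allocation failure still aborts the entire parse
            error.OutOfMemory => return error.OutOfMemory,
            error.ParseError => {
                // skip tokens until something that can start a new declaration
                findNextContainerMember(it);
                continue;
            },
        }) orelse break;
        try list.push(node);
    }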
@@ -10104,6 +10104,7 @@ ContainerField <- IDENTIFIER (COLON TypeExpr)? (EQUAL Expr)?
Statement
    <- KEYWORD_comptime? VarDecl
     / KEYWORD_comptime BlockExprStatement
     / KEYWORD_nosuspend BlockExprStatement
     / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
     / KEYWORD_defer BlockExprStatement
     / KEYWORD_errdefer BlockExprStatement

@@ -10160,6 +10161,7 @@ PrimaryExpr
     / IfExpr
     / KEYWORD_break BreakLabel? Expr?
     / KEYWORD_comptime Expr
     / KEYWORD_nosuspend Expr
     / KEYWORD_continue BreakLabel?
     / KEYWORD_resume Expr
     / KEYWORD_return Expr?

@@ -10522,6 +10524,7 @@ KEYWORD_for <- 'for' end_of_word
KEYWORD_if <- 'if' end_of_word
KEYWORD_inline <- 'inline' end_of_word
KEYWORD_noalias <- 'noalias' end_of_word
KEYWORD_nosuspend <- 'nosuspend' end_of_word
KEYWORD_null <- 'null' end_of_word
KEYWORD_or <- 'or' end_of_word
KEYWORD_orelse <- 'orelse' end_of_word

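For reference, the nosuspend productions above accept code like the following (illustrative snippet, not part of the commit; foo and bar are placeholder functions):

    fn foo() i32 {
        return 1;
    }
    fn bar() void {}

    test "nosuspend forms" {
        // PrimaryExpr: KEYWORD_nosuspend Expr
        const x = nosuspend foo();
        // Statement: KEYWORD_nosuspend BlockExprStatement
        nosuspend {
            bar();
        }
        _ = x;
    }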
@@ -129,6 +129,7 @@ pub const Error = union(enum) {
    ExpectedStatement: ExpectedStatement,
    ExpectedVarDeclOrFn: ExpectedVarDeclOrFn,
    ExpectedVarDecl: ExpectedVarDecl,
    ExpectedFn: ExpectedFn,
    ExpectedReturnType: ExpectedReturnType,
    ExpectedAggregateKw: ExpectedAggregateKw,
    UnattachedDocComment: UnattachedDocComment,

@@ -165,6 +166,7 @@ pub const Error = union(enum) {
    ExpectedDerefOrUnwrap: ExpectedDerefOrUnwrap,
    ExpectedSuffixOp: ExpectedSuffixOp,
    DeclBetweenFields: DeclBetweenFields,
    InvalidAnd: InvalidAnd,

    pub fn render(self: *const Error, tokens: *Tree.TokenList, stream: var) !void {
        switch (self.*) {

@@ -177,6 +179,7 @@ pub const Error = union(enum) {
            .ExpectedStatement => |*x| return x.render(tokens, stream),
            .ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream),
            .ExpectedVarDecl => |*x| return x.render(tokens, stream),
            .ExpectedFn => |*x| return x.render(tokens, stream),
            .ExpectedReturnType => |*x| return x.render(tokens, stream),
            .ExpectedAggregateKw => |*x| return x.render(tokens, stream),
            .UnattachedDocComment => |*x| return x.render(tokens, stream),

@@ -213,6 +216,7 @@ pub const Error = union(enum) {
            .ExpectedDerefOrUnwrap => |*x| return x.render(tokens, stream),
            .ExpectedSuffixOp => |*x| return x.render(tokens, stream),
            .DeclBetweenFields => |*x| return x.render(tokens, stream),
            .InvalidAnd => |*x| return x.render(tokens, stream),
        }
    }

@@ -227,6 +231,7 @@ pub const Error = union(enum) {
            .ExpectedStatement => |x| return x.token,
            .ExpectedVarDeclOrFn => |x| return x.token,
            .ExpectedVarDecl => |x| return x.token,
            .ExpectedFn => |x| return x.token,
            .ExpectedReturnType => |x| return x.token,
            .ExpectedAggregateKw => |x| return x.token,
            .UnattachedDocComment => |x| return x.token,

@@ -263,6 +268,7 @@ pub const Error = union(enum) {
            .ExpectedDerefOrUnwrap => |x| return x.token,
            .ExpectedSuffixOp => |x| return x.token,
            .DeclBetweenFields => |x| return x.token,
            .InvalidAnd => |x| return x.token,
        }
    }

@@ -274,6 +280,7 @@ pub const Error = union(enum) {
    pub const ExpectedStatement = SingleTokenError("Expected statement, found '{}'");
    pub const ExpectedVarDeclOrFn = SingleTokenError("Expected variable declaration or function, found '{}'");
    pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{}'");
    pub const ExpectedFn = SingleTokenError("Expected function, found '{}'");
    pub const ExpectedReturnType = SingleTokenError("Expected 'var' or return type expression, found '{}'");
    pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Id.Keyword_struct.symbol() ++ "', '" ++ Token.Id.Keyword_union.symbol() ++ "', or '" ++ Token.Id.Keyword_enum.symbol() ++ "', found '{}'");
    pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{}'");

@@ -308,6 +315,7 @@ pub const Error = union(enum) {
    pub const ExtraVolatileQualifier = SimpleError("Extra volatile qualifier");
    pub const ExtraAllowZeroQualifier = SimpleError("Extra allowzero qualifier");
    pub const DeclBetweenFields = SimpleError("Declarations are not allowed between container fields");
    pub const InvalidAnd = SimpleError("`&&` is invalid. Note that `and` is boolean AND.");

    pub const ExpectedCall = struct {
        node: *Node,

@@ -335,9 +343,6 @@ pub const Error = union(enum) {
        pub fn render(self: *const ExpectedToken, tokens: *Tree.TokenList, stream: var) !void {
            const found_token = tokens.at(self.token);
            switch (found_token.id) {
                .Invalid_ampersands => {
                    return stream.print("`&&` is invalid. Note that `and` is boolean AND.", .{});
                },
                .Invalid => {
                    return stream.print("expected '{}', found invalid bytes", .{self.expected_id.symbol()});
                },

@@ -888,6 +893,7 @@ pub const Node = struct {
        pub const ReturnType = union(enum) {
            Explicit: *Node,
            InferErrorSet: *Node,
            Invalid: TokenIndex,
        };

        pub fn iterate(self: *FnProto, index: usize) ?*Node {

@@ -916,6 +922,7 @@ pub const Node = struct {
                    if (i < 1) return node;
                    i -= 1;
                },
                .Invalid => {},
            }

            if (self.body_node) |body_node| {

@@ -937,6 +944,7 @@ pub const Node = struct {
            if (self.body_node) |body_node| return body_node.lastToken();
            switch (self.return_type) {
                .Explicit, .InferErrorSet => |node| return node.lastToken(),
                .Invalid => |tok| return tok,
            }
        }
    };

@@ -48,31 +48,24 @@ pub fn parse(allocator: *Allocator, source: []const u8) Allocator.Error!*Tree {

    while (it.peek().?.id == .LineComment) _ = it.next();

    tree.root_node = parseRoot(arena, &it, tree) catch |err| blk: {
        switch (err) {
            error.ParseError => {
                assert(tree.errors.len != 0);
                break :blk undefined;
            },
            error.OutOfMemory => {
                return error.OutOfMemory;
            },
        }
    };
    tree.root_node = try parseRoot(arena, &it, tree);

    return tree;
}

/// Root <- skip ContainerMembers eof
fn parseRoot(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!*Node.Root {
fn parseRoot(arena: *Allocator, it: *TokenIterator, tree: *Tree) Allocator.Error!*Node.Root {
    const node = try arena.create(Node.Root);
    node.* = .{
        .decls = try parseContainerMembers(arena, it, tree),
        .eof_token = eatToken(it, .Eof) orelse {
        .eof_token = eatToken(it, .Eof) orelse blk: {
            // parseContainerMembers will try to skip as many
            // invalid tokens as it can, so this can only be a '}'
            const tok = eatToken(it, .RBrace).?;
            try tree.errors.push(.{
                .ExpectedContainerMembers = .{ .token = it.index },
                .ExpectedContainerMembers = .{ .token = tok },
            });
            return error.ParseError;
            break :blk tok;
        },
    };
    return node;

@@ -108,7 +101,13 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
        const doc_comments = try parseDocComment(arena, it, tree);

        if (try parseTestDecl(arena, it, tree)) |node| {
        if (parseTestDecl(arena, it, tree) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.ParseError => {
                findNextContainerMember(it);
                continue;
            },
        }) |node| {
            if (field_state == .seen) {
                field_state = .{ .end = node.firstToken() };
            }

@@ -117,7 +116,13 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
            continue;
        }

        if (try parseTopLevelComptime(arena, it, tree)) |node| {
        if (parseTopLevelComptime(arena, it, tree) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.ParseError => {
                findNextContainerMember(it);
                continue;
            },
        }) |node| {
            if (field_state == .seen) {
                field_state = .{ .end = node.firstToken() };
            }

@@ -128,7 +133,13 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No

        const visib_token = eatToken(it, .Keyword_pub);

        if (try parseTopLevelDecl(arena, it, tree)) |node| {
        if (parseTopLevelDecl(arena, it, tree) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.ParseError => {
                findNextContainerMember(it);
                continue;
            },
        }) |node| {
            if (field_state == .seen) {
                field_state = .{ .end = visib_token orelse node.firstToken() };
            }

@@ -163,10 +174,18 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
            try tree.errors.push(.{
                .ExpectedPubItem = .{ .token = it.index },
            });
            return error.ParseError;
            // ignore this pub
            continue;
        }

        if (try parseContainerField(arena, it, tree)) |node| {
        if (parseContainerField(arena, it, tree) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.ParseError => {
                // attempt to recover
                findNextContainerMember(it);
                continue;
            },
        }) |node| {
            switch (field_state) {
                .none => field_state = .seen,
                .err, .seen => {},

@@ -182,7 +201,21 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
            const field = node.cast(Node.ContainerField).?;
            field.doc_comments = doc_comments;
            try list.push(node);
            const comma = eatToken(it, .Comma) orelse break;
            const comma = eatToken(it, .Comma) orelse {
                // try to continue parsing
                const index = it.index;
                findNextContainerMember(it);
                switch (it.peek().?.id) {
                    .Eof, .RBrace => break,
                    else => {
                        // add error and continue
                        try tree.errors.push(.{
                            .ExpectedToken = .{ .token = index, .expected_id = .Comma },
                        });
                        continue;
                    },
                }
            };
            if (try parseAppendedDocComment(arena, it, tree, comma)) |appended_comment|
                field.doc_comments = appended_comment;
            continue;

@@ -194,12 +227,102 @@ fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree) !No
                .UnattachedDocComment = .{ .token = doc_comments.?.firstToken() },
            });
        }
        break;

        switch (it.peek().?.id) {
            .Eof, .RBrace => break,
            else => {
                // this was likely not supposed to end yet,
                // try to find the next declaration
                const index = it.index;
                findNextContainerMember(it);
                try tree.errors.push(.{
                    .ExpectedContainerMembers = .{ .token = index },
                });
            },
        }
    }

    return list;
}

/// Attempts to find the next container member by searching for certain tokens
fn findNextContainerMember(it: *TokenIterator) void {
    var level: u32 = 0;
    while (true) {
        const tok = nextToken(it);
        switch (tok.ptr.id) {
            // any of these can start a new top level declaration
            .Keyword_test,
            .Keyword_comptime,
            .Keyword_pub,
            .Keyword_export,
            .Keyword_extern,
            .Keyword_inline,
            .Keyword_noinline,
            .Keyword_usingnamespace,
            .Keyword_threadlocal,
            .Keyword_const,
            .Keyword_var,
            .Keyword_fn,
            .Identifier,
            => {
                if (level == 0) {
                    putBackToken(it, tok.index);
                    return;
                }
            },
            .Comma, .Semicolon => {
                // this decl was likely meant to end here
                if (level == 0) {
                    return;
                }
            },
            .LParen, .LBracket, .LBrace => level += 1,
            .RParen, .RBracket, .RBrace => {
                if (level == 0) {
                    // end of container, exit
                    putBackToken(it, tok.index);
                    return;
                }
                level -= 1;
            },
            .Eof => {
                putBackToken(it, tok.index);
                return;
            },
            else => {},
        }
    }
}

/// Attempts to find the next statement by searching for a semicolon
fn findNextStmt(it: *TokenIterator) void {
    var level: u32 = 0;
    while (true) {
        const tok = nextToken(it);
        switch (tok.ptr.id) {
            .LBrace => level += 1,
            .RBrace => {
                if (level == 0) {
                    putBackToken(it, tok.index);
                    return;
                }
                level -= 1;
            },
            .Semicolon => {
                if (level == 0) {
                    return;
                }
            },
            .Eof => {
                putBackToken(it, tok.index);
                return;
            },
            else => {},
        }
    }
}

/// Eat a multiline container doc comment
fn parseContainerDocComments(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    var lines = Node.DocComment.LineList.init(arena);

@@ -279,22 +402,30 @@ fn parseTopLevelDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        fn_node.*.extern_export_inline_token = extern_export_inline_token;
        fn_node.*.lib_name = lib_name;
        if (eatToken(it, .Semicolon)) |_| return node;
        if (try parseBlock(arena, it, tree)) |body_node| {
        if (parseBlock(arena, it, tree) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            // since parseBlock only returns error.ParseError on
            // a missing '}' we can assume this function was
            // supposed to end here.
            error.ParseError => return node,
        }) |body_node| {
            fn_node.body_node = body_node;
            return node;
        }
        try tree.errors.push(.{
            .ExpectedSemiOrLBrace = .{ .token = it.index },
        });
        return null;
        return error.ParseError;
    }

    if (extern_export_inline_token) |token| {
        if (tree.tokens.at(token).id == .Keyword_inline or
            tree.tokens.at(token).id == .Keyword_noinline)
        {
            putBackToken(it, token);
            return null;
            try tree.errors.push(.{
                .ExpectedFn = .{ .token = it.index },
            });
            return error.ParseError;
        }
    }

@@ -313,26 +444,19 @@ fn parseTopLevelDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        try tree.errors.push(.{
            .ExpectedVarDecl = .{ .token = it.index },
        });
        // ignore this and try again
        return error.ParseError;
    }

    if (extern_export_inline_token) |token| {
        if (lib_name) |string_literal_node|
            putBackToken(it, string_literal_node.cast(Node.StringLiteral).?.token);
        putBackToken(it, token);
        return null;
        try tree.errors.push(.{
            .ExpectedVarDeclOrFn = .{ .token = it.index },
        });
        // ignore this and try again
        return error.ParseError;
    }

    const use_node = (try parseUse(arena, it, tree)) orelse return null;
    const expr_node = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    const semicolon_token = try expectToken(it, tree, .Semicolon);
    const use_node_raw = use_node.cast(Node.Use).?;
    use_node_raw.*.expr = expr_node;
    use_node_raw.*.semicolon_token = semicolon_token;

    return use_node;
    return try parseUse(arena, it, tree);
}

/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
@@ -366,18 +490,23 @@ fn parseFnProto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const exclamation_token = eatToken(it, .Bang);

    const return_type_expr = (try parseVarType(arena, it, tree)) orelse
        try expectNode(arena, it, tree, parseTypeExpr, .{
            .ExpectedReturnType = .{ .token = it.index },
        });
        (try parseTypeExpr(arena, it, tree)) orelse blk: {
            try tree.errors.push(.{
                .ExpectedReturnType = .{ .token = it.index },
            });
            // most likely the user forgot to specify the return type.
            // Mark return type as invalid and try to continue.
            break :blk null;
        };

    const return_type: Node.FnProto.ReturnType = if (exclamation_token != null)
        .{
            .InferErrorSet = return_type_expr,
        }
    // TODO https://github.com/ziglang/zig/issues/3750
    const R = Node.FnProto.ReturnType;
    const return_type = if (return_type_expr == null)
        R{ .Invalid = rparen }
    else if (exclamation_token != null)
        R{ .InferErrorSet = return_type_expr.? }
    else
        .{
            .Explicit = return_type_expr,
        };
        R{ .Explicit = return_type_expr.? };

    const var_args_token = if (params.len > 0)
        params.at(params.len - 1).*.cast(Node.ParamDecl).?.var_args_token

@@ -578,7 +707,12 @@ fn parseStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*No
    if (try parseLabeledStatement(arena, it, tree)) |node| return node;
    if (try parseSwitchExpr(arena, it, tree)) |node| return node;
    if (try parseAssignExpr(arena, it, tree)) |node| {
        _ = try expectToken(it, tree, .Semicolon);
        _ = eatToken(it, .Semicolon) orelse {
            try tree.errors.push(.{
                .ExpectedToken = .{ .token = it.index, .expected_id = .Semicolon },
            });
            // pretend we saw a semicolon and continue parsing
        };
        return node;
    }

@@ -687,8 +821,13 @@ fn parseLoopStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Nod
        node.cast(Node.While).?.inline_token = inline_token;
        return node;
    }
    if (inline_token == null) return null;

    return null;
    // If we've seen "inline", there should have been a "for" or "while"
    try tree.errors.push(.{
        .ExpectedInlinable = .{ .token = it.index },
    });
    return error.ParseError;
}

/// ForStatement
@@ -817,7 +956,12 @@ fn parseWhileStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
fn parseBlockExprStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    if (try parseBlockExpr(arena, it, tree)) |node| return node;
    if (try parseAssignExpr(arena, it, tree)) |node| {
        _ = try expectToken(it, tree, .Semicolon);
        _ = eatToken(it, .Semicolon) orelse {
            try tree.errors.push(.{
                .ExpectedToken = .{ .token = it.index, .expected_id = .Semicolon },
            });
            // pretend we saw a semicolon and continue parsing
        };
        return node;
    }
    return null;

@@ -924,7 +1068,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        const node = try arena.create(Node.ControlFlowExpression);
        node.* = .{
            .ltoken = token,
            .kind = Node.ControlFlowExpression.Kind{ .Break = label },
            .kind = .{ .Break = label },
            .rhs = expr_node,
        };
        return &node.base;

@@ -960,7 +1104,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        const node = try arena.create(Node.ControlFlowExpression);
        node.* = .{
            .ltoken = token,
            .kind = Node.ControlFlowExpression.Kind{ .Continue = label },
            .kind = .{ .Continue = label },
            .rhs = null,
        };
        return &node.base;

@@ -984,7 +1128,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        const node = try arena.create(Node.ControlFlowExpression);
        node.* = .{
            .ltoken = token,
            .kind = Node.ControlFlowExpression.Kind.Return,
            .kind = .Return,
            .rhs = expr_node,
        };
        return &node.base;

@@ -1022,7 +1166,14 @@ fn parseBlock(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {

    var statements = Node.Block.StatementList.init(arena);
    while (true) {
        const statement = (try parseStatement(arena, it, tree)) orelse break;
        const statement = (parseStatement(arena, it, tree) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.ParseError => {
                // try to skip to the next statement
                findNextStmt(it);
                continue;
            },
        }) orelse break;
        try statements.push(statement);
    }

@@ -1222,7 +1373,8 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
            try tree.errors.push(.{
                .ExpectedParamList = .{ .token = it.index },
            });
            return null;
            // ignore this, continue parsing
            return res;
        };
        const node = try arena.create(Node.SuffixOp);
        node.* = .{

@@ -1287,7 +1439,6 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
///      / IfTypeExpr
///      / INTEGER
///      / KEYWORD_comptime TypeExpr
///      / KEYWORD_nosuspend TypeExpr
///      / KEYWORD_error DOT IDENTIFIER
///      / KEYWORD_false
///      / KEYWORD_null

@@ -1326,15 +1477,6 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
        };
        return &node.base;
    }
    if (eatToken(it, .Keyword_nosuspend)) |token| {
        const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
        const node = try arena.create(Node.Nosuspend);
        node.* = .{
            .nosuspend_token = token,
            .expr = expr,
        };
        return &node.base;
    }
    if (eatToken(it, .Keyword_error)) |token| {
        const period = try expectToken(it, tree, .Period);
        const identifier = try expectNode(arena, it, tree, parseIdentifier, .{

@@ -2271,7 +2413,7 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        const node = try arena.create(Node.AnyFrameType);
        node.* = .{
            .anyframe_token = token,
            .result = Node.AnyFrameType.Result{
            .result = .{
                .arrow_token = arrow,
                .return_type = undefined, // set by caller
            },

@@ -2312,6 +2454,13 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
            } else null;
            _ = try expectToken(it, tree, .RParen);

            if (ptr_info.align_info != null) {
                try tree.errors.push(.{
                    .ExtraAlignQualifier = .{ .token = it.index - 1 },
                });
                continue;
            }

            ptr_info.align_info = Node.PrefixOp.PtrInfo.Align{
                .node = expr_node,
                .bit_range = bit_range,

@@ -2320,14 +2469,32 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
            continue;
        }
        if (eatToken(it, .Keyword_const)) |const_token| {
            if (ptr_info.const_token != null) {
                try tree.errors.push(.{
                    .ExtraConstQualifier = .{ .token = it.index - 1 },
                });
                continue;
            }
            ptr_info.const_token = const_token;
            continue;
        }
        if (eatToken(it, .Keyword_volatile)) |volatile_token| {
            if (ptr_info.volatile_token != null) {
                try tree.errors.push(.{
                    .ExtraVolatileQualifier = .{ .token = it.index - 1 },
                });
                continue;
            }
            ptr_info.volatile_token = volatile_token;
            continue;
        }
        if (eatToken(it, .Keyword_allowzero)) |allowzero_token| {
            if (ptr_info.allowzero_token != null) {
                try tree.errors.push(.{
                    .ExtraAllowZeroQualifier = .{ .token = it.index - 1 },
                });
                continue;
            }
            ptr_info.allowzero_token = allowzero_token;
            continue;
        }

@@ -2346,9 +2513,9 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        if (try parseByteAlign(arena, it, tree)) |align_expr| {
            if (slice_type.align_info != null) {
                try tree.errors.push(.{
                    .ExtraAlignQualifier = .{ .token = it.index },
                    .ExtraAlignQualifier = .{ .token = it.index - 1 },
                });
                return error.ParseError;
                continue;
            }
            slice_type.align_info = Node.PrefixOp.PtrInfo.Align{
                .node = align_expr,

@@ -2359,9 +2526,9 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        if (eatToken(it, .Keyword_const)) |const_token| {
            if (slice_type.const_token != null) {
                try tree.errors.push(.{
                    .ExtraConstQualifier = .{ .token = it.index },
                    .ExtraConstQualifier = .{ .token = it.index - 1 },
                });
                return error.ParseError;
                continue;
            }
            slice_type.const_token = const_token;
            continue;

@@ -2369,9 +2536,9 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        if (eatToken(it, .Keyword_volatile)) |volatile_token| {
            if (slice_type.volatile_token != null) {
                try tree.errors.push(.{
                    .ExtraVolatileQualifier = .{ .token = it.index },
                    .ExtraVolatileQualifier = .{ .token = it.index - 1 },
                });
                return error.ParseError;
                continue;
            }
            slice_type.volatile_token = volatile_token;
            continue;

@@ -2379,9 +2546,9 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        if (eatToken(it, .Keyword_allowzero)) |allowzero_token| {
            if (slice_type.allowzero_token != null) {
                try tree.errors.push(.{
                    .ExtraAllowZeroQualifier = .{ .token = it.index },
                    .ExtraAllowZeroQualifier = .{ .token = it.index - 1 },
                });
                return error.ParseError;
                continue;
            }
            slice_type.allowzero_token = allowzero_token;
            continue;

@@ -2730,7 +2897,19 @@ fn ListParseFn(comptime L: type, comptime nodeParseFn: var) ParseFn(L) {
        var list = L.init(arena);
        while (try nodeParseFn(arena, it, tree)) |node| {
            try list.push(node);
            if (eatToken(it, .Comma) == null) break;

            switch (it.peek().?.id) {
                .Comma => _ = nextToken(it),
                // all possible delimiters
                .Colon, .RParen, .RBrace, .RBracket => break,
                else => {
                    // this is likely just a missing comma,
                    // continue parsing this list and give an error
                    try tree.errors.push(.{
                        .ExpectedToken = .{ .token = it.index, .expected_id = .Comma },
                    });
                },
            }
        }
        return list;
    }

@@ -2740,7 +2919,17 @@ fn ListParseFn(comptime L: type, comptime nodeParseFn: var) ParseFn(L) {

fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.InfixOp.Op) NodeParseFn {
    return struct {
        pub fn parse(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
            const op_token = eatToken(it, token) orelse return null;
            const op_token = if (token == .Keyword_and) switch (it.peek().?.id) {
                .Keyword_and => nextToken(it).index,
                .Invalid_ampersands => blk: {
                    try tree.errors.push(.{
                        .InvalidAnd = .{ .token = it.index },
                    });
                    break :blk nextToken(it).index;
                },
                else => return null,
            } else eatToken(it, token) orelse return null;

            const node = try arena.create(Node.InfixOp);
            node.* = .{
                .op_token = op_token,

@@ -2761,7 +2950,13 @@ fn parseBuiltinCall(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
        try tree.errors.push(.{
            .ExpectedParamList = .{ .token = it.index },
        });
        return error.ParseError;

        // let's pretend this was an identifier so we can continue parsing
        const node = try arena.create(Node.Identifier);
        node.* = .{
            .token = token,
        };
        return &node.base;
    };
    const node = try arena.create(Node.BuiltinCall);
    node.* = .{

@@ -2877,8 +3072,10 @@ fn parseUse(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
        .doc_comments = null,
        .visib_token = null,
        .use_token = token,
        .expr = undefined, // set by caller
        .semicolon_token = undefined, // set by caller
        .expr = try expectNode(arena, it, tree, parseExpr, .{
            .ExpectedExpr = .{ .token = it.index },
        }),
        .semicolon_token = try expectToken(it, tree, .Semicolon),
    };
    return &node.base;
}

@@ -3058,6 +3255,8 @@ fn expectToken(it: *TokenIterator, tree: *Tree, id: Token.Id) Error!TokenIndex {
        try tree.errors.push(.{
            .ExpectedToken = .{ .token = token.index, .expected_id = id },
        });
        // go back so that we can recover properly
        putBackToken(it, token.index);
        return error.ParseError;
    }
    return token.index;

@@ -1,3 +1,153 @@
test "recovery: top level" {
    try testError(
        \\test "" {inline}
        \\test "" {inline}
    , &[_]Error{
        .ExpectedInlinable,
        .ExpectedInlinable,
    });
}

test "recovery: block statements" {
    try testError(
        \\test "" {
        \\ foo + +;
        \\ inline;
        \\}
    , &[_]Error{
        .InvalidToken,
        .ExpectedInlinable,
    });
}

test "recovery: missing comma" {
    try testError(
        \\test "" {
        \\ switch (foo) {
        \\ 2 => {}
        \\ 3 => {}
        \\ else => {
        \\ foo && bar +;
        \\ }
        \\ }
        \\}
    , &[_]Error{
        .ExpectedToken,
        .ExpectedToken,
        .InvalidAnd,
        .InvalidToken,
    });
}

test "recovery: extra qualifier" {
    try testError(
        \\const a: *const const u8;
        \\test ""
    , &[_]Error{
        .ExtraConstQualifier,
        .ExpectedLBrace,
    });
}

test "recovery: missing return type" {
    try testError(
        \\fn foo() {
        \\ a && b;
        \\}
        \\test ""
    , &[_]Error{
        .ExpectedReturnType,
        .InvalidAnd,
        .ExpectedLBrace,
    });
}

test "recovery: continue after invalid decl" {
    try testError(
        \\fn foo {
        \\ inline;
        \\}
        \\pub test "" {
        \\ async a && b;
        \\}
    , &[_]Error{
        .ExpectedToken,
        .ExpectedPubItem,
        .ExpectedParamList,
        .InvalidAnd,
    });
    try testError(
        \\threadlocal test "" {
        \\ @a && b;
        \\}
    , &[_]Error{
        .ExpectedVarDecl,
        .ExpectedParamList,
        .InvalidAnd,
    });
}

test "recovery: invalid extern/inline" {
    try testError(
        \\inline test "" { a && b; }
    , &[_]Error{
        .ExpectedFn,
        .InvalidAnd,
    });
    try testError(
        \\extern "" test "" { a && b; }
    , &[_]Error{
        .ExpectedVarDeclOrFn,
        .InvalidAnd,
    });
}

test "recovery: missing semicolon" {
    try testError(
        \\test "" {
        \\ comptime a && b
        \\ c && d
        \\ @foo
        \\}
    , &[_]Error{
        .InvalidAnd,
        .ExpectedToken,
        .InvalidAnd,
        .ExpectedToken,
        .ExpectedParamList,
        .ExpectedToken,
    });
}

test "recovery: invalid container members" {
    try testError(
        \\usingnamespace;
        \\foo+
        \\bar@,
        \\while (a == 2) { test "" {}}
        \\test "" {
        \\ a && b
        \\}
    , &[_]Error{
        .ExpectedExpr,
        .ExpectedToken,
        .ExpectedToken,
        .ExpectedContainerMembers,
        .InvalidAnd,
        .ExpectedToken,
    });
}

test "recovery: invalid parameter" {
    try testError(
        \\fn main() void {
        \\ a(comptime T: type)
        \\}
    , &[_]Error{
        .ExpectedToken,
    });
}

test "zig fmt: top-level fields" {
    try testCanonical(
        \\a: did_you_know,

@@ -19,7 +169,9 @@ test "zig fmt: decl between fields" {
        \\ const baz1 = 2;
        \\ b: usize,
        \\};
    );
    , &[_]Error{
        .DeclBetweenFields,
    });
}

test "zig fmt: errdefer with payload" {
@@ -2828,7 +2980,10 @@ test "zig fmt: extern without container keyword returns error" {
    try testError(
        \\const container = extern {};
        \\
    );
    , &[_]Error{
        .ExpectedExpr,
        .ExpectedVarDeclOrFn,
    });
}

test "zig fmt: integer literals with underscore separators" {
@@ -3030,9 +3185,17 @@ fn testTransform(source: []const u8, expected_source: []const u8) !void {
fn testCanonical(source: []const u8) !void {
    return testTransform(source, source);
}
fn testError(source: []const u8) !void {

const Error = @TagType(std.zig.ast.Error);

fn testError(source: []const u8, expected_errors: []const Error) !void {
    const tree = try std.zig.parse(std.testing.allocator, source);
    defer tree.deinit();

    std.testing.expect(tree.errors.len != 0);
    std.testing.expect(tree.errors.len == expected_errors.len);
    for (expected_errors) |expected, i| {
        const err = tree.errors.at(i);

        std.testing.expect(expected == err.*);
    }
}

@@ -13,6 +13,9 @@ pub const Error = error{

/// Returns whether anything changed
pub fn render(allocator: *mem.Allocator, stream: var, tree: *ast.Tree) (@TypeOf(stream).Error || Error)!bool {
    // cannot render an invalid tree
    std.debug.assert(tree.errors.len == 0);

    // make a passthrough stream that checks whether something changed
    const MyStream = struct {
        const MyStream = @This();

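Since parse now always returns a tree and accumulates syntax errors on it instead of failing with error.ParseError, a caller that wants to render must check tree.errors itself; the new assertion makes that contract explicit. A sketch of the intended call pattern (the wrapper function and its error name are hypothetical; parse and render are the std.zig entry points of this era):

    const std = @import("std");

    fn renderIfValid(allocator: *std.mem.Allocator, stream: var, source: []const u8) !bool {
        const tree = try std.zig.parse(allocator, source);
        defer tree.deinit();
        // parse() no longer returns error.ParseError; syntax problems
        // are collected on tree.errors instead
        if (tree.errors.len != 0) return error.InvalidSyntax;
        // safe: render asserts tree.errors.len == 0
        return std.zig.render(allocator, stream, tree);
    }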
@@ -1444,6 +1447,7 @@ fn renderExpression(
    else switch (fn_proto.return_type) {
        .Explicit => |node| node.firstToken(),
        .InferErrorSet => |node| tree.prevToken(node.firstToken()),
        .Invalid => unreachable,
    });
    assert(tree.tokens.at(rparen).id == .RParen);

@@ -1518,13 +1522,14 @@ fn renderExpression(
            }

            switch (fn_proto.return_type) {
                ast.Node.FnProto.ReturnType.Explicit => |node| {
                .Explicit => |node| {
                    return renderExpression(allocator, stream, tree, indent, start_col, node, space);
                },
                ast.Node.FnProto.ReturnType.InferErrorSet => |node| {
                .InferErrorSet => |node| {
                    try renderToken(tree, stream, tree.prevToken(node.firstToken()), indent, start_col, Space.None); // !
                    return renderExpression(allocator, stream, tree, indent, start_col, node, space);
                },
                .Invalid => unreachable,
            }
        },

File diff suppressed because it is too large
@@ -668,7 +668,7 @@ fn transTypeDefAsBuiltin(c: *Context, typedef_decl: *const ZigClangTypedefNameDe
    return transCreateNodeIdentifier(c, builtin_name);
}

fn checkForBuiltinTypedef(checked_name: []const u8) !?[]const u8 {
fn checkForBuiltinTypedef(checked_name: []const u8) ?[]const u8 {
    const table = [_][2][]const u8{
        .{ "uint8_t", "u8" },
        .{ "int8_t", "i8" },

@@ -703,7 +703,7 @@ fn transTypeDef(c: *Context, typedef_decl: *const ZigClangTypedefNameDecl, top_l
    // TODO https://github.com/ziglang/zig/issues/3756
    // TODO https://github.com/ziglang/zig/issues/1802
    const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.a(), "{}_{}", .{ typedef_name, c.getMangle() }) else typedef_name;
    if (try checkForBuiltinTypedef(checked_name)) |builtin| {
    if (checkForBuiltinTypedef(checked_name)) |builtin| {
        return transTypeDefAsBuiltin(c, typedef_decl, builtin);
    }

@@ -1411,7 +1411,7 @@ fn transDeclStmt(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangDeclStmt)
    const underlying_type = ZigClangQualType_getTypePtr(underlying_qual);

    const mangled_name = try block_scope.makeMangledName(c, name);
    if (try checkForBuiltinTypedef(name)) |builtin| {
    if (checkForBuiltinTypedef(name)) |builtin| {
        try block_scope.variables.push(.{
            .alias = builtin,
            .name = mangled_name,

@@ -1609,7 +1609,6 @@ static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
//      / IfTypeExpr
//      / INTEGER
//      / KEYWORD_comptime TypeExpr
//      / KEYWORD_nosuspend TypeExpr
//      / KEYWORD_error DOT IDENTIFIER
//      / KEYWORD_false
//      / KEYWORD_null

@@ -1711,14 +1710,6 @@ static AstNode *ast_parse_primary_type_expr(ParseContext *pc) {
        return res;
    }

    Token *nosuspend = eat_token_if(pc, TokenIdKeywordNoSuspend);
    if (nosuspend != nullptr) {
        AstNode *expr = ast_expect(pc, ast_parse_type_expr);
        AstNode *res = ast_create_node(pc, NodeTypeNoSuspend, nosuspend);
        res->data.nosuspend_expr.expr = expr;
        return res;
    }

    Token *error = eat_token_if(pc, TokenIdKeywordError);
    if (error != nullptr) {
        Token *dot = expect_token(pc, TokenIdDot);