update parsers to new noasync syntax

Vexu 2020-03-09 11:02:16 +02:00
parent e2fd289a33
commit 6f8d732599
No known key found for this signature in database
GPG Key ID: 59AEB8936E16A6AC
6 changed files with 108 additions and 43 deletions
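
Both parsers are updated to the new grammar: the self-hosted (std.zig) parser and the stage1 C++ parser now treat `noasync` as a prefix on a whole expression (KEYWORD_noasync Expr at expression level, KEYWORD_noasync TypeExpr at type level) instead of special-casing it in front of `await` and async-style calls. A rough sketch of the source forms the new rules cover (illustrative only; `foo` and `frame` are placeholder names, not part of this commit):

    // Sketch of the new noasync syntax accepted by both parsers.
    const x = noasync foo(1, 2);   // noasync applied to a call expression
    const y = noasync await frame; // noasync applied to an await expression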


@@ -431,6 +431,7 @@ pub const Node = struct {
ContainerDecl,
Asm,
Comptime,
Noasync,
Block,
// Misc
@@ -1078,6 +1079,30 @@ pub const Node = struct {
}
};
pub const Noasync = struct {
base: Node = Node{ .id = .Noasync },
doc_comments: ?*DocComment,
noasync_token: TokenIndex,
expr: *Node,
pub fn iterate(self: *Noasync, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
i -= 1;
return null;
}
pub fn firstToken(self: *const Noasync) TokenIndex {
return self.noasync_token;
}
pub fn lastToken(self: *const Noasync) TokenIndex {
return self.expr.lastToken();
}
};
pub const Payload = struct {
base: Node = Node{ .id = .Payload },
lpipe: TokenIndex,
@@ -1560,9 +1585,7 @@ pub const Node = struct {
pub const Op = union(enum) {
AddressOf,
ArrayType: ArrayInfo,
Await: struct {
noasync_token: ?TokenIndex = null,
},
Await,
BitNot,
BoolNot,
Cancel,
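
The new Node.Noasync mirrors Node.Comptime: one child reachable through iterate(), firstToken() returning the keyword, and lastToken() delegating to the wrapped expression. A minimal sketch of how a consumer might walk such a node, assuming the usual dispatching iterate() on the base Node (the walk function itself is hypothetical, not part of this commit):

    fn walk(node: *ast.Node) void {
        // iterate(i) returns the i-th child or null past the end;
        // a Noasync node has exactly one child, its expr.
        var i: usize = 0;
        while (node.iterate(i)) |child| : (i += 1) {
            walk(child);
        }
    }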


@@ -856,6 +856,7 @@ fn parsePrefixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
/// / IfExpr
/// / KEYWORD_break BreakLabel? Expr?
/// / KEYWORD_comptime Expr
/// / KEYWORD_noasync Expr
/// / KEYWORD_continue BreakLabel?
/// / KEYWORD_resume Expr
/// / KEYWORD_return Expr?
@@ -870,7 +871,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
const label = try parseBreakLabel(arena, it, tree);
const expr_node = try parseExpr(arena, it, tree);
const node = try arena.create(Node.ControlFlowExpression);
node.* = Node.ControlFlowExpression{
node.* = .{
.ltoken = token,
.kind = Node.ControlFlowExpression.Kind{ .Break = label },
.rhs = expr_node,
@@ -883,7 +884,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
.ExpectedExpr = AstError.ExpectedExpr{ .token = it.index },
});
const node = try arena.create(Node.Comptime);
node.* = Node.Comptime{
node.* = .{
.doc_comments = null,
.comptime_token = token,
.expr = expr_node,
@@ -891,10 +892,23 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
return &node.base;
}
if (eatToken(it, .Keyword_noasync)) |token| {
const expr_node = try expectNode(arena, it, tree, parseExpr, AstError{
.ExpectedExpr = AstError.ExpectedExpr{ .token = it.index },
});
const node = try arena.create(Node.Noasync);
node.* = .{
.doc_comments = null,
.noasync_token = token,
.expr = expr_node,
};
return &node.base;
}
if (eatToken(it, .Keyword_continue)) |token| {
const label = try parseBreakLabel(arena, it, tree);
const node = try arena.create(Node.ControlFlowExpression);
node.* = Node.ControlFlowExpression{
node.* = .{
.ltoken = token,
.kind = Node.ControlFlowExpression.Kind{ .Continue = label },
.rhs = null,
@@ -907,7 +921,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
.ExpectedExpr = AstError.ExpectedExpr{ .token = it.index },
});
const node = try arena.create(Node.PrefixOp);
node.* = Node.PrefixOp{
node.* = .{
.op_token = token,
.op = Node.PrefixOp.Op.Resume,
.rhs = expr_node,
@@ -918,7 +932,7 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
if (eatToken(it, .Keyword_return)) |token| {
const expr_node = try parseExpr(arena, it, tree);
const node = try arena.create(Node.ControlFlowExpression);
node.* = Node.ControlFlowExpression{
node.* = .{
.ltoken = token,
.kind = Node.ControlFlowExpression.Kind.Return,
.rhs = expr_node,
@@ -1126,19 +1140,18 @@ fn parseErrorUnionExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
/// SuffixExpr
/// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
/// / KEYWORD_noasync PrimaryTypeExpr SuffixOp* FnCallArguments
/// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const maybe_async = eatAnnotatedToken(it, .Keyword_async) orelse eatAnnotatedToken(it, .Keyword_noasync);
const maybe_async = eatToken(it, .Keyword_async);
if (maybe_async) |async_token| {
const token_fn = eatToken(it, .Keyword_fn);
if (async_token.ptr.id == .Keyword_async and token_fn != null) {
if (token_fn != null) {
// HACK: If we see the keyword `fn`, then we assume that
// we are parsing an async fn proto, and not a call.
// We therefore put back all tokens consumed by the async
// prefix...
putBackToken(it, token_fn.?);
putBackToken(it, async_token.index);
putBackToken(it, async_token);
return parsePrimaryTypeExpr(arena, it, tree);
}
// TODO: Implement hack for parsing `async fn ...` in ast_parse_suffix_expr
@@ -1167,7 +1180,7 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
.op = Node.SuffixOp.Op{
.Call = Node.SuffixOp.Op.Call{
.params = params.list,
.async_token = async_token.index,
.async_token = async_token,
},
},
.rtoken = params.rparen,
@@ -1224,6 +1237,7 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
/// / IfTypeExpr
/// / INTEGER
/// / KEYWORD_comptime TypeExpr
/// / KEYWORD_noasync TypeExpr
/// / KEYWORD_error DOT IDENTIFIER
/// / KEYWORD_false
/// / KEYWORD_null
@@ -1255,13 +1269,23 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
if (eatToken(it, .Keyword_comptime)) |token| {
const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
const node = try arena.create(Node.Comptime);
node.* = Node.Comptime{
node.* = .{
.doc_comments = null,
.comptime_token = token,
.expr = expr,
};
return &node.base;
}
if (eatToken(it, .Keyword_noasync)) |token| {
const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
const node = try arena.create(Node.Noasync);
node.* = .{
.doc_comments = null,
.noasync_token = token,
.expr = expr,
};
return &node.base;
}
if (eatToken(it, .Keyword_error)) |token| {
const period = try expectToken(it, tree, .Period);
const identifier = try expectNode(arena, it, tree, parseIdentifier, AstError{
@@ -1269,7 +1293,7 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
});
const global_error_set = try createLiteral(arena, Node.ErrorType, token);
const node = try arena.create(Node.InfixOp);
node.* = Node.InfixOp{
node.* = .{
.op_token = period,
.lhs = global_error_set,
.op = Node.InfixOp.Op.Period,
@@ -1281,7 +1305,7 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
if (eatToken(it, .Keyword_null)) |token| return createLiteral(arena, Node.NullLiteral, token);
if (eatToken(it, .Keyword_anyframe)) |token| {
const node = try arena.create(Node.AnyFrameType);
node.* = Node.AnyFrameType{
node.* = .{
.anyframe_token = token,
.result = null,
};
@@ -2180,18 +2204,6 @@ fn parsePrefixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
.Ampersand => ops{ .AddressOf = {} },
.Keyword_try => ops{ .Try = {} },
.Keyword_await => ops{ .Await = .{} },
.Keyword_noasync => if (eatToken(it, .Keyword_await)) |await_tok| {
const node = try arena.create(Node.PrefixOp);
node.* = Node.PrefixOp{
.op_token = await_tok,
.op = .{ .Await = .{ .noasync_token = token.index } },
.rhs = undefined, // set by caller
};
return &node.base;
} else {
putBackToken(it, token.index);
return null;
},
else => {
putBackToken(it, token.index);
return null;
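
Taken together, the self-hosted parser changes above move `noasync` out of parsePrefixOp and parseSuffixExpr and into parsePrimaryExpr/parsePrimaryTypeExpr, which changes the AST shape for the forms that used to be special-cased. A sketch of the resulting shapes (identifier names are placeholders):

    const a = noasync await frame; // Node.Noasync wrapping a PrefixOp with op == .Await
                                   // (previously a PrefixOp .Await carrying an optional noasync_token)
    const b = noasync foo(1, 2);   // Node.Noasync wrapping a SuffixOp call
                                   // (previously handled next to `async foo()` in parseSuffixExpr)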


@@ -390,6 +390,12 @@ fn renderExpression(
try renderToken(tree, stream, comptime_node.comptime_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, comptime_node.expr, space);
},
.Noasync => {
const noasync_node = @fieldParentPtr(ast.Node.Noasync, "base", base);
try renderToken(tree, stream, noasync_node.noasync_token, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, noasync_node.expr, space);
},
.Suspend => {
const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
@@ -590,9 +596,6 @@ fn renderExpression(
},
.Await => |await_info| {
if (await_info.noasync_token) |tok| {
try renderToken(tree, stream, tok, indent, start_col, Space.Space);
}
try renderToken(tree, stream, prefix_op_node.op_token, indent, start_col, Space.Space);
},
}
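
For zig fmt, the new render case mirrors Comptime: the keyword token, a single space, then the wrapped expression. A hypothetical round-trip test in the style of the existing formatter tests could look like the following (testCanonical is assumed here and is not part of this diff):

    test "zig fmt: noasync" {
        try testCanonical(
            \\const a = noasync foo();
            \\
        );
    }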


@@ -651,6 +651,7 @@ enum NodeType {
NodeTypeSwitchProng,
NodeTypeSwitchRange,
NodeTypeCompTime,
NodeTypeNoAsync,
NodeTypeBreak,
NodeTypeContinue,
NodeTypeAsmExpr,
@@ -991,6 +992,10 @@ struct AstNodeCompTime {
AstNode *expr;
};
struct AstNodeNoAsync {
AstNode *expr;
};
struct AsmOutput {
Buf *asm_symbolic_name;
Buf *constraint;
@@ -1148,7 +1153,6 @@ struct AstNodeErrorType {
};
struct AstNodeAwaitExpr {
Token *noasync_token;
AstNode *expr;
};
@@ -1199,6 +1203,7 @@ struct AstNode {
AstNodeSwitchProng switch_prong;
AstNodeSwitchRange switch_range;
AstNodeCompTime comptime_expr;
AstNodeNoAsync noasync_expr;
AstNodeAsmExpr asm_expr;
AstNodeFieldAccessExpr field_access_expr;
AstNodePtrDerefExpr ptr_deref_expr;


@@ -220,6 +220,8 @@ static const char *node_type_str(NodeType node_type) {
return "SwitchRange";
case NodeTypeCompTime:
return "CompTime";
case NodeTypeNoAsync:
return "NoAsync";
case NodeTypeBreak:
return "Break";
case NodeTypeContinue:
@@ -1091,6 +1093,12 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
render_node_grouped(ar, node->data.comptime_expr.expr);
break;
}
case NodeTypeNoAsync:
{
fprintf(ar->f, "noasync ");
render_node_grouped(ar, node->data.noasync_expr.expr);
break;
}
case NodeTypeForExpr:
{
if (node->data.for_expr.name != nullptr) {


@@ -1237,6 +1237,7 @@ static AstNode *ast_parse_prefix_expr(ParseContext *pc) {
// / IfExpr
// / KEYWORD_break BreakLabel? Expr?
// / KEYWORD_comptime Expr
// / KEYWORD_noasync Expr
// / KEYWORD_continue BreakLabel?
// / KEYWORD_resume Expr
// / KEYWORD_return Expr?
@@ -1271,6 +1272,14 @@ static AstNode *ast_parse_primary_expr(ParseContext *pc) {
return res;
}
Token *noasync = eat_token_if(pc, TokenIdKeywordNoAsync);
if (noasync != nullptr) {
AstNode *expr = ast_expect(pc, ast_parse_expr);
AstNode *res = ast_create_node(pc, NodeTypeNoAsync, noasync);
res->data.noasync_expr.expr = expr;
return res;
}
Token *continue_token = eat_token_if(pc, TokenIdKeywordContinue);
if (continue_token != nullptr) {
Token *label = ast_parse_break_label(pc);
@@ -1459,13 +1468,11 @@ static AstNode *ast_parse_error_union_expr(ParseContext *pc) {
// SuffixExpr
// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
// / KEYWORD_noasync PrimaryTypeExpr SuffixOp* FnCallArguments
// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
Token *async_token = eat_token(pc);
bool is_async = async_token->id == TokenIdKeywordAsync;
if (is_async || async_token->id == TokenIdKeywordNoAsync) {
if (is_async && eat_token_if(pc, TokenIdKeywordFn) != nullptr) {
Token *async_token = eat_token_if(pc, TokenIdKeywordAsync);
if (async_token) {
if (eat_token_if(pc, TokenIdKeywordFn) != nullptr) {
// HACK: If we see the keyword `fn`, then we assume that
// we are parsing an async fn proto, and not a call.
// We therefore put back all tokens consumed by the async
@@ -1515,13 +1522,12 @@ static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
assert(args->type == NodeTypeFnCallExpr);
AstNode *res = ast_create_node(pc, NodeTypeFnCallExpr, async_token);
res->data.fn_call_expr.modifier = is_async ? CallModifierAsync : CallModifierNoAsync;
res->data.fn_call_expr.modifier = CallModifierAsync;
res->data.fn_call_expr.seen = false;
res->data.fn_call_expr.fn_ref_expr = child;
res->data.fn_call_expr.params = args->data.fn_call_expr.params;
return res;
}
put_back_token(pc);
AstNode *res = ast_parse_primary_type_expr(pc);
if (res == nullptr)
@@ -1582,6 +1588,7 @@ static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
// / IfTypeExpr
// / INTEGER
// / KEYWORD_comptime TypeExpr
// / KEYWORD_noasync TypeExpr
// / KEYWORD_error DOT IDENTIFIER
// / KEYWORD_false
// / KEYWORD_null
@@ -1683,6 +1690,14 @@ static AstNode *ast_parse_primary_type_expr(ParseContext *pc) {
return res;
}
Token *noasync = eat_token_if(pc, TokenIdKeywordNoAsync);
if (noasync != nullptr) {
AstNode *expr = ast_expect(pc, ast_parse_type_expr);
AstNode *res = ast_create_node(pc, NodeTypeNoAsync, noasync);
res->data.noasync_expr.expr = expr;
return res;
}
Token *error = eat_token_if(pc, TokenIdKeywordError);
if (error != nullptr) {
Token *dot = expect_token(pc, TokenIdDot);
@@ -2599,14 +2614,10 @@ static AstNode *ast_parse_prefix_op(ParseContext *pc) {
return res;
}
Token *noasync_token = eat_token_if(pc, TokenIdKeywordNoAsync);
Token *await = eat_token_if(pc, TokenIdKeywordAwait);
if (await != nullptr) {
AstNode *res = ast_create_node(pc, NodeTypeAwaitExpr, await);
res->data.await_expr.noasync_token = noasync_token;
return res;
} else if (noasync_token != nullptr) {
put_back_token(pc);
}
return nullptr;
@@ -3125,6 +3136,9 @@ void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *cont
case NodeTypeCompTime:
visit_field(&node->data.comptime_expr.expr, visit, context);
break;
case NodeTypeNoAsync:
visit_field(&node->data.noasync_expr.expr, visit, context);
break;
case NodeTypeBreak:
// none
break;
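
On the stage1 side, ast_parse_suffix_expr now special-cases only `async`; a `noasync` call is no longer folded into the call node as CallModifierNoAsync at parse time, but becomes a NodeTypeNoAsync node wrapping an ordinary call. In Zig source terms (sketch; `foo` is a placeholder):

    const a = async foo();   // still handled in ast_parse_suffix_expr, call gets CallModifierAsync
    const b = noasync foo(); // now a NodeTypeNoAsync node wrapping a plain NodeTypeFnCallExpr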