std-c ifstmt compoundstmt and errors

This commit is contained in:
Vexu 2020-01-04 22:27:05 +02:00
parent a20c0b31de
commit dccf1247b2
No known key found for this signature in database
GPG Key ID: 59AEB8936E16A6AC
3 changed files with 211 additions and 10 deletions

View File

@ -26,21 +26,43 @@ pub const Tree = struct {
};
pub const Error = union(enum) {
InvalidToken: InvalidToken,
InvalidToken: SingleTokenError("Invalid token '{}'"),
ExpectedToken: ExpectedToken,
ExpectedExpr: SingleTokenError("Expected expression, found '{}'"),
ExpectedStmt: SingleTokenError("Expected statement, found '{}'"),
/// Writes a human-readable message for this error to `stream`,
/// dispatching to the active variant's own `render` implementation.
/// `tokens` is the token list of the tree the error was recorded against.
pub fn render(self: *const Error, tokens: *Tree.TokenList, stream: var) !void {
switch (self.*) {
// Every variant exposes `render(tokens, stream)`; capture by pointer to
// avoid copying the payload.
.InvalidToken => |*x| return x.render(tokens, stream),
.ExpectedToken => |*x| return x.render(tokens, stream),
.ExpectedExpr => |*x| return x.render(tokens, stream),
.ExpectedStmt => |*x| return x.render(tokens, stream),
}
}
/// Returns the index of the token this error was reported at.
/// Every variant carries a `token` field, so this is a plain field read
/// on whichever payload is active.
pub fn loc(self: *const Error) TokenIndex {
switch (self.*) {
.InvalidToken => |x| return x.token,
.ExpectedToken => |x| return x.token,
.ExpectedExpr => |x| return x.token,
.ExpectedStmt => |x| return x.token,
}
}
pub const InvalidToken = SingleTokenError("Invalid token '{}'");
/// Error payload for "expected token X, found Y" diagnostics.
pub const ExpectedToken = struct {
    /// Index of the token that was actually encountered.
    token: TokenIndex,
    /// Tag of the token kind the parser wanted to see.
    expected_id: @TagType(Token.Id),

    /// Renders the diagnostic to `stream`, special-casing invalid bytes
    /// (for which no token name exists).
    pub fn render(self: *const ExpectedToken, tokens: *Tree.TokenList, stream: var) !void {
        const actual = tokens.at(self.token);
        if (actual.id != .Invalid) {
            return stream.print("expected '{}', found '{}'", .{ self.expected_id.symbol(), actual.id.symbol() });
        }
        return stream.print("expected '{}', found invalid bytes", .{self.expected_id.symbol()});
    }
};
fn SingleTokenError(comptime msg: []const u8) type {
return struct {
@ -62,6 +84,8 @@ pub const Node = struct {
JumpStmt,
ExprStmt,
Label,
CompoundStmt,
IfStmt,
};
pub const Root = struct {
@ -73,7 +97,7 @@ pub const Node = struct {
};
pub const JumpStmt = struct {
base: Node = Node{ .id = .JumpStmt},
base: Node = Node{ .id = .JumpStmt },
ltoken: TokenIndex,
kind: Kind,
semicolon: TokenIndex,
@ -87,14 +111,33 @@ pub const Node = struct {
};
pub const ExprStmt = struct {
base: Node = Node{ .id = .ExprStmt},
base: Node = Node{ .id = .ExprStmt },
expr: ?*Node,
semicolon: TokenIndex,
};
pub const Label = struct {
base: Node = Node{ .id = .Label},
base: Node = Node{ .id = .Label },
identifier: TokenIndex,
colon: TokenIndex,
};
/// AST node for a brace-delimited block: `{ (Declaration | Stmt)* }`.
pub const CompoundStmt = struct {
base: Node = Node{ .id = .CompoundStmt },
// Token index of the opening `{`.
lbrace: TokenIndex,
// Declarations and statements in source order.
statements: StmtList,
// Token index of the closing `}`.
rbrace: TokenIndex,
// Reuses the same segmented-list type as the root declaration list.
pub const StmtList = Root.DeclList;
};
/// AST node for `if (cond) body` with an optional `else` clause.
pub const IfStmt = struct {
    base: Node = Node{ .id = .IfStmt },
    /// Token index of the `if` keyword.
    @"if": TokenIndex,
    /// The parenthesized condition expression.
    cond: *Node,
    /// The statement executed when `cond` is true. The original struct had
    /// no field for the then-branch at all, so the node could not represent
    /// `if (c) s;` — the parser had nowhere to store the body.
    body: *Node,
    /// Present only when an `else` clause was written.
    @"else": ?struct {
        tok: TokenIndex,
        stmt: *Node,
    },
};
};

View File

@ -284,7 +284,19 @@ const Parser = struct {
fn designator(parser: *Parser) !*Node {}
/// CompoundStmt <- LBRACE (Declaration / Stmt)* RBRACE
fn compoundStmt(parser: *Parser) !?*Node {}
/// Parses `LBRACE (Declaration / Stmt)* RBRACE`.
/// Returns null (without consuming anything) when the next token is not `{`.
fn compoundStmt(parser: *Parser) !?*Node {
    const lbrace = parser.eatToken(.LBrace) orelse return null;
    const node = try parser.arena.create(Node.CompoundStmt);
    node.* = .{
        .lbrace = lbrace,
        // Fixed: StmtList is declared on CompoundStmt, not JumpStmt.
        .statements = Node.CompoundStmt.StmtList.init(parser.arena),
        .rbrace = undefined,
    };
    // Fixed: declaration()/stmt() return error unions, so they must be
    // unwrapped with `try` before `orelse`; the capture is renamed to
    // `child` so it no longer shadows the outer `node`.
    while ((try parser.declaration()) orelse (try parser.stmt())) |child|
        try node.statements.push(child);
    node.rbrace = try parser.expectToken(.RBrace);
    return &node.base;
}
/// Stmt
/// <- CompoundStmt
@ -303,7 +315,27 @@ const Parser = struct {
/// / ExprStmt
/// Parses a single statement; returns null if no statement starts here.
/// NOTE(review): definition continues past this hunk — review notes only.
fn stmt(parser: *Parser) !?*Node {
// NOTE(review): compoundStmt returns !?*Node; this capture is missing a
// `try` to unwrap the error union first.
if (parser.compoundStmt()) |node| return node;
// if (parser.eatToken(.Keyword_if)) |tok| {}
if (parser.eatToken(.Keyword_if)) |tok| {
const node = try parser.arena.create(Node.IfStmt);
_ = try parser.expectToken(.LParen);
node.* = .{
.@"if" = tok,
.cond = try parser.expect(expr, .{
// NOTE(review): `it.index` is not in scope here — presumably
// parser.it.index; confirm against the Parser struct.
.ExpectedExpr = .{ .token = it.index },
}),
.@"else" = null,
};
_ = try parser.expectToken(.RParen);
// NOTE(review): the then-branch statement is never parsed or stored —
// IfStmt has no body field and nothing is consumed between `)` and
// the optional `else`.
if (parser.eatToken(.Keyword_else)) |else_tok| {
node.@"else" = .{
.tok = else_tok,
// NOTE(review): stmt takes only `parser`; this two-argument call
// looks like it was meant to be `parser.expect(stmt, .{...})`.
.stmt = try parser.stmt(expr, .{
.ExpectedStmt = .{ .token = it.index },
}),
};
}
return &node.base;
}
// if (parser.eatToken(.Keyword_switch)) |tok| {}
// if (parser.eatToken(.Keyword_while)) |tok| {}
// if (parser.eatToken(.Keyword_do)) |tok| {}
@ -407,4 +439,15 @@ const Parser = struct {
return;
}
}
/// Runs `parseFn`; if it produces no node, records `err` in the tree's
/// error list and fails with error.ParseError. Errors from `parseFn`
/// itself propagate unchanged.
fn expect(
    parser: *Parser,
    parseFn: fn (*Parser) Error!?*Node,
    err: ast.Error, // recorded if parsing fails
) Error!*Node {
    // Fixed: the original called parseFn(arena, it, tree), but none of
    // those names are in scope and parseFn's type takes a single *Parser.
    return (try parseFn(parser)) orelse {
        try parser.tree.errors.push(err);
        return error.ParseError;
    };
}
};

View File

@ -6,7 +6,7 @@ pub const Source = struct {
file_name: []const u8,
tokens: TokenList,
pub const TokenList = SegmentedList(Token, 64);
pub const TokenList = std.SegmentedList(Token, 64);
};
pub const Token = struct {
@ -134,6 +134,121 @@ pub const Token = struct {
Keyword_ifndef,
Keyword_error,
Keyword_pragma,
/// Returns the canonical source spelling of a token kind (keywords and
/// punctuators) or a descriptive name for kinds with no fixed spelling
/// (Identifier, literals, comments, Eof, ...).
/// NOTE(review): parameter is `tok: Token`, but the call sites in the AST
/// error renderers invoke `.symbol()` on an Id/tag value
/// (`found_token.id.symbol()`, `self.expected_id.symbol()`) — a type
/// mismatch. The fix depends on whether Id is a union(enum) (suggested by
/// `@TagType(Token.Id)` elsewhere); confirm and take the id directly.
pub fn symbol(tok: Token) []const u8 {
return switch (tok.id) {
.Invalid => "Invalid",
.Eof => "Eof",
.Nl => "NewLine",
.Identifier => "Identifier",
.MacroString => "MacroString",
.StringLiteral => "StringLiteral",
.CharLiteral => "CharLiteral",
.IntegerLiteral => "IntegerLiteral",
.FloatLiteral => "FloatLiteral",
.LineComment => "LineComment",
.MultiLineComment => "MultiLineComment",
.Bang => "!",
.BangEqual => "!=",
.Pipe => "|",
.PipePipe => "||",
.PipeEqual => "|=",
.Equal => "=",
.EqualEqual => "==",
.LParen => "(",
.RParen => ")",
.LBrace => "{",
.RBrace => "}",
.LBracket => "[",
.RBracket => "]",
.Period => ".",
.Ellipsis => "...",
.Caret => "^",
.CaretEqual => "^=",
.Plus => "+",
.PlusPlus => "++",
.PlusEqual => "+=",
.Minus => "-",
.MinusMinus => "--",
.MinusEqual => "-=",
.Asterisk => "*",
.AsteriskEqual => "*=",
.Percent => "%",
.PercentEqual => "%=",
.Arrow => "->",
.Colon => ":",
.Semicolon => ";",
.Slash => "/",
.SlashEqual => "/=",
.Comma => ",",
.Ampersand => "&",
.AmpersandAmpersand => "&&",
.AmpersandEqual => "&=",
.QuestionMark => "?",
.AngleBracketLeft => "<",
.AngleBracketLeftEqual => "<=",
.AngleBracketAngleBracketLeft => "<<",
.AngleBracketAngleBracketLeftEqual => "<<=",
.AngleBracketRight => ">",
.AngleBracketRightEqual => ">=",
.AngleBracketAngleBracketRight => ">>",
.AngleBracketAngleBracketRightEqual => ">>=",
.Tilde => "~",
.Hash => "#",
.HashHash => "##",
.Keyword_auto => "auto",
.Keyword_break => "break",
.Keyword_case => "case",
.Keyword_char => "char",
.Keyword_const => "const",
.Keyword_continue => "continue",
.Keyword_default => "default",
.Keyword_do => "do",
.Keyword_double => "double",
.Keyword_else => "else",
.Keyword_enum => "enum",
.Keyword_extern => "extern",
.Keyword_float => "float",
.Keyword_for => "for",
.Keyword_goto => "goto",
.Keyword_if => "if",
.Keyword_int => "int",
.Keyword_long => "long",
.Keyword_register => "register",
.Keyword_return => "return",
.Keyword_short => "short",
.Keyword_signed => "signed",
.Keyword_sizeof => "sizeof",
.Keyword_static => "static",
.Keyword_struct => "struct",
.Keyword_switch => "switch",
.Keyword_typedef => "typedef",
.Keyword_union => "union",
.Keyword_unsigned => "unsigned",
.Keyword_void => "void",
.Keyword_volatile => "volatile",
.Keyword_while => "while",
.Keyword_bool => "_Bool",
.Keyword_complex => "_Complex",
.Keyword_imaginary => "_Imaginary",
.Keyword_inline => "inline",
.Keyword_restrict => "restrict",
.Keyword_alignas => "_Alignas",
.Keyword_alignof => "_Alignof",
.Keyword_atomic => "_Atomic",
.Keyword_generic => "_Generic",
.Keyword_noreturn => "_Noreturn",
.Keyword_static_assert => "_Static_assert",
.Keyword_thread_local => "_Thread_local",
.Keyword_include => "include",
.Keyword_define => "define",
.Keyword_ifdef => "ifdef",
.Keyword_ifndef => "ifndef",
.Keyword_error => "error",
.Keyword_pragma => "pragma",
};
}
};
pub const Keyword = struct {
@ -1121,8 +1236,7 @@ pub const Tokenizer = struct {
}
} else if (self.index == self.source.buffer.len) {
switch (state) {
.AfterStringLiteral,
.Start => {},
.AfterStringLiteral, .Start => {},
.u, .u8, .U, .L, .Identifier => {
result.id = Token.getKeyword(self.source.buffer[result.start..self.index], self.prev_tok_id == .Hash and !self.pp_directive) orelse .Identifier;
},
@ -1416,6 +1530,7 @@ fn expectTokens(source: []const u8, expected_tokens: []const Token.Id) void {
.source = &Source{
.buffer = source,
.file_name = undefined,
.tokens = undefined,
},
};
for (expected_tokens) |expected_token_id| {