std.zig.tokenizer: fixed failing tests

Jimmi Holst Christensen 2018-04-10 11:35:41 +02:00
parent f85b9f2bf3
commit 34af38e09b


@@ -613,17 +613,25 @@ pub const Tokenizer = struct {
                 '\\' => {
                     state = State.CharLiteralBackslash;
                 },
-                '\'' => break, // Look for this error later.
+                '\'' => {
+                    result.id = Token.Id.Invalid;
+                    break;
+                },
                 else => {
-                    if (c < 0x20 or c == 0x7f)
-                        break; // Look for this error later.
+                    if (c < 0x20 or c == 0x7f) {
+                        result.id = Token.Id.Invalid;
+                        break;
+                    }
                     state = State.CharLiteralEnd;
                 }
             },
             State.CharLiteralBackslash => switch (c) {
-                '\n' => break, // Look for this error later.
+                '\n' => {
+                    result.id = Token.Id.Invalid;
+                    break;
+                },
                 else => {
                     state = State.CharLiteralEnd;
                 },
@@ -635,7 +643,10 @@ pub const Tokenizer = struct {
                     self.index += 1;
                     break;
                 },
-                else => break, // Look for this error later.
+                else => {
+                    result.id = Token.Id.Invalid;
+                    break;
+                },
             },
             State.MultilineStringLiteralLine => switch (c) {
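
Outside of the diff, a rough sketch of what these first hunks change: char literal errors such as an empty literal are now reported as an explicit Invalid token at the point the stray quote is seen, instead of being deferred with a "look for this error later" comment. The test name below is hypothetical; Tokenizer, Token, std.debug.assert, and the @TagType comparison are the same ones used by testTokenize further down in this file.

test "tokenizer - empty char literal yields Invalid" {
    // In "''", the second '\'' hits the new '\'' arm of State.CharLiteral,
    // which sets result.id = Token.Id.Invalid before breaking out of the loop.
    var tokenizer = Tokenizer.init("''");
    const first = tokenizer.next();
    std.debug.assert(@TagType(Token.Id)(first.id) == @TagType(Token.Id)(Token.Id.Invalid));
}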
@@ -903,7 +914,6 @@ pub const Tokenizer = struct {
             State.FloatExponentNumber,
             State.StringLiteral, // find this error later
             State.MultilineStringLiteralLine,
-            State.CharLiteralEnd,
             State.Builtin => {},
             State.Identifier => {
@@ -922,6 +932,7 @@ pub const Tokenizer = struct {
             State.MultilineStringLiteralLineBackslash,
             State.CharLiteral,
             State.CharLiteralBackslash,
+            State.CharLiteralEnd,
             State.StringLiteralBackslash => {
                 result.id = Token.Id.Invalid;
             },
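
The two hunks above move State.CharLiteralEnd out of the group of states that are fine at end of buffer and into the group that sets Token.Id.Invalid, so a char literal that reaches EOF after its character but before the closing quote is now an error. A minimal sketch of the resulting behaviour, mirroring the existing "'c" case in the tests below (test name hypothetical, everything else as used in this file):

test "tokenizer - unterminated char literal yields Invalid" {
    // "'c" reaches the end of the buffer while still in State.CharLiteralEnd,
    // which now falls into the branch that sets result.id = Token.Id.Invalid.
    var tokenizer = Tokenizer.init("'c");
    const token = tokenizer.next();
    std.debug.assert(@TagType(Token.Id)(token.id) == @TagType(Token.Id)(Token.Id.Invalid));
}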
@@ -1073,7 +1084,7 @@ test "tokenizer - invalid token characters" {
     testTokenize("`", []Token.Id{Token.Id.Invalid});
     testTokenize("'c", []Token.Id {Token.Id.Invalid});
     testTokenize("'", []Token.Id {Token.Id.Invalid});
-    testTokenize("''", []Token.Id {Token.Id.Invalid});
+    testTokenize("''", []Token.Id {Token.Id.Invalid, Token.Id.Invalid});
 }
 test "tokenizer - invalid literal/comment characters" {
@@ -1147,6 +1158,7 @@ fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void {
     var tokenizer = Tokenizer.init(source);
     for (expected_tokens) |expected_token_id| {
         const token = tokenizer.next();
+        std.debug.warn("{} {}\n", @tagName(expected_token_id), @tagName(token.id));
         std.debug.assert(@TagType(Token.Id)(token.id) == @TagType(Token.Id)(expected_token_id));
         switch (expected_token_id) {
             Token.Id.StringLiteral => |expected_kind| {