std.zig.parser now supports all infix operators

Jimmi Holst Christensen 2018-03-29 22:31:17 +02:00
parent b80398b355
commit 530f795769
3 changed files with 535 additions and 62 deletions
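As a quick orientation (not part of the commit), the following hand-written Zig snippets cover the operator families the parser and renderer now handle; identifiers such as maybe_val are placeholders:

var i: u32 = 1;
i <<= 2;                       // compound assignments: <<=, >>=, +%=, -%=, *%=, ...
const sum = i +% 3;            // wrapping arithmetic: +%, -%, *%
const cat = "ab" ++ "cd";      // array concatenation and repetition: ++, **
const cmp = i == 4 and i != 5; // comparisons plus the keyword operators and/or
const val = maybe_val ?? 0;    // ?? unwraps a nullable (pre-0.3 syntax)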


@@ -20,6 +20,7 @@ pub const Node = struct {
IntegerLiteral,
FloatLiteral,
StringLiteral,
UndefinedLiteral,
BuiltinCall,
LineComment,
TestDecl,
@@ -38,6 +39,7 @@ pub const Node = struct {
Id.IntegerLiteral => @fieldParentPtr(NodeIntegerLiteral, "base", base).iterate(index),
Id.FloatLiteral => @fieldParentPtr(NodeFloatLiteral, "base", base).iterate(index),
Id.StringLiteral => @fieldParentPtr(NodeStringLiteral, "base", base).iterate(index),
Id.UndefinedLiteral => @fieldParentPtr(NodeUndefinedLiteral, "base", base).iterate(index),
Id.BuiltinCall => @fieldParentPtr(NodeBuiltinCall, "base", base).iterate(index),
Id.LineComment => @fieldParentPtr(NodeLineComment, "base", base).iterate(index),
Id.TestDecl => @fieldParentPtr(NodeTestDecl, "base", base).iterate(index),
@@ -57,6 +59,7 @@ pub const Node = struct {
Id.IntegerLiteral => @fieldParentPtr(NodeIntegerLiteral, "base", base).firstToken(),
Id.FloatLiteral => @fieldParentPtr(NodeFloatLiteral, "base", base).firstToken(),
Id.StringLiteral => @fieldParentPtr(NodeStringLiteral, "base", base).firstToken(),
Id.UndefinedLiteral => @fieldParentPtr(NodeUndefinedLiteral, "base", base).firstToken(),
Id.BuiltinCall => @fieldParentPtr(NodeBuiltinCall, "base", base).firstToken(),
Id.LineComment => @fieldParentPtr(NodeLineComment, "base", base).firstToken(),
Id.TestDecl => @fieldParentPtr(NodeTestDecl, "base", base).firstToken(),
@@ -76,6 +79,7 @@ pub const Node = struct {
Id.IntegerLiteral => @fieldParentPtr(NodeIntegerLiteral, "base", base).lastToken(),
Id.FloatLiteral => @fieldParentPtr(NodeFloatLiteral, "base", base).lastToken(),
Id.StringLiteral => @fieldParentPtr(NodeStringLiteral, "base", base).lastToken(),
Id.UndefinedLiteral => @fieldParentPtr(NodeUndefinedLiteral, "base", base).lastToken(),
Id.BuiltinCall => @fieldParentPtr(NodeBuiltinCall, "base", base).lastToken(),
Id.LineComment => @fieldParentPtr(NodeLineComment, "base", base).lastToken(),
Id.TestDecl => @fieldParentPtr(NodeTestDecl, "base", base).lastToken(),
@@ -309,9 +313,47 @@ pub const NodeInfixOp = struct {
rhs: &Node,
const InfixOp = enum {
EqualEqual,
Add,
AddWrap,
ArrayCat,
ArrayMult,
Assign,
AssignBitAnd,
AssignBitOr,
AssignBitShiftLeft,
AssignBitShiftRight,
AssignBitXor,
AssignDiv,
AssignMinus,
AssignMinusWrap,
AssignMod,
AssignPlus,
AssignPlusWrap,
AssignTimes,
AssignTimesWarp,
BangEqual,
BitAnd,
BitOr,
BitShiftLeft,
BitShiftRight,
BitXor,
BoolAnd,
BoolOr,
Div,
EqualEqual,
ErrorUnion,
GreaterOrEqual,
GreaterThan,
LessOrEqual,
LessThan,
MergeErrorSets,
Mod,
Mult,
MultWrap,
Period,
Sub,
SubWrap,
UnwrapMaybe,
};
pub fn iterate(self: &NodeInfixOp, index: usize) ?&Node {
@@ -464,6 +506,23 @@ pub const NodeStringLiteral = struct {
}
};
pub const NodeUndefinedLiteral = struct {
base: Node,
token: Token,
pub fn iterate(self: &NodeUndefinedLiteral, index: usize) ?&Node {
return null;
}
pub fn firstToken(self: &NodeUndefinedLiteral) Token {
return self.token;
}
pub fn lastToken(self: &NodeUndefinedLiteral) Token {
return self.token;
}
};
pub const NodeLineComment = struct {
base: Node,
lines: ArrayList(Token),


@@ -371,6 +371,13 @@ pub const Parser = struct {
try stack.append(State.AfterOperand);
continue;
},
Token.Id.Keyword_undefined => {
try stack.append(State {
.Operand = &(try self.createUndefined(arena, token)).base
});
try stack.append(State.AfterOperand);
continue;
},
Token.Id.Builtin => {
const node = try arena.create(ast.NodeBuiltinCall);
*node = ast.NodeBuiltinCall {
@@ -414,56 +421,41 @@ pub const Parser = struct {
// or a postfix operator (like () or {}),
// otherwise this expression is done (like on a ; or else).
var token = self.getNextToken();
switch (token.id) {
Token.Id.EqualEqual => {
if (tokenIdToInfixOp(token.id)) |infix_id| {
try stack.append(State {
.InfixOp = try self.createInfixOp(arena, token, ast.NodeInfixOp.InfixOp.EqualEqual)
.InfixOp = try self.createInfixOp(arena, token, infix_id)
});
try stack.append(State.ExpectOperand);
continue;
},
Token.Id.BangEqual => {
try stack.append(State {
.InfixOp = try self.createInfixOp(arena, token, ast.NodeInfixOp.InfixOp.BangEqual)
});
try stack.append(State.ExpectOperand);
continue;
},
Token.Id.Period => {
try stack.append(State {
.InfixOp = try self.createInfixOp(arena, token, ast.NodeInfixOp.InfixOp.Period)
});
try stack.append(State.ExpectOperand);
continue;
},
else => {
// no postfix/infix operator after this operand.
self.putBackToken(token);
// reduce the stack
var expression: &ast.Node = stack.pop().Operand;
while (true) {
switch (stack.pop()) {
State.Expression => |dest_ptr| {
// we're done
try dest_ptr.store(expression);
break;
},
State.InfixOp => |infix_op| {
infix_op.rhs = expression;
infix_op.lhs = stack.pop().Operand;
expression = &infix_op.base;
continue;
},
State.PrefixOp => |prefix_op| {
prefix_op.rhs = expression;
expression = &prefix_op.base;
continue;
},
else => unreachable,
}
// TODO: Parse postfix operator
} else {
// no postfix/infix operator after this operand.
self.putBackToken(token);
// reduce the stack
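// Pop the finished operand, then fold it into any pending prefix/infix
// operators on the stack until the Expression state that requested the
// value is reached and the result is stored through its dest_ptr.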
var expression: &ast.Node = stack.pop().Operand;
while (true) {
switch (stack.pop()) {
State.Expression => |dest_ptr| {
// we're done
try dest_ptr.store(expression);
break;
},
State.InfixOp => |infix_op| {
infix_op.rhs = expression;
infix_op.lhs = stack.pop().Operand;
expression = &infix_op.base;
continue;
},
State.PrefixOp => |prefix_op| {
prefix_op.rhs = expression;
expression = &prefix_op.base;
continue;
},
else => unreachable,
}
continue;
},
}
continue;
}
},
@@ -706,6 +698,53 @@ pub const Parser = struct {
}
}
fn tokenIdToInfixOp(id: &const Token.Id) ?ast.NodeInfixOp.InfixOp {
return switch (*id) {
Token.Id.Ampersand => ast.NodeInfixOp.InfixOp.BitAnd,
Token.Id.AmpersandEqual => ast.NodeInfixOp.InfixOp.AssignBitAnd,
Token.Id.AngleBracketAngleBracketLeft => ast.NodeInfixOp.InfixOp.BitShiftLeft,
Token.Id.AngleBracketAngleBracketLeftEqual => ast.NodeInfixOp.InfixOp.AssignBitShiftLeft,
Token.Id.AngleBracketAngleBracketRight => ast.NodeInfixOp.InfixOp.BitShiftRight,
Token.Id.AngleBracketAngleBracketRightEqual => ast.NodeInfixOp.InfixOp.AssignBitShiftRight,
Token.Id.AngleBracketLeft => ast.NodeInfixOp.InfixOp.LessThan,
Token.Id.AngleBracketLeftEqual => ast.NodeInfixOp.InfixOp.LessOrEqual,
Token.Id.AngleBracketRight => ast.NodeInfixOp.InfixOp.GreaterThan,
Token.Id.AngleBracketRightEqual => ast.NodeInfixOp.InfixOp.GreaterOrEqual,
Token.Id.Asterisk => ast.NodeInfixOp.InfixOp.Mult,
Token.Id.AsteriskAsterisk => ast.NodeInfixOp.InfixOp.ArrayMult,
Token.Id.AsteriskEqual => ast.NodeInfixOp.InfixOp.AssignTimes,
Token.Id.AsteriskPercent => ast.NodeInfixOp.InfixOp.MultWrap,
Token.Id.AsteriskPercentEqual => ast.NodeInfixOp.InfixOp.AssignTimesWarp,
Token.Id.Bang => ast.NodeInfixOp.InfixOp.ErrorUnion,
Token.Id.BangEqual => ast.NodeInfixOp.InfixOp.BangEqual,
Token.Id.Caret => ast.NodeInfixOp.InfixOp.BitXor,
Token.Id.CaretEqual => ast.NodeInfixOp.InfixOp.AssignBitXor,
Token.Id.Equal => ast.NodeInfixOp.InfixOp.Assign,
Token.Id.EqualEqual => ast.NodeInfixOp.InfixOp.EqualEqual,
Token.Id.Keyword_and => ast.NodeInfixOp.InfixOp.BoolAnd,
Token.Id.Keyword_or => ast.NodeInfixOp.InfixOp.BoolOr,
Token.Id.Minus => ast.NodeInfixOp.InfixOp.Sub,
Token.Id.MinusEqual => ast.NodeInfixOp.InfixOp.AssignMinus,
Token.Id.MinusPercent => ast.NodeInfixOp.InfixOp.SubWrap,
Token.Id.MinusPercentEqual => ast.NodeInfixOp.InfixOp.AssignMinusWrap,
Token.Id.Percent => ast.NodeInfixOp.InfixOp.Mod,
Token.Id.PercentEqual => ast.NodeInfixOp.InfixOp.AssignMod,
Token.Id.Period => ast.NodeInfixOp.InfixOp.Period,
Token.Id.Pipe => ast.NodeInfixOp.InfixOp.BitOr,
Token.Id.PipeEqual => ast.NodeInfixOp.InfixOp.AssignBitOr,
Token.Id.PipePipe => ast.NodeInfixOp.InfixOp.MergeErrorSets,
Token.Id.Plus => ast.NodeInfixOp.InfixOp.Add,
Token.Id.PlusEqual => ast.NodeInfixOp.InfixOp.AssignPlus,
Token.Id.PlusPercent => ast.NodeInfixOp.InfixOp.AddWrap,
Token.Id.PlusPercentEqual => ast.NodeInfixOp.InfixOp.AssignPlusWrap,
Token.Id.PlusPlus => ast.NodeInfixOp.InfixOp.ArrayCat,
Token.Id.QuestionMarkQuestionMark => ast.NodeInfixOp.InfixOp.UnwrapMaybe,
Token.Id.Slash => ast.NodeInfixOp.InfixOp.Div,
Token.Id.SlashEqual => ast.NodeInfixOp.InfixOp.AssignDiv,
else => null,
};
}
fn initNode(self: &Parser, id: ast.Node.Id) ast.Node {
if (self.pending_line_comment_node) |comment_node| {
self.pending_line_comment_node = null;
@@ -867,6 +906,16 @@ pub const Parser = struct {
return node;
}
fn createUndefined(self: &Parser, arena: &mem.Allocator, token: &const Token) !&ast.NodeUndefinedLiteral {
const node = try arena.create(ast.NodeUndefinedLiteral);
*node = ast.NodeUndefinedLiteral {
.base = self.initNode(ast.Node.Id.UndefinedLiteral),
.token = *token,
};
return node;
}
fn createAttachIdentifier(self: &Parser, arena: &mem.Allocator, dest_ptr: &const DestPtr, name_token: &const Token) !&ast.NodeIdentifier {
const node = try self.createIdentifier(arena, name_token);
try dest_ptr.store(&node.base);
@@ -1173,17 +1222,51 @@ pub const Parser = struct {
ast.Node.Id.InfixOp => {
const prefix_op_node = @fieldParentPtr(ast.NodeInfixOp, "base", base);
try stack.append(RenderState { .Expression = prefix_op_node.rhs });
switch (prefix_op_node.op) {
ast.NodeInfixOp.InfixOp.EqualEqual => {
try stack.append(RenderState { .Text = " == "});
},
ast.NodeInfixOp.InfixOp.BangEqual => {
try stack.append(RenderState { .Text = " != "});
},
ast.NodeInfixOp.InfixOp.Period => {
try stack.append(RenderState { .Text = "."});
},
}
const text = switch (prefix_op_node.op) {
ast.NodeInfixOp.InfixOp.Add => " + ",
ast.NodeInfixOp.InfixOp.AddWrap => " +% ",
ast.NodeInfixOp.InfixOp.ArrayCat => " ++ ",
ast.NodeInfixOp.InfixOp.ArrayMult => " ** ",
ast.NodeInfixOp.InfixOp.Assign => " = ",
ast.NodeInfixOp.InfixOp.AssignBitAnd => " &= ",
ast.NodeInfixOp.InfixOp.AssignBitOr => " |= ",
ast.NodeInfixOp.InfixOp.AssignBitShiftLeft => " <<= ",
ast.NodeInfixOp.InfixOp.AssignBitShiftRight => " >>= ",
ast.NodeInfixOp.InfixOp.AssignBitXor => " ^= ",
ast.NodeInfixOp.InfixOp.AssignDiv => " /= ",
ast.NodeInfixOp.InfixOp.AssignMinus => " -= ",
ast.NodeInfixOp.InfixOp.AssignMinusWrap => " -%= ",
ast.NodeInfixOp.InfixOp.AssignMod => " %= ",
ast.NodeInfixOp.InfixOp.AssignPlus => " += ",
ast.NodeInfixOp.InfixOp.AssignPlusWrap => " +%= ",
ast.NodeInfixOp.InfixOp.AssignTimes => " *= ",
ast.NodeInfixOp.InfixOp.AssignTimesWarp => " *%= ",
ast.NodeInfixOp.InfixOp.BangEqual => " != ",
ast.NodeInfixOp.InfixOp.BitAnd => " & ",
ast.NodeInfixOp.InfixOp.BitOr => " | ",
ast.NodeInfixOp.InfixOp.BitShiftLeft => " << ",
ast.NodeInfixOp.InfixOp.BitShiftRight => " >> ",
ast.NodeInfixOp.InfixOp.BitXor => " ^ ",
ast.NodeInfixOp.InfixOp.BoolAnd => " and ",
ast.NodeInfixOp.InfixOp.BoolOr => " or ",
ast.NodeInfixOp.InfixOp.Div => " / ",
ast.NodeInfixOp.InfixOp.EqualEqual => " == ",
ast.NodeInfixOp.InfixOp.ErrorUnion => "!",
ast.NodeInfixOp.InfixOp.GreaterOrEqual => " >= ",
ast.NodeInfixOp.InfixOp.GreaterThan => " > ",
ast.NodeInfixOp.InfixOp.LessOrEqual => " <= ",
ast.NodeInfixOp.InfixOp.LessThan => " < ",
ast.NodeInfixOp.InfixOp.MergeErrorSets => " || ",
ast.NodeInfixOp.InfixOp.Mod => " % ",
ast.NodeInfixOp.InfixOp.Mult => " * ",
ast.NodeInfixOp.InfixOp.MultWrap => " *% ",
ast.NodeInfixOp.InfixOp.Period => ".",
ast.NodeInfixOp.InfixOp.Sub => " - ",
ast.NodeInfixOp.InfixOp.SubWrap => " -% ",
ast.NodeInfixOp.InfixOp.UnwrapMaybe => " ?? ",
};
try stack.append(RenderState { .Text = text });
try stack.append(RenderState { .Expression = prefix_op_node.lhs });
},
ast.Node.Id.PrefixOp => {
@@ -1224,6 +1307,10 @@ pub const Parser = struct {
const string_literal = @fieldParentPtr(ast.NodeStringLiteral, "base", base);
try stream.print("{}", self.tokenizer.getTokenSlice(string_literal.token));
},
ast.Node.Id.UndefinedLiteral => {
const undefined_literal = @fieldParentPtr(ast.NodeUndefinedLiteral, "base", base);
try stream.print("{}", self.tokenizer.getTokenSlice(undefined_literal.token));
},
ast.Node.Id.BuiltinCall => {
const builtin_call = @fieldParentPtr(ast.NodeBuiltinCall, "base", base);
try stream.print("{}(", self.tokenizer.getTokenSlice(builtin_call.builtin_token));
@@ -1473,4 +1560,54 @@ test "zig fmt" {
\\}
\\
);
try testCanonical(
\\test "operators" {
\\ var i = undefined;
\\ i = 2;
\\ i *= 2;
\\ i |= 2;
\\ i ^= 2;
\\ i <<= 2;
\\ i >>= 2;
\\ i &= 2;
\\ i *= 2;
\\ i *%= 2;
\\ i -= 2;
\\ i -%= 2;
\\ i += 2;
\\ i +%= 2;
\\ i /= 2;
\\ i %= 2;
\\ _ = i == i;
\\ _ = i != i;
\\ _ = i != i;
\\ _ = i.i;
\\ _ = i || i;
\\ _ = i!i;
\\ _ = i ** i;
\\ _ = i ++ i;
\\ _ = i ?? i;
\\ _ = i % i;
\\ _ = i / i;
\\ _ = i *% i;
\\ _ = i * i;
\\ _ = i -% i;
\\ _ = i - i;
\\ _ = i +% i;
\\ _ = i + i;
\\ _ = i << i;
\\ _ = i >> i;
\\ _ = i & i;
\\ _ = i ^ i;
\\ _ = i | i;
\\ _ = i >= i;
\\ _ = i <= i;
\\ _ = i > i;
\\ _ = i < i;
\\ _ = i and i;
\\ _ = i or i;
\\}
\\
);
}


@@ -77,6 +77,7 @@ pub const Token = struct {
Builtin,
Bang,
Pipe,
PipePipe,
PipeEqual,
Equal,
EqualEqual,
@@ -85,18 +86,45 @@ pub const Token = struct {
RParen,
Semicolon,
Percent,
PercentEqual,
LBrace,
RBrace,
Period,
Ellipsis2,
Ellipsis3,
Caret,
CaretEqual,
Plus,
PlusPlus,
PlusEqual,
PlusPercent,
PlusPercentEqual,
Minus,
MinusEqual,
MinusPercent,
MinusPercentEqual,
Asterisk,
AsteriskEqual,
AsteriskAsterisk,
AsteriskPercent,
AsteriskPercentEqual,
Arrow,
Colon,
Slash,
SlashEqual,
Comma,
Ampersand,
AmpersandEqual,
QuestionMark,
QuestionMarkQuestionMark,
AngleBracketLeft,
AngleBracketLeftEqual,
AngleBracketAngleBracketLeft,
AngleBracketAngleBracketLeftEqual,
AngleBracketRight,
AngleBracketRightEqual,
AngleBracketAngleBracketRight,
AngleBracketAngleBracketRightEqual,
IntegerLiteral,
FloatLiteral,
LineComment,
@@ -200,6 +228,9 @@ pub const Tokenizer = struct {
Bang,
Pipe,
Minus,
MinusPercent,
Asterisk,
AsteriskPercent,
Slash,
LineComment,
Zero,
@@ -210,6 +241,15 @@ pub const Tokenizer = struct {
FloatExponentUnsigned,
FloatExponentNumber,
Ampersand,
Caret,
Percent,
QuestionMark,
Plus,
PlusPercent,
AngleBracketLeft,
AngleBracketAngleBracketLeft,
AngleBracketRight,
AngleBracketAngleBracketRight,
Period,
Period2,
SawAtSign,
@@ -291,9 +331,25 @@ pub const Tokenizer = struct {
break;
},
'%' => {
result.id = Token.Id.Percent;
self.index += 1;
break;
state = State.Percent;
},
'*' => {
state = State.Asterisk;
},
'+' => {
state = State.Plus;
},
'?' => {
state = State.QuestionMark;
},
'<' => {
state = State.AngleBracketLeft;
},
'>' => {
state = State.AngleBracketRight;
},
'^' => {
state = State.Caret;
},
'{' => {
result.id = Token.Id.LBrace;
@@ -356,6 +412,107 @@ pub const Tokenizer = struct {
break;
},
},
State.Asterisk => switch (c) {
'=' => {
result.id = Token.Id.AsteriskEqual;
self.index += 1;
break;
},
'*' => {
result.id = Token.Id.AsteriskAsterisk;
self.index += 1;
break;
},
'%' => {
state = State.AsteriskPercent;
},
else => {
result.id = Token.Id.Asterisk;
break;
}
},
State.AsteriskPercent => switch (c) {
'=' => {
result.id = Token.Id.AsteriskPercentEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.AsteriskPercent;
break;
}
},
State.QuestionMark => switch (c) {
'?' => {
result.id = Token.Id.QuestionMarkQuestionMark;
self.index += 1;
break;
},
else => {
result.id = Token.Id.QuestionMark;
break;
},
},
State.Percent => switch (c) {
'=' => {
result.id = Token.Id.PercentEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.Percent;
break;
},
},
State.Plus => switch (c) {
'=' => {
result.id = Token.Id.PlusEqual;
self.index += 1;
break;
},
'+' => {
result.id = Token.Id.PlusPlus;
self.index += 1;
break;
},
'%' => {
state = State.PlusPercent;
},
else => {
result.id = Token.Id.Plus;
break;
},
},
State.PlusPercent => switch (c) {
'=' => {
result.id = Token.Id.PlusPercentEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.PlusPercent;
break;
},
},
State.Caret => switch (c) {
'=' => {
result.id = Token.Id.CaretEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.Caret;
break;
}
},
State.Identifier => switch (c) {
'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
else => {
@@ -417,6 +574,11 @@ pub const Tokenizer = struct {
self.index += 1;
break;
},
'|' => {
result.id = Token.Id.PipePipe;
self.index += 1;
break;
},
else => {
result.id = Token.Id.Pipe;
break;
@@ -441,12 +603,86 @@ pub const Tokenizer = struct {
self.index += 1;
break;
},
'=' => {
result.id = Token.Id.MinusEqual;
self.index += 1;
break;
},
'%' => {
state = State.MinusPercent;
},
else => {
result.id = Token.Id.Minus;
break;
},
},
State.MinusPercent => switch (c) {
'=' => {
result.id = Token.Id.MinusPercentEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.MinusPercent;
break;
}
},
State.AngleBracketLeft => switch (c) {
'<' => {
state = State.AngleBracketAngleBracketLeft;
},
'=' => {
result.id = Token.Id.AngleBracketLeftEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.AngleBracketLeft;
break;
},
},
State.AngleBracketAngleBracketLeft => switch (c) {
'=' => {
result.id = Token.Id.AngleBracketAngleBracketLeftEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.AngleBracketAngleBracketLeft;
break;
},
},
State.AngleBracketRight => switch (c) {
'>' => {
state = State.AngleBracketAngleBracketRight;
},
'=' => {
result.id = Token.Id.AngleBracketRightEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.AngleBracketRight;
break;
},
},
State.AngleBracketAngleBracketRight => switch (c) {
'=' => {
result.id = Token.Id.AngleBracketAngleBracketRightEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.AngleBracketAngleBracketRight;
break;
},
},
State.Period => switch (c) {
'.' => {
state = State.Period2;
@@ -474,6 +710,11 @@ pub const Tokenizer = struct {
result.id = Token.Id.LineComment;
state = State.LineComment;
},
'=' => {
result.id = Token.Id.SlashEqual;
self.index += 1;
break;
},
else => {
result.id = Token.Id.Slash;
break;
@@ -609,6 +850,42 @@ pub const Tokenizer = struct {
State.Pipe => {
result.id = Token.Id.Pipe;
},
State.AngleBracketAngleBracketRight => {
result.id = Token.Id.AngleBracketAngleBracketRight;
},
State.AngleBracketRight => {
result.id = Token.Id.AngleBracketRight;
},
State.AngleBracketAngleBracketLeft => {
result.id = Token.Id.AngleBracketAngleBracketLeft;
},
State.AngleBracketLeft => {
result.id = Token.Id.AngleBracketLeft;
},
State.PlusPercent => {
result.id = Token.Id.PlusPercent;
},
State.Plus => {
result.id = Token.Id.Plus;
},
State.QuestionMark => {
result.id = Token.Id.QuestionMark;
},
State.Percent => {
result.id = Token.Id.Percent;
},
State.Caret => {
result.id = Token.Id.Caret;
},
State.AsteriskPercent => {
result.id = Token.Id.AsteriskPercent;
},
State.Asterisk => {
result.id = Token.Id.Asterisk;
},
State.MinusPercent => {
result.id = Token.Id.MinusPercent;
},
}
}
if (result.id == Token.Id.Eof) {
@@ -752,8 +1029,8 @@ test "tokenizer - string identifier and builtin fns" {
test "tokenizer - pipe and then invalid" {
testTokenize("||=", []Token.Id{
Token.Id.Pipe,
Token.Id.PipeEqual,
Token.Id.PipePipe,
Token.Id.Equal,
});
}
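A further case in the same style (an untested sketch, not part of this commit) showing how the new longest-match tokenizer states resolve multi-character operators:

test "tokenizer - compound operators (illustrative sketch)" {
    testTokenize("<<= +%= *% ??", []Token.Id{
        Token.Id.AngleBracketAngleBracketLeftEqual,
        Token.Id.PlusPercentEqual,
        Token.Id.AsteriskPercent,
        Token.Id.QuestionMarkQuestionMark,
    });
}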