Merge pull request #15139 from ziglang/macos-13-fixes (ziglang/zig)

Fixes for latest macOS 13.3 SDK release

commit 381dc2d950

lib/libc/darwin/libSystem.13.tbd (vendored): 879 lines changed.
File diff suppressed because it is too large.
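Besides the vendored TBD stub, the change reworks the bundled TAPI YAML tokenizer and parser: `Token.Id` tags move to snake_case, quoted scalars become single `single_quoted`/`double_quoted` tokens with escape handling, comments get their own token, and parse nodes carry `start`/`end` on the base `Node`. Three source files are touched: the tokenizer (it declares `const Tokenizer = @This();`), the parser (it imports `Tokenizer.zig` and `parse/test.zig`), and the parser tests. For orientation, a minimal sketch of driving the reworked tokenizer, modeled on the diff's own `testExpected` helper (the import path is an assumption):

```zig
const std = @import("std");
const Tokenizer = @import("Tokenizer.zig"); // assumed relative path

test "drive the reworked tokenizer (sketch)" {
    var tokenizer = Tokenizer{ .buffer = "key: 'val''ue'" };
    while (true) {
        const token = tokenizer.next();
        if (token.id == .eof) break;
        // token.start..token.end index into the buffer; quoted scalars
        // now arrive as one .single_quoted/.double_quoted token each.
        std.debug.print("{s}\n", .{@tagName(token.id)});
    }
}
```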
@@ -36,7 +36,7 @@ pub const TbdV3 = struct {
 pub const TbdV4 = struct {
     tbd_version: u3,
     targets: []const []const u8,
-    uuids: []const struct {
+    uuids: ?[]const struct {
         target: []const u8,
         value: []const u8,
     },

@@ -1,7 +1,7 @@
 const Tokenizer = @This();
 
 const std = @import("std");
-const log = std.log.scoped(.tapi);
+const log = std.log.scoped(.yaml);
 const testing = std.testing;
 
 buffer: []const u8,

@@ -13,29 +13,31 @@ pub const Token = struct {
     end: usize,
 
     pub const Id = enum {
-        Eof,
+        // zig fmt: off
+        eof,
 
-        NewLine,
-        DocStart, // ---
-        DocEnd, // ...
-        SeqItemInd, // -
-        MapValueInd, // :
-        FlowMapStart, // {
-        FlowMapEnd, // }
-        FlowSeqStart, // [
-        FlowSeqEnd, // ]
+        new_line,
+        doc_start, // ---
+        doc_end, // ...
+        seq_item_ind, // -
+        map_value_ind, // :
+        flow_map_start, // {
+        flow_map_end, // }
+        flow_seq_start, // [
+        flow_seq_end, // ]
 
-        Comma,
-        Space,
-        Tab,
-        Comment, // #
-        Alias, // *
-        Anchor, // &
-        Tag, // !
-        SingleQuote, // '
-        DoubleQuote, // "
+        comma,
+        space,
+        tab,
+        comment, // #
+        alias, // *
+        anchor, // &
+        tag, // !
 
-        Literal,
+        single_quoted, // '...'
+        double_quoted, // "..."
+        literal,
+        // zig fmt: on
     };
 };
 
@@ -45,8 +47,8 @@ pub const TokenIterator = struct {
     buffer: []const Token,
     pos: TokenIndex = 0,
 
-    pub fn next(self: *TokenIterator) Token {
-        const token = self.buffer[self.pos];
+    pub fn next(self: *TokenIterator) ?Token {
+        const token = self.peek() orelse return null;
         self.pos += 1;
         return token;
     }
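`TokenIterator.next` now returns `?Token` instead of indexing past the end of its buffer, so consumers unwrap an optional rather than relying solely on a trailing `eof` sentinel; a hedged sketch of the new call-site shape:

```zig
// Assuming `tokens: []const Token` from a prior tokenize pass.
var it = Tokenizer.TokenIterator{ .buffer = tokens };
while (it.next()) |token| {
    if (token.id == .eof) break;
    // process token
}
```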
@@ -74,180 +76,212 @@ pub const TokenIterator = struct {
     }
 };
 
+fn stringMatchesPattern(comptime pattern: []const u8, slice: []const u8) bool {
+    comptime var count: usize = 0;
+    inline while (count < pattern.len) : (count += 1) {
+        if (count >= slice.len) return false;
+        const c = slice[count];
+        if (pattern[count] != c) return false;
+    }
+    return true;
+}
+
+fn matchesPattern(self: Tokenizer, comptime pattern: []const u8) bool {
+    return stringMatchesPattern(pattern, self.buffer[self.index..]);
+}
+
 pub fn next(self: *Tokenizer) Token {
     var result = Token{
-        .id = .Eof,
+        .id = .eof,
         .start = self.index,
         .end = undefined,
     };
 
-    var state: union(enum) {
-        Start,
-        NewLine,
-        Space,
-        Tab,
-        Hyphen: usize,
-        Dot: usize,
-        Literal,
-    } = .Start;
+    var state: enum {
+        start,
+        new_line,
+        space,
+        tab,
+        comment,
+        single_quoted,
+        double_quoted,
+        literal,
+    } = .start;
 
     while (self.index < self.buffer.len) : (self.index += 1) {
        const c = self.buffer[self.index];
        switch (state) {
-            .Start => switch (c) {
+            .start => switch (c) {
                 ' ' => {
-                    state = .Space;
+                    state = .space;
                 },
                 '\t' => {
-                    state = .Tab;
+                    state = .tab;
                 },
                 '\n' => {
-                    result.id = .NewLine;
+                    result.id = .new_line;
                     self.index += 1;
                     break;
                 },
                 '\r' => {
-                    state = .NewLine;
+                    state = .new_line;
                 },
-                '-' => {
-                    state = .{ .Hyphen = 1 };
+                '-' => if (self.matchesPattern("---")) {
+                    result.id = .doc_start;
+                    self.index += "---".len;
+                    break;
+                } else if (self.matchesPattern("- ")) {
+                    result.id = .seq_item_ind;
+                    self.index += "- ".len;
+                    break;
+                } else {
+                    state = .literal;
                 },
-                '.' => {
-                    state = .{ .Dot = 1 };
+                '.' => if (self.matchesPattern("...")) {
+                    result.id = .doc_end;
+                    self.index += "...".len;
+                    break;
+                } else {
+                    state = .literal;
                 },
 
                 ',' => {
-                    result.id = .Comma;
+                    result.id = .comma;
                     self.index += 1;
                     break;
                 },
                 '#' => {
-                    result.id = .Comment;
-                    self.index += 1;
-                    break;
+                    state = .comment;
                 },
                 '*' => {
-                    result.id = .Alias;
+                    result.id = .alias;
                     self.index += 1;
                     break;
                 },
                 '&' => {
-                    result.id = .Anchor;
+                    result.id = .anchor;
                     self.index += 1;
                     break;
                 },
                 '!' => {
-                    result.id = .Tag;
-                    self.index += 1;
-                    break;
-                },
-                '\'' => {
-                    result.id = .SingleQuote;
-                    self.index += 1;
-                    break;
-                },
-                '"' => {
-                    result.id = .DoubleQuote;
+                    result.id = .tag;
                     self.index += 1;
                     break;
                 },
                 '[' => {
-                    result.id = .FlowSeqStart;
+                    result.id = .flow_seq_start;
                     self.index += 1;
                     break;
                 },
                 ']' => {
-                    result.id = .FlowSeqEnd;
+                    result.id = .flow_seq_end;
                     self.index += 1;
                     break;
                 },
                 ':' => {
-                    result.id = .MapValueInd;
+                    result.id = .map_value_ind;
                     self.index += 1;
                     break;
                 },
                 '{' => {
-                    result.id = .FlowMapStart;
+                    result.id = .flow_map_start;
                     self.index += 1;
                     break;
                 },
                 '}' => {
-                    result.id = .FlowMapEnd;
+                    result.id = .flow_map_end;
                     self.index += 1;
                     break;
                 },
+                '\'' => {
+                    state = .single_quoted;
+                },
+                '"' => {
+                    state = .double_quoted;
+                },
                 else => {
-                    state = .Literal;
+                    state = .literal;
                 },
             },
-            .Space => switch (c) {
+
+            .comment => switch (c) {
+                '\r', '\n' => {
+                    result.id = .comment;
+                    break;
+                },
+                else => {},
+            },
+
+            .space => switch (c) {
                 ' ' => {},
                 else => {
-                    result.id = .Space;
+                    result.id = .space;
                     break;
                 },
             },
-            .Tab => switch (c) {
+
+            .tab => switch (c) {
                 '\t' => {},
                 else => {
-                    result.id = .Tab;
+                    result.id = .tab;
                     break;
                 },
             },
-            .NewLine => switch (c) {
+
+            .new_line => switch (c) {
                 '\n' => {
-                    result.id = .NewLine;
+                    result.id = .new_line;
                     self.index += 1;
                     break;
                 },
                 else => {}, // TODO this should be an error condition
             },
-            .Hyphen => |*count| switch (c) {
-                ' ' => {
-                    result.id = .SeqItemInd;
+
+            .single_quoted => switch (c) {
+                '\'' => if (!self.matchesPattern("''")) {
+                    result.id = .single_quoted;
                     self.index += 1;
                     break;
+                } else {
+                    self.index += "''".len - 1;
                 },
-                '-' => {
-                    count.* += 1;
+                else => {},
+            },
 
-                    if (count.* == 3) {
-                        result.id = .DocStart;
+            .double_quoted => switch (c) {
+                '"' => {
+                    if (stringMatchesPattern("\\", self.buffer[self.index - 1 ..])) {
+                        self.index += 1;
+                    } else {
+                        result.id = .double_quoted;
                         self.index += 1;
                         break;
                     }
                 },
-                else => {
-                    state = .Literal;
-                },
+                else => {},
             },
-            .Dot => |*count| switch (c) {
-                '.' => {
-                    count.* += 1;
 
-                    if (count.* == 3) {
-                        result.id = .DocEnd;
-                        self.index += 1;
-                        break;
-                    }
-                },
-                else => {
-                    state = .Literal;
-                },
-            },
-            .Literal => switch (c) {
+            .literal => switch (c) {
                 '\r', '\n', ' ', '\'', '"', ',', ':', ']', '}' => {
-                    result.id = .Literal;
+                    result.id = .literal;
                     break;
                 },
                 else => {
-                    result.id = .Literal;
+                    result.id = .literal;
                 },
             },
         }
     }
 
-    if (state == .Literal and result.id == .Eof) {
-        result.id = .Literal;
+    if (self.index >= self.buffer.len) {
+        switch (state) {
+            .literal => {
+                result.id = .literal;
+            },
+            else => {},
+        }
     }
 
     result.end = self.index;
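The counting `Hyphen`/`Dot` states are replaced by up-front comptime lookahead: `stringMatchesPattern` is a plain prefix check, so `---` becomes `doc_start`, `- ` becomes `seq_item_ind`, and any other `-`/`.` run falls through to the `literal` state. Illustratively (the function is file-private in the diff; this is only a sketch of its semantics):

```zig
test "prefix lookahead (sketch)" {
    try std.testing.expect(stringMatchesPattern("---", "--- !tapi-tbd"));
    try std.testing.expect(stringMatchesPattern("- ", "- 1"));
    try std.testing.expect(!stringMatchesPattern("- ", "-1")); // "-1" stays a literal
}
```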
@@ -263,22 +297,24 @@ fn testExpected(source: []const u8, expected: []const Token.Id) !void {
         .buffer = source,
     };
 
-    var token_len: usize = 0;
-    for (expected) |exp| {
-        token_len += 1;
+    var given = std.ArrayList(Token.Id).init(testing.allocator);
+    defer given.deinit();
+
+    while (true) {
         const token = tokenizer.next();
-        try testing.expectEqual(exp, token.id);
+        try given.append(token.id);
+        if (token.id == .eof) break;
     }
 
-    while (tokenizer.next().id != .Eof) {
-        token_len += 1; // consume all tokens
-    }
+    try testing.expectEqualSlices(Token.Id, expected, given.items);
+}
 
-    try testing.expectEqual(expected.len, token_len);
+test {
+    std.testing.refAllDecls(@This());
 }
 
 test "empty doc" {
-    try testExpected("", &[_]Token.Id{.Eof});
+    try testExpected("", &[_]Token.Id{.eof});
 }
 
 test "empty doc with explicit markers" {

@@ -286,7 +322,22 @@ test "empty doc with explicit markers" {
         \\---
         \\...
     , &[_]Token.Id{
-        .DocStart, .NewLine, .DocEnd, .Eof,
+        .doc_start, .new_line, .doc_end, .eof,
+    });
+}
+
+test "empty doc with explicit markers and a directive" {
+    try testExpected(
+        \\--- !tbd-v1
+        \\...
+    , &[_]Token.Id{
+        .doc_start,
+        .space,
+        .tag,
+        .literal,
+        .new_line,
+        .doc_end,
+        .eof,
     });
 }
 
@@ -296,15 +347,15 @@ test "sequence of values" {
         \\- 1
         \\- 2
     , &[_]Token.Id{
-        .SeqItemInd,
-        .Literal,
-        .NewLine,
-        .SeqItemInd,
-        .Literal,
-        .NewLine,
-        .SeqItemInd,
-        .Literal,
-        .Eof,
+        .seq_item_ind,
+        .literal,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .eof,
     });
 }
 
@@ -313,24 +364,24 @@ test "sequence of sequences" {
         \\- [ val1, val2]
         \\- [val3, val4 ]
     , &[_]Token.Id{
-        .SeqItemInd,
-        .FlowSeqStart,
-        .Space,
-        .Literal,
-        .Comma,
-        .Space,
-        .Literal,
-        .FlowSeqEnd,
-        .NewLine,
-        .SeqItemInd,
-        .FlowSeqStart,
-        .Literal,
-        .Comma,
-        .Space,
-        .Literal,
-        .Space,
-        .FlowSeqEnd,
-        .Eof,
+        .seq_item_ind,
+        .flow_seq_start,
+        .space,
+        .literal,
+        .comma,
+        .space,
+        .literal,
+        .flow_seq_end,
+        .new_line,
+        .seq_item_ind,
+        .flow_seq_start,
+        .literal,
+        .comma,
+        .space,
+        .literal,
+        .space,
+        .flow_seq_end,
+        .eof,
     });
 }
 
@@ -339,16 +390,16 @@ test "mappings" {
         \\key1: value1
         \\key2: value2
     , &[_]Token.Id{
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .Eof,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .eof,
     });
 }
 
@@ -357,21 +408,21 @@ test "inline mapped sequence of values" {
         \\key : [ val1,
         \\ val2 ]
     , &[_]Token.Id{
-        .Literal,
-        .Space,
-        .MapValueInd,
-        .Space,
-        .FlowSeqStart,
-        .Space,
-        .Literal,
-        .Comma,
-        .Space,
-        .NewLine,
-        .Space,
-        .Literal,
-        .Space,
-        .FlowSeqEnd,
-        .Eof,
+        .literal,
+        .space,
+        .map_value_ind,
+        .space,
+        .flow_seq_start,
+        .space,
+        .literal,
+        .comma,
+        .space,
+        .new_line,
+        .space,
+        .literal,
+        .space,
+        .flow_seq_end,
+        .eof,
     });
 }
 
@@ -388,52 +439,50 @@ test "part of tbd" {
         \\install-name: '/usr/lib/libSystem.B.dylib'
         \\...
     , &[_]Token.Id{
-        .DocStart,
-        .Space,
-        .Tag,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .FlowSeqStart,
-        .Space,
-        .Literal,
-        .Space,
-        .FlowSeqEnd,
-        .NewLine,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .NewLine,
-        .Space,
-        .SeqItemInd,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Space,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .SingleQuote,
-        .Literal,
-        .SingleQuote,
-        .NewLine,
-        .DocEnd,
-        .Eof,
+        .doc_start,
+        .space,
+        .tag,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .flow_seq_start,
+        .space,
+        .literal,
+        .space,
+        .flow_seq_end,
+        .new_line,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .new_line,
+        .space,
+        .seq_item_ind,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .space,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .single_quoted,
+        .new_line,
+        .doc_end,
+        .eof,
     });
 }
 
@@ -443,18 +492,84 @@ test "Unindented list" {
         \\- foo: 1
         \\c: 1
     , &[_]Token.Id{
-        .Literal,
-        .MapValueInd,
-        .NewLine,
-        .SeqItemInd,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
-        .NewLine,
-        .Literal,
-        .MapValueInd,
-        .Space,
-        .Literal,
+        .literal,
+        .map_value_ind,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .literal,
+        .eof,
+    });
+}
+
+test "escape sequences" {
+    try testExpected(
+        \\a: 'here''s an apostrophe'
+        \\b: "a newline\nand a\ttab"
+        \\c: "\"here\" and there"
+    , &[_]Token.Id{
+        .literal,
+        .map_value_ind,
+        .space,
+        .single_quoted,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .double_quoted,
+        .new_line,
+        .literal,
+        .map_value_ind,
+        .space,
+        .double_quoted,
+        .eof,
+    });
+}
+
+test "comments" {
+    try testExpected(
+        \\key: # some comment about the key
+        \\# first value
+        \\- val1
+        \\# second value
+        \\- val2
+    , &[_]Token.Id{
+        .literal,
+        .map_value_ind,
+        .space,
+        .comment,
+        .new_line,
+        .comment,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .new_line,
+        .comment,
+        .new_line,
+        .seq_item_ind,
+        .literal,
+        .eof,
+    });
+}
+
+test "quoted literals" {
+    try testExpected(
+        \\'#000000'
+        \\'[000000'
+        \\"&someString"
+    , &[_]Token.Id{
+        .single_quoted,
+        .new_line,
+        .single_quoted,
+        .new_line,
+        .double_quoted,
+        .eof,
     });
 }
@@ -1,8 +1,7 @@
 const std = @import("std");
 const assert = std.debug.assert;
-const log = std.log.scoped(.tapi);
+const log = std.log.scoped(.yaml);
 const mem = std.mem;
-const testing = std.testing;
 
 const Allocator = mem.Allocator;
 const Tokenizer = @import("Tokenizer.zig");

@@ -11,9 +10,9 @@ const TokenIndex = Tokenizer.TokenIndex;
 const TokenIterator = Tokenizer.TokenIterator;
 
 pub const ParseError = error{
+    InvalidEscapeSequence,
     MalformedYaml,
     NestedDocuments,
-    UnexpectedTag,
     UnexpectedEof,
     UnexpectedToken,
     Unhandled,

@@ -22,6 +21,8 @@ pub const ParseError = error{
 pub const Node = struct {
     tag: Tag,
     tree: *const Tree,
+    start: TokenIndex,
+    end: TokenIndex,
 
     pub const Tag = enum {
         doc,

@@ -61,9 +62,12 @@ pub const Node = struct {
     }
 
     pub const Doc = struct {
-        base: Node = Node{ .tag = Tag.doc, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.doc,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
         directive: ?TokenIndex = null,
         value: ?*Node = null,
 
@@ -86,10 +90,8 @@ pub const Node = struct {
             _ = fmt;
             if (self.directive) |id| {
                 try std.fmt.format(writer, "{{ ", .{});
-                const directive = self.base.tree.tokens[id];
-                try std.fmt.format(writer, ".directive = {s}, ", .{
-                    self.base.tree.source[directive.start..directive.end],
-                });
+                const directive = self.base.tree.getRaw(id, id);
+                try std.fmt.format(writer, ".directive = {s}, ", .{directive});
             }
             if (self.value) |node| {
                 try std.fmt.format(writer, "{}", .{node});

@@ -101,22 +103,27 @@ pub const Node = struct {
     };
 
     pub const Map = struct {
-        base: Node = Node{ .tag = Tag.map, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.map,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
         values: std.ArrayListUnmanaged(Entry) = .{},
 
         pub const base_tag: Node.Tag = .map;
 
         pub const Entry = struct {
             key: TokenIndex,
-            value: *Node,
+            value: ?*Node,
         };
 
         pub fn deinit(self: *Map, allocator: Allocator) void {
             for (self.values.items) |entry| {
-                entry.value.deinit(allocator);
-                allocator.destroy(entry.value);
+                if (entry.value) |value| {
+                    value.deinit(allocator);
+                    allocator.destroy(value);
+                }
             }
             self.values.deinit(allocator);
         }

@@ -131,20 +138,24 @@ pub const Node = struct {
             _ = fmt;
             try std.fmt.format(writer, "{{ ", .{});
             for (self.values.items) |entry| {
-                const key = self.base.tree.tokens[entry.key];
-                try std.fmt.format(writer, "{s} => {}, ", .{
-                    self.base.tree.source[key.start..key.end],
-                    entry.value,
-                });
+                const key = self.base.tree.getRaw(entry.key, entry.key);
+                if (entry.value) |value| {
+                    try std.fmt.format(writer, "{s} => {}, ", .{ key, value });
+                } else {
+                    try std.fmt.format(writer, "{s} => null, ", .{key});
+                }
             }
             return std.fmt.format(writer, " }}", .{});
         }
     };
 
     pub const List = struct {
-        base: Node = Node{ .tag = Tag.list, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.list,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
         values: std.ArrayListUnmanaged(*Node) = .{},
 
         pub const base_tag: Node.Tag = .list;

@@ -174,15 +185,18 @@ pub const Node = struct {
     };
 
     pub const Value = struct {
-        base: Node = Node{ .tag = Tag.value, .tree = undefined },
-        start: ?TokenIndex = null,
-        end: ?TokenIndex = null,
+        base: Node = Node{
+            .tag = Tag.value,
+            .tree = undefined,
+            .start = undefined,
+            .end = undefined,
+        },
+        string_value: std.ArrayListUnmanaged(u8) = .{},
 
         pub const base_tag: Node.Tag = .value;
 
         pub fn deinit(self: *Value, allocator: Allocator) void {
-            _ = self;
-            _ = allocator;
+            self.string_value.deinit(allocator);
         }
 
         pub fn format(

@@ -193,11 +207,8 @@ pub const Node = struct {
         ) !void {
             _ = options;
             _ = fmt;
-            const start = self.base.tree.tokens[self.start.?];
-            const end = self.base.tree.tokens[self.end.?];
-            return std.fmt.format(writer, "{s}", .{
-                self.base.tree.source[start.start..end.end],
-            });
+            const raw = self.base.tree.getRaw(self.base.start, self.base.end);
+            return std.fmt.format(writer, "{s}", .{raw});
         }
     };
 };

@@ -233,6 +244,21 @@ pub const Tree = struct {
         self.docs.deinit(self.allocator);
     }
 
+    pub fn getDirective(self: Tree, doc_index: usize) ?[]const u8 {
+        assert(doc_index < self.docs.items.len);
+        const doc = self.docs.items[doc_index].cast(Node.Doc) orelse return null;
+        const id = doc.directive orelse return null;
+        return self.getRaw(id, id);
+    }
+
+    pub fn getRaw(self: Tree, start: TokenIndex, end: TokenIndex) []const u8 {
+        assert(start <= end);
+        assert(start < self.tokens.len and end < self.tokens.len);
+        const start_token = self.tokens[start];
+        const end_token = self.tokens[end];
+        return self.source[start_token.start..end_token.end];
+    }
+
     pub fn parse(self: *Tree, source: []const u8) !void {
         var tokenizer = Tokenizer{ .buffer = source };
         var tokens = std.ArrayList(Token).init(self.allocator);
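`Tree.getRaw` and `Tree.getDirective` centralize the `source[token.start..token.end]` slicing that was previously repeated at each call-site; hypothetical usage against an already-parsed `tree: Tree`:

```zig
// Assumes `tree.parse(source)` has succeeded.
if (tree.getDirective(0)) |directive| {
    // e.g. "tapi-tbd" for a TBD v4 document
    std.debug.print("directive: {s}\n", .{directive});
}
```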
@@ -252,8 +278,8 @@ pub const Tree = struct {
         });
 
         switch (token.id) {
-            .Eof => break,
-            .NewLine => {
+            .eof => break,
+            .new_line => {
                 line += 1;
                 prev_line_last_col = token.end;
             },

@@ -272,20 +298,20 @@ pub const Tree = struct {
             .line_cols = &self.line_cols,
         };
 
+        parser.eatCommentsAndSpace(&.{});
+
         while (true) {
-            if (parser.token_it.peek() == null) return;
+            parser.eatCommentsAndSpace(&.{});
+            const token = parser.token_it.next() orelse break;
 
-            const pos = parser.token_it.pos;
-            const token = parser.token_it.next();
-
-            log.debug("Next token: {}, {}", .{ pos, token });
+            log.debug("(main) next {s}@{d}", .{ @tagName(token.id), parser.token_it.pos - 1 });
 
             switch (token.id) {
-                .Space, .Comment, .NewLine => {},
-                .Eof => break,
+                .eof => break,
                 else => {
-                    const doc = try parser.doc(pos);
-                    try self.docs.append(self.allocator, &doc.base);
+                    parser.token_it.seekBy(-1);
+                    const doc = try parser.doc();
+                    try self.docs.append(self.allocator, doc);
                 },
             }
         }

@@ -298,355 +324,308 @@ const Parser = struct {
     token_it: *TokenIterator,
     line_cols: *const std.AutoHashMap(TokenIndex, LineCol),
 
-    fn doc(self: *Parser, start: TokenIndex) ParseError!*Node.Doc {
+    fn value(self: *Parser) ParseError!?*Node {
+        self.eatCommentsAndSpace(&.{});
+
+        const pos = self.token_it.pos;
+        const token = self.token_it.next() orelse return error.UnexpectedEof;
+
+        log.debug("  next {s}@{d}", .{ @tagName(token.id), pos });
+
+        switch (token.id) {
+            .literal => if (self.eatToken(.map_value_ind, &.{ .new_line, .comment })) |_| {
+                // map
+                self.token_it.seekTo(pos);
+                return self.map();
+            } else {
+                // leaf value
+                self.token_it.seekTo(pos);
+                return self.leaf_value();
+            },
+            .single_quoted, .double_quoted => {
+                // leaf value
+                self.token_it.seekBy(-1);
+                return self.leaf_value();
+            },
+            .seq_item_ind => {
+                // list
+                self.token_it.seekBy(-1);
+                return self.list();
+            },
+            .flow_seq_start => {
+                // list
+                self.token_it.seekBy(-1);
+                return self.list_bracketed();
+            },
+            else => return null,
+        }
+    }
+
+    fn doc(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.Doc);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
 
-        self.token_it.seekTo(start);
+        log.debug("(doc) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
-        log.debug("Doc start: {}, {}", .{ start, self.tree.tokens[start] });
-
-        const explicit_doc: bool = if (self.eatToken(.DocStart)) |_| explicit_doc: {
-            if (self.eatToken(.Tag)) |_| {
-                node.directive = try self.expectToken(.Literal);
+        // Parse header
+        const explicit_doc: bool = if (self.eatToken(.doc_start, &.{})) |doc_pos| explicit_doc: {
+            if (self.getCol(doc_pos) > 0) return error.MalformedYaml;
+            if (self.eatToken(.tag, &.{ .new_line, .comment })) |_| {
+                node.directive = try self.expectToken(.literal, &.{ .new_line, .comment });
             }
-            _ = try self.expectToken(.NewLine);
             break :explicit_doc true;
         } else false;
 
-        while (true) {
-            const pos = self.token_it.pos;
-            const token = self.token_it.next();
+        // Parse value
+        node.value = try self.value();
+        if (node.value == null) {
+            self.token_it.seekBy(-1);
+        }
+        errdefer if (node.value) |val| {
+            val.deinit(self.allocator);
+            self.allocator.destroy(val);
+        };
 
-            log.debug("Next token: {}, {}", .{ pos, token });
-
-            switch (token.id) {
-                .Tag => {
-                    return error.UnexpectedTag;
-                },
-                .Literal, .SingleQuote, .DoubleQuote => {
-                    _ = try self.expectToken(.MapValueInd);
-                    const map_node = try self.map(pos);
-                    node.value = &map_node.base;
-                },
-                .SeqItemInd => {
-                    const list_node = try self.list(pos);
-                    node.value = &list_node.base;
-                },
-                .FlowSeqStart => {
-                    const list_node = try self.list_bracketed(pos);
-                    node.value = &list_node.base;
-                },
-                .DocEnd => {
-                    if (explicit_doc) break;
-                    return error.UnexpectedToken;
-                },
-                .DocStart, .Eof => {
-                    self.token_it.seekBy(-1);
-                    break;
-                },
-                else => {
-                    return error.UnexpectedToken;
-                },
+        // Parse footer
+        footer: {
+            if (self.eatToken(.doc_end, &.{})) |pos| {
+                if (!explicit_doc) return error.UnexpectedToken;
+                if (self.getCol(pos) > 0) return error.MalformedYaml;
+                node.base.end = pos;
+                break :footer;
             }
+            if (self.eatToken(.doc_start, &.{})) |pos| {
+                if (!explicit_doc) return error.UnexpectedToken;
+                if (self.getCol(pos) > 0) return error.MalformedYaml;
+                self.token_it.seekBy(-1);
+                node.base.end = pos - 1;
+                break :footer;
+            }
+            if (self.eatToken(.eof, &.{})) |pos| {
+                node.base.end = pos - 1;
+                break :footer;
+            }
+            return error.UnexpectedToken;
         }
 
-        node.end = self.token_it.pos - 1;
+        log.debug("(doc) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        log.debug("Doc end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
-
-        return node;
+        return &node.base;
     }
 
-    fn map(self: *Parser, start: TokenIndex) ParseError!*Node.Map {
+    fn map(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.Map);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
+        errdefer {
+            for (node.values.items) |entry| {
+                if (entry.value) |val| {
+                    val.deinit(self.allocator);
+                    self.allocator.destroy(val);
+                }
+            }
+            node.values.deinit(self.allocator);
+        }
 
-        self.token_it.seekTo(start);
+        log.debug("(map) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
-        log.debug("Map start: {}, {}", .{ start, self.tree.tokens[start] });
-
-        const col = self.getCol(start);
+        const col = self.getCol(node.base.start);
 
         while (true) {
-            self.eatCommentsAndSpace();
+            self.eatCommentsAndSpace(&.{});
 
-            // Parse key.
+            // Parse key
             const key_pos = self.token_it.pos;
-            if (self.getCol(key_pos) != col) {
+            if (self.getCol(key_pos) < col) {
                 break;
             }
 
-            const key = self.token_it.next();
+            const key = self.token_it.next() orelse return error.UnexpectedEof;
             switch (key.id) {
-                .Literal => {},
-                else => {
+                .literal => {},
+                .doc_start, .doc_end, .eof => {
                     self.token_it.seekBy(-1);
                     break;
                 },
+                else => {
+                    // TODO key not being a literal
+                    return error.Unhandled;
+                },
             }
 
-            log.debug("Map key: {}, '{s}'", .{ key, self.tree.source[key.start..key.end] });
+            log.debug("(map) key {s}@{d}", .{ self.tree.getRaw(key_pos, key_pos), key_pos });
 
             // Separator
-            _ = try self.expectToken(.MapValueInd);
+            _ = try self.expectToken(.map_value_ind, &.{ .new_line, .comment });
 
-            // Parse value.
-            const value: *Node = value: {
-                if (self.eatToken(.NewLine)) |_| {
-                    self.eatCommentsAndSpace();
-
-                    // Explicit, complex value such as list or map.
-                    const value_pos = self.token_it.pos;
-                    const value = self.token_it.next();
-                    switch (value.id) {
-                        .Literal, .SingleQuote, .DoubleQuote => {
-                            // Assume nested map.
-                            const map_node = try self.map(value_pos);
-                            break :value &map_node.base;
-                        },
-                        .SeqItemInd => {
-                            // Assume list of values.
-                            const list_node = try self.list(value_pos);
-                            break :value &list_node.base;
-                        },
-                        else => {
-                            log.err("{}", .{key});
-                            return error.Unhandled;
-                        },
-                    }
-                } else {
-                    self.eatCommentsAndSpace();
-
-                    const value_pos = self.token_it.pos;
-                    const value = self.token_it.next();
-                    switch (value.id) {
-                        .Literal, .SingleQuote, .DoubleQuote => {
-                            // Assume leaf value.
-                            const leaf_node = try self.leaf_value(value_pos);
-                            break :value &leaf_node.base;
-                        },
-                        .FlowSeqStart => {
-                            const list_node = try self.list_bracketed(value_pos);
-                            break :value &list_node.base;
-                        },
-                        else => {
-                            log.err("{}", .{key});
-                            return error.Unhandled;
-                        },
-                    }
+            // Parse value
+            const val = try self.value();
+            errdefer if (val) |v| {
+                v.deinit(self.allocator);
+                self.allocator.destroy(v);
+            };
+
+            if (val) |v| {
+                if (self.getCol(v.start) < self.getCol(key_pos)) {
+                    return error.MalformedYaml;
                 }
-            };
-            log.debug("Map value: {}", .{value});
+                if (v.cast(Node.Value)) |_| {
+                    if (self.getCol(v.start) == self.getCol(key_pos)) {
+                        return error.MalformedYaml;
+                    }
+                }
+            }
 
             try node.values.append(self.allocator, .{
                 .key = key_pos,
-                .value = value,
+                .value = val,
             });
-
-            _ = self.eatToken(.NewLine);
         }
 
-        node.end = self.token_it.pos - 1;
+        node.base.end = self.token_it.pos - 1;
 
-        log.debug("Map end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
+        log.debug("(map) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        return node;
+        return &node.base;
     }
 
-    fn list(self: *Parser, start: TokenIndex) ParseError!*Node.List {
+    fn list(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.List);
         errdefer self.allocator.destroy(node);
-        node.* = .{
-            .start = start,
-        };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
+        errdefer {
+            for (node.values.items) |val| {
+                val.deinit(self.allocator);
+                self.allocator.destroy(val);
+            }
+            node.values.deinit(self.allocator);
+        }
 
-        self.token_it.seekTo(start);
+        log.debug("(list) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
-        log.debug("List start: {}, {}", .{ start, self.tree.tokens[start] });
-
-        const col = self.getCol(start);
-
         while (true) {
-            self.eatCommentsAndSpace();
+            self.eatCommentsAndSpace(&.{});
 
-            if (self.getCol(self.token_it.pos) != col) {
-                break;
-            }
-            _ = self.eatToken(.SeqItemInd) orelse {
-                break;
-            };
+            _ = self.eatToken(.seq_item_ind, &.{}) orelse break;
 
-            const pos = self.token_it.pos;
-            const token = self.token_it.next();
-            const value: *Node = value: {
-                switch (token.id) {
-                    .Literal, .SingleQuote, .DoubleQuote => {
-                        if (self.eatToken(.MapValueInd)) |_| {
-                            // nested map
-                            const map_node = try self.map(pos);
-                            break :value &map_node.base;
-                        } else {
-                            // standalone (leaf) value
-                            const leaf_node = try self.leaf_value(pos);
-                            break :value &leaf_node.base;
-                        }
-                    },
-                    .FlowSeqStart => {
-                        const list_node = try self.list_bracketed(pos);
-                        break :value &list_node.base;
-                    },
-                    else => {
-                        log.err("{}", .{token});
-                        return error.Unhandled;
-                    },
-                }
-            };
-            try node.values.append(self.allocator, value);
-
-            _ = self.eatToken(.NewLine);
+            const val = (try self.value()) orelse return error.MalformedYaml;
+            try node.values.append(self.allocator, val);
         }
 
-        node.end = self.token_it.pos - 1;
+        node.base.end = self.token_it.pos - 1;
 
-        log.debug("List end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
+        log.debug("(list) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        return node;
+        return &node.base;
     }
 
-    fn list_bracketed(self: *Parser, start: TokenIndex) ParseError!*Node.List {
+    fn list_bracketed(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.List);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{};
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
+        errdefer {
+            for (node.values.items) |val| {
+                val.deinit(self.allocator);
+                self.allocator.destroy(val);
+            }
+            node.values.deinit(self.allocator);
+        }
 
-        self.token_it.seekTo(start);
+        log.debug("(list) begin {s}@{d}", .{ @tagName(self.tree.tokens[node.base.start].id), node.base.start });
 
-        log.debug("List start: {}, {}", .{ start, self.tree.tokens[start] });
-
-        _ = try self.expectToken(.FlowSeqStart);
+        _ = try self.expectToken(.flow_seq_start, &.{});
 
         while (true) {
-            _ = self.eatToken(.NewLine);
-            self.eatCommentsAndSpace();
+            self.eatCommentsAndSpace(&.{.comment});
 
-            const pos = self.token_it.pos;
-            const token = self.token_it.next();
-
-            log.debug("Next token: {}, {}", .{ pos, token });
-
-            const value: *Node = value: {
-                switch (token.id) {
-                    .FlowSeqStart => {
-                        const list_node = try self.list_bracketed(pos);
-                        break :value &list_node.base;
-                    },
-                    .FlowSeqEnd => {
-                        break;
-                    },
-                    .Literal, .SingleQuote, .DoubleQuote => {
-                        const leaf_node = try self.leaf_value(pos);
-                        _ = self.eatToken(.Comma);
-                        // TODO newline
-                        break :value &leaf_node.base;
-                    },
-                    else => {
-                        log.err("{}", .{token});
-                        return error.Unhandled;
-                    },
-                }
-            };
-            try node.values.append(self.allocator, value);
+            if (self.eatToken(.flow_seq_end, &.{.comment})) |pos| {
+                node.base.end = pos;
+                break;
+            }
+            _ = self.eatToken(.comma, &.{.comment});
+
+            const val = (try self.value()) orelse return error.MalformedYaml;
+            try node.values.append(self.allocator, val);
         }
 
-        node.end = self.token_it.pos - 1;
+        log.debug("(list) end {s}@{d}", .{ @tagName(self.tree.tokens[node.base.end].id), node.base.end });
 
-        log.debug("List end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
-
-        return node;
+        return &node.base;
     }
 
-    fn leaf_value(self: *Parser, start: TokenIndex) ParseError!*Node.Value {
+    fn leaf_value(self: *Parser) ParseError!*Node {
         const node = try self.allocator.create(Node.Value);
         errdefer self.allocator.destroy(node);
-        node.* = .{ .start = start };
+        node.* = .{ .string_value = .{} };
         node.base.tree = self.tree;
+        node.base.start = self.token_it.pos;
+        errdefer node.string_value.deinit(self.allocator);
 
-        self.token_it.seekTo(start);
-
-        log.debug("Leaf start: {}, {}", .{ node.start.?, self.tree.tokens[node.start.?] });
-
-        parse: {
-            if (self.eatToken(.SingleQuote)) |_| {
-                node.start = node.start.? + 1;
-                while (true) {
-                    const tok = self.token_it.next();
-                    switch (tok.id) {
-                        .SingleQuote => {
-                            node.end = self.token_it.pos - 2;
-                            break :parse;
-                        },
-                        .NewLine => return error.UnexpectedToken,
-                        else => {},
-                    }
-                }
-            }
-
-            if (self.eatToken(.DoubleQuote)) |_| {
-                node.start = node.start.? + 1;
-                while (true) {
-                    const tok = self.token_it.next();
-                    switch (tok.id) {
-                        .DoubleQuote => {
-                            node.end = self.token_it.pos - 2;
-                            break :parse;
-                        },
-                        .NewLine => return error.UnexpectedToken,
-                        else => {},
-                    }
-                }
-            }
-
-            // TODO handle multiline strings in new block scope
-            while (true) {
-                const tok = self.token_it.next();
-                switch (tok.id) {
-                    .Literal => {},
-                    .Space => {
-                        const trailing = self.token_it.pos - 2;
-                        self.eatCommentsAndSpace();
-                        if (self.token_it.peek()) |peek| {
-                            if (peek.id != .Literal) {
-                                node.end = trailing;
-                                break;
-                            }
+        // TODO handle multiline strings in new block scope
+        while (self.token_it.next()) |tok| {
+            switch (tok.id) {
+                .single_quoted => {
+                    node.base.end = self.token_it.pos - 1;
+                    const raw = self.tree.getRaw(node.base.start, node.base.end);
+                    try self.parseSingleQuoted(node, raw);
+                    break;
+                },
+                .double_quoted => {
+                    node.base.end = self.token_it.pos - 1;
+                    const raw = self.tree.getRaw(node.base.start, node.base.end);
+                    try self.parseDoubleQuoted(node, raw);
+                    break;
+                },
+                .literal => {},
+                .space => {
+                    const trailing = self.token_it.pos - 2;
+                    self.eatCommentsAndSpace(&.{});
+                    if (self.token_it.peek()) |peek| {
+                        if (peek.id != .literal) {
+                            node.base.end = trailing;
+                            const raw = self.tree.getRaw(node.base.start, node.base.end);
+                            try node.string_value.appendSlice(self.allocator, raw);
+                            break;
                         }
-                    },
-                    else => {
-                        self.token_it.seekBy(-1);
-                        node.end = self.token_it.pos - 1;
-                        break;
-                    },
-                }
+                    }
+                },
+                else => {
+                    self.token_it.seekBy(-1);
+                    node.base.end = self.token_it.pos - 1;
+                    const raw = self.tree.getRaw(node.base.start, node.base.end);
+                    try node.string_value.appendSlice(self.allocator, raw);
+                    break;
+                },
             }
         }
 
-        log.debug("Leaf end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] });
+        log.debug("(leaf) {s}", .{self.tree.getRaw(node.base.start, node.base.end)});
 
-        return node;
+        return &node.base;
     }
 
-    fn eatCommentsAndSpace(self: *Parser) void {
-        while (true) {
-            _ = self.token_it.peek() orelse return;
-            const token = self.token_it.next();
+    fn eatCommentsAndSpace(self: *Parser, comptime exclusions: []const Token.Id) void {
+        log.debug("eatCommentsAndSpace", .{});
+        outer: while (self.token_it.next()) |token| {
+            log.debug("  (token '{s}')", .{@tagName(token.id)});
             switch (token.id) {
-                .Comment, .Space => {},
+                .comment, .space, .new_line => |space| {
+                    inline for (exclusions) |excl| {
+                        if (excl == space) {
+                            self.token_it.seekBy(-1);
+                            break :outer;
+                        }
+                    } else continue;
+                },
                 else => {
                     self.token_it.seekBy(-1);
                     break;
@@ -655,25 +634,24 @@ const Parser = struct {
         }
     }
 
-    fn eatToken(self: *Parser, id: Token.Id) ?TokenIndex {
-        while (true) {
-            const pos = self.token_it.pos;
-            _ = self.token_it.peek() orelse return null;
-            const token = self.token_it.next();
-            switch (token.id) {
-                .Comment, .Space => continue,
-                else => |next_id| if (next_id == id) {
-                    return pos;
-                } else {
-                    self.token_it.seekTo(pos);
-                    return null;
-                },
-            }
+    fn eatToken(self: *Parser, id: Token.Id, comptime exclusions: []const Token.Id) ?TokenIndex {
+        log.debug("eatToken('{s}')", .{@tagName(id)});
+        self.eatCommentsAndSpace(exclusions);
+        const pos = self.token_it.pos;
+        const token = self.token_it.next() orelse return null;
+        if (token.id == id) {
+            log.debug("  (found at {d})", .{pos});
+            return pos;
+        } else {
+            log.debug("  (not found)", .{});
+            self.token_it.seekBy(-1);
+            return null;
         }
     }
 
-    fn expectToken(self: *Parser, id: Token.Id) ParseError!TokenIndex {
-        return self.eatToken(id) orelse error.UnexpectedToken;
+    fn expectToken(self: *Parser, id: Token.Id, comptime exclusions: []const Token.Id) ParseError!TokenIndex {
+        log.debug("expectToken('{s}')", .{@tagName(id)});
+        return self.eatToken(id, exclusions) orelse error.UnexpectedToken;
     }
 
     fn getLine(self: *Parser, index: TokenIndex) usize {
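The comptime `exclusions` list threads into `eatCommentsAndSpace`: any listed trivia token (`.new_line`, `.comment`, `.space`) stops the skip instead of being swallowed. That is how the parser can demand that a `:` follow its key on the same line, as in this call taken from `map` above:

```zig
// Fails with error.UnexpectedToken if a newline or comment
// appears before the .map_value_ind token.
_ = try self.expectToken(.map_value_ind, &.{ .new_line, .comment });
```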
@@ -683,8 +661,85 @@ const Parser = struct {
     fn getCol(self: *Parser, index: TokenIndex) usize {
         return self.line_cols.get(index).?.col;
     }
+
+    fn parseSingleQuoted(self: *Parser, node: *Node.Value, raw: []const u8) ParseError!void {
+        assert(raw[0] == '\'' and raw[raw.len - 1] == '\'');
+
+        const raw_no_quotes = raw[1 .. raw.len - 1];
+        try node.string_value.ensureTotalCapacity(self.allocator, raw_no_quotes.len);
+
+        var state: enum {
+            start,
+            escape,
+        } = .start;
+        var index: usize = 0;
+
+        while (index < raw_no_quotes.len) : (index += 1) {
+            const c = raw_no_quotes[index];
+            switch (state) {
+                .start => switch (c) {
+                    '\'' => {
+                        state = .escape;
+                    },
+                    else => {
+                        node.string_value.appendAssumeCapacity(c);
+                    },
+                },
+                .escape => switch (c) {
+                    '\'' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity(c);
+                    },
+                    else => return error.InvalidEscapeSequence,
+                },
+            }
+        }
+    }
+
+    fn parseDoubleQuoted(self: *Parser, node: *Node.Value, raw: []const u8) ParseError!void {
+        assert(raw[0] == '"' and raw[raw.len - 1] == '"');
+
+        const raw_no_quotes = raw[1 .. raw.len - 1];
+        try node.string_value.ensureTotalCapacity(self.allocator, raw_no_quotes.len);
+
+        var state: enum {
+            start,
+            escape,
+        } = .start;
+
+        var index: usize = 0;
+        while (index < raw_no_quotes.len) : (index += 1) {
+            const c = raw_no_quotes[index];
+            switch (state) {
+                .start => switch (c) {
+                    '\\' => {
+                        state = .escape;
+                    },
+                    else => {
+                        node.string_value.appendAssumeCapacity(c);
+                    },
+                },
+                .escape => switch (c) {
+                    'n' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity('\n');
+                    },
+                    't' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity('\t');
+                    },
+                    '"' => {
+                        state = .start;
+                        node.string_value.appendAssumeCapacity('"');
+                    },
+                    else => return error.InvalidEscapeSequence,
+                },
+            }
+        }
+    }
 };
 
 test {
+    std.testing.refAllDecls(@This());
     _ = @import("parse/test.zig");
 }
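`parseSingleQuoted` collapses `''` to one apostrophe and rejects any other character after a quote; `parseDoubleQuoted` accepts exactly `\n`, `\t`, and `\"`, with anything else yielding `error.InvalidEscapeSequence`. A self-contained sketch of the double-quoted rules, independent of the parser types:

```zig
const std = @import("std");

// Standalone restatement of the diff's double-quoted escape rules.
fn unescapeDouble(allocator: std.mem.Allocator, raw: []const u8) ![]u8 {
    var out = std.ArrayList(u8).init(allocator);
    errdefer out.deinit();
    var i: usize = 0;
    while (i < raw.len) : (i += 1) {
        if (raw[i] != '\\') {
            try out.append(raw[i]);
            continue;
        }
        i += 1;
        if (i >= raw.len) return error.InvalidEscapeSequence;
        switch (raw[i]) {
            'n' => try out.append('\n'),
            't' => try out.append('\t'),
            '"' => try out.append('"'),
            else => return error.InvalidEscapeSequence,
        }
    }
    return out.toOwnedSlice();
}

test "double-quoted escapes (sketch)" {
    const got = try unescapeDouble(std.testing.allocator, "a newline\\nand a\\ttab");
    defer std.testing.allocator.free(got);
    try std.testing.expectEqualStrings("a newline\nand a\ttab", got);
}
```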
@@ -21,45 +21,45 @@ test "explicit doc" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
 
     const directive = tree.tokens[doc.directive.?];
-    try testing.expectEqual(directive.id, .Literal);
-    try testing.expect(mem.eql(u8, "tapi-tbd", tree.source[directive.start..directive.end]));
+    try testing.expectEqual(directive.id, .literal);
+    try testing.expectEqualStrings("tapi-tbd", tree.source[directive.start..directive.end]);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 5);
-    try testing.expectEqual(map.end.?, 14);
+    try testing.expectEqual(map.base.start, 5);
+    try testing.expectEqual(map.base.end, 14);
     try testing.expectEqual(map.values.items.len, 2);
 
     {
         const entry = map.values.items[0];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(u8, "tbd-version", tree.source[key.start..key.end]));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("tbd-version", tree.source[key.start..key.end]);
 
-        const value = entry.value.cast(Node.Value).?;
-        const value_tok = tree.tokens[value.start.?];
-        try testing.expectEqual(value_tok.id, .Literal);
-        try testing.expect(mem.eql(u8, "4", tree.source[value_tok.start..value_tok.end]));
+        const value = entry.value.?.cast(Node.Value).?;
+        const value_tok = tree.tokens[value.base.start];
+        try testing.expectEqual(value_tok.id, .literal);
+        try testing.expectEqualStrings("4", tree.source[value_tok.start..value_tok.end]);
     }
 
     {
         const entry = map.values.items[1];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(u8, "abc-version", tree.source[key.start..key.end]));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("abc-version", tree.source[key.start..key.end]);
 
-        const value = entry.value.cast(Node.Value).?;
-        const value_tok = tree.tokens[value.start.?];
-        try testing.expectEqual(value_tok.id, .Literal);
-        try testing.expect(mem.eql(u8, "5", tree.source[value_tok.start..value_tok.end]));
+        const value = entry.value.?.cast(Node.Value).?;
+        const value_tok = tree.tokens[value.base.start];
+        try testing.expectEqual(value_tok.id, .literal);
+        try testing.expectEqualStrings("5", tree.source[value_tok.start..value_tok.end]);
     }
 }
 
@@ -77,39 +77,31 @@ test "leaf in quotes" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
     try testing.expect(doc.directive == null);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 0);
-    try testing.expectEqual(map.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(map.base.start, 0);
+    try testing.expectEqual(map.base.end, tree.tokens.len - 2);
     try testing.expectEqual(map.values.items.len, 3);
 
     {
         const entry = map.values.items[0];
 
         const key = tree.tokens[entry.key];
-        try testing.expectEqual(key.id, .Literal);
-        try testing.expect(mem.eql(
-            u8,
-            "key1",
-            tree.source[key.start..key.end],
-        ));
+        try testing.expectEqual(key.id, .literal);
+        try testing.expectEqualStrings("key1", tree.source[key.start..key.end]);
 
-        const value = entry.value.cast(Node.Value).?;
-        const start = tree.tokens[value.start.?];
-        const end = tree.tokens[value.end.?];
-        try testing.expectEqual(start.id, .Literal);
-        try testing.expectEqual(end.id, .Literal);
-        try testing.expect(mem.eql(
-            u8,
-            "no quotes",
-            tree.source[start.start..end.end],
-        ));
+        const value = entry.value.?.cast(Node.Value).?;
+        const start = tree.tokens[value.base.start];
+        const end = tree.tokens[value.base.end];
+        try testing.expectEqual(start.id, .literal);
+        try testing.expectEqual(end.id, .literal);
+        try testing.expectEqualStrings("no quotes", tree.source[start.start..end.end]);
     }
 }
 
@@ -128,70 +120,60 @@ test "nested maps" {
     try testing.expectEqual(tree.docs.items.len, 1);
 
     const doc = tree.docs.items[0].cast(Node.Doc).?;
-    try testing.expectEqual(doc.start.?, 0);
-    try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(doc.base.start, 0);
+    try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
     try testing.expect(doc.directive == null);
 
     try testing.expect(doc.value != null);
     try testing.expectEqual(doc.value.?.tag, .map);
 
     const map = doc.value.?.cast(Node.Map).?;
-    try testing.expectEqual(map.start.?, 0);
-    try testing.expectEqual(map.end.?, tree.tokens.len - 2);
+    try testing.expectEqual(map.base.start, 0);
+    try testing.expectEqual(map.base.end, tree.tokens.len - 2);
    try testing.expectEqual(map.values.items.len, 2);
 
{
|
{
|
||||||
const entry = map.values.items[0];
|
const entry = map.values.items[0];
|
||||||
|
|
||||||
const key = tree.tokens[entry.key];
|
const key = tree.tokens[entry.key];
|
||||||
try testing.expectEqual(key.id, .Literal);
|
try testing.expectEqual(key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "key1", tree.source[key.start..key.end]));
|
try testing.expectEqualStrings("key1", tree.source[key.start..key.end]);
|
||||||
|
|
||||||
const nested_map = entry.value.cast(Node.Map).?;
|
const nested_map = entry.value.?.cast(Node.Map).?;
|
||||||
try testing.expectEqual(nested_map.start.?, 4);
|
try testing.expectEqual(nested_map.base.start, 4);
|
||||||
try testing.expectEqual(nested_map.end.?, 16);
|
try testing.expectEqual(nested_map.base.end, 16);
|
||||||
try testing.expectEqual(nested_map.values.items.len, 2);
|
try testing.expectEqual(nested_map.values.items.len, 2);
|
||||||
|
|
||||||
{
|
{
|
||||||
const nested_entry = nested_map.values.items[0];
|
const nested_entry = nested_map.values.items[0];
|
||||||
|
|
||||||
const nested_key = tree.tokens[nested_entry.key];
|
const nested_key = tree.tokens[nested_entry.key];
|
||||||
try testing.expectEqual(nested_key.id, .Literal);
|
try testing.expectEqual(nested_key.id, .literal);
|
||||||
try testing.expect(mem.eql(
|
try testing.expectEqualStrings("key1_1", tree.source[nested_key.start..nested_key.end]);
|
||||||
u8,
|
|
||||||
"key1_1",
|
|
||||||
tree.source[nested_key.start..nested_key.end],
|
|
||||||
));
|
|
||||||
|
|
||||||
const nested_value = nested_entry.value.cast(Node.Value).?;
|
const nested_value = nested_entry.value.?.cast(Node.Value).?;
|
||||||
const nested_value_tok = tree.tokens[nested_value.start.?];
|
const nested_value_tok = tree.tokens[nested_value.base.start];
|
||||||
try testing.expectEqual(nested_value_tok.id, .Literal);
|
try testing.expectEqual(nested_value_tok.id, .literal);
|
||||||
try testing.expect(mem.eql(
|
try testing.expectEqualStrings(
|
||||||
u8,
|
|
||||||
"value1_1",
|
"value1_1",
|
||||||
tree.source[nested_value_tok.start..nested_value_tok.end],
|
tree.source[nested_value_tok.start..nested_value_tok.end],
|
||||||
));
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
const nested_entry = nested_map.values.items[1];
|
const nested_entry = nested_map.values.items[1];
|
||||||
|
|
||||||
const nested_key = tree.tokens[nested_entry.key];
|
const nested_key = tree.tokens[nested_entry.key];
|
||||||
try testing.expectEqual(nested_key.id, .Literal);
|
try testing.expectEqual(nested_key.id, .literal);
|
||||||
try testing.expect(mem.eql(
|
try testing.expectEqualStrings("key1_2", tree.source[nested_key.start..nested_key.end]);
|
||||||
u8,
|
|
||||||
"key1_2",
|
|
||||||
tree.source[nested_key.start..nested_key.end],
|
|
||||||
));
|
|
||||||
|
|
||||||
const nested_value = nested_entry.value.cast(Node.Value).?;
|
const nested_value = nested_entry.value.?.cast(Node.Value).?;
|
||||||
const nested_value_tok = tree.tokens[nested_value.start.?];
|
const nested_value_tok = tree.tokens[nested_value.base.start];
|
||||||
try testing.expectEqual(nested_value_tok.id, .Literal);
|
try testing.expectEqual(nested_value_tok.id, .literal);
|
||||||
try testing.expect(mem.eql(
|
try testing.expectEqualStrings(
|
||||||
u8,
|
|
||||||
"value1_2",
|
"value1_2",
|
||||||
tree.source[nested_value_tok.start..nested_value_tok.end],
|
tree.source[nested_value_tok.start..nested_value_tok.end],
|
||||||
));
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -199,17 +181,13 @@ test "nested maps" {
|
|||||||
const entry = map.values.items[1];
|
const entry = map.values.items[1];
|
||||||
|
|
||||||
const key = tree.tokens[entry.key];
|
const key = tree.tokens[entry.key];
|
||||||
try testing.expectEqual(key.id, .Literal);
|
try testing.expectEqual(key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "key2", tree.source[key.start..key.end]));
|
try testing.expectEqualStrings("key2", tree.source[key.start..key.end]);
|
||||||
|
|
||||||
const value = entry.value.cast(Node.Value).?;
|
const value = entry.value.?.cast(Node.Value).?;
|
||||||
const value_tok = tree.tokens[value.start.?];
|
const value_tok = tree.tokens[value.base.start];
|
||||||
try testing.expectEqual(value_tok.id, .Literal);
|
try testing.expectEqual(value_tok.id, .literal);
|
||||||
try testing.expect(mem.eql(
|
try testing.expectEqualStrings("value2", tree.source[value_tok.start..value_tok.end]);
|
||||||
u8,
|
|
||||||
"value2",
|
|
||||||
tree.source[value_tok.start..value_tok.end],
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -227,46 +205,46 @@ test "map of list of values" {
|
|||||||
try testing.expectEqual(tree.docs.items.len, 1);
|
try testing.expectEqual(tree.docs.items.len, 1);
|
||||||
|
|
||||||
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
||||||
try testing.expectEqual(doc.start.?, 0);
|
try testing.expectEqual(doc.base.start, 0);
|
||||||
try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
|
||||||
|
|
||||||
try testing.expect(doc.value != null);
|
try testing.expect(doc.value != null);
|
||||||
try testing.expectEqual(doc.value.?.tag, .map);
|
try testing.expectEqual(doc.value.?.tag, .map);
|
||||||
|
|
||||||
const map = doc.value.?.cast(Node.Map).?;
|
const map = doc.value.?.cast(Node.Map).?;
|
||||||
try testing.expectEqual(map.start.?, 0);
|
try testing.expectEqual(map.base.start, 0);
|
||||||
try testing.expectEqual(map.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(map.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(map.values.items.len, 1);
|
try testing.expectEqual(map.values.items.len, 1);
|
||||||
|
|
||||||
const entry = map.values.items[0];
|
const entry = map.values.items[0];
|
||||||
const key = tree.tokens[entry.key];
|
const key = tree.tokens[entry.key];
|
||||||
try testing.expectEqual(key.id, .Literal);
|
try testing.expectEqual(key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "ints", tree.source[key.start..key.end]));
|
try testing.expectEqualStrings("ints", tree.source[key.start..key.end]);
|
||||||
|
|
||||||
const value = entry.value.cast(Node.List).?;
|
const value = entry.value.?.cast(Node.List).?;
|
||||||
try testing.expectEqual(value.start.?, 4);
|
try testing.expectEqual(value.base.start, 4);
|
||||||
try testing.expectEqual(value.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(value.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(value.values.items.len, 3);
|
try testing.expectEqual(value.values.items.len, 3);
|
||||||
|
|
||||||
{
|
{
|
||||||
const elem = value.values.items[0].cast(Node.Value).?;
|
const elem = value.values.items[0].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[elem.start.?];
|
const leaf = tree.tokens[elem.base.start];
|
||||||
try testing.expectEqual(leaf.id, .Literal);
|
try testing.expectEqual(leaf.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "0", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("0", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
const elem = value.values.items[1].cast(Node.Value).?;
|
const elem = value.values.items[1].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[elem.start.?];
|
const leaf = tree.tokens[elem.base.start];
|
||||||
try testing.expectEqual(leaf.id, .Literal);
|
try testing.expectEqual(leaf.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "1", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("1", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
const elem = value.values.items[2].cast(Node.Value).?;
|
const elem = value.values.items[2].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[elem.start.?];
|
const leaf = tree.tokens[elem.base.start];
|
||||||
try testing.expectEqual(leaf.id, .Literal);
|
try testing.expectEqual(leaf.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "2", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("2", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -285,64 +263,64 @@ test "map of list of maps" {
|
|||||||
try testing.expectEqual(tree.docs.items.len, 1);
|
try testing.expectEqual(tree.docs.items.len, 1);
|
||||||
|
|
||||||
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
||||||
try testing.expectEqual(doc.start.?, 0);
|
try testing.expectEqual(doc.base.start, 0);
|
||||||
try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
|
||||||
|
|
||||||
try testing.expect(doc.value != null);
|
try testing.expect(doc.value != null);
|
||||||
try testing.expectEqual(doc.value.?.tag, .map);
|
try testing.expectEqual(doc.value.?.tag, .map);
|
||||||
|
|
||||||
const map = doc.value.?.cast(Node.Map).?;
|
const map = doc.value.?.cast(Node.Map).?;
|
||||||
try testing.expectEqual(map.start.?, 0);
|
try testing.expectEqual(map.base.start, 0);
|
||||||
try testing.expectEqual(map.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(map.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(map.values.items.len, 1);
|
try testing.expectEqual(map.values.items.len, 1);
|
||||||
|
|
||||||
const entry = map.values.items[0];
|
const entry = map.values.items[0];
|
||||||
const key = tree.tokens[entry.key];
|
const key = tree.tokens[entry.key];
|
||||||
try testing.expectEqual(key.id, .Literal);
|
try testing.expectEqual(key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "key1", tree.source[key.start..key.end]));
|
try testing.expectEqualStrings("key1", tree.source[key.start..key.end]);
|
||||||
|
|
||||||
const value = entry.value.cast(Node.List).?;
|
const value = entry.value.?.cast(Node.List).?;
|
||||||
try testing.expectEqual(value.start.?, 3);
|
try testing.expectEqual(value.base.start, 3);
|
||||||
try testing.expectEqual(value.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(value.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(value.values.items.len, 3);
|
try testing.expectEqual(value.values.items.len, 3);
|
||||||
|
|
||||||
{
|
{
|
||||||
const elem = value.values.items[0].cast(Node.Map).?;
|
const elem = value.values.items[0].cast(Node.Map).?;
|
||||||
const nested = elem.values.items[0];
|
const nested = elem.values.items[0];
|
||||||
const nested_key = tree.tokens[nested.key];
|
const nested_key = tree.tokens[nested.key];
|
||||||
try testing.expectEqual(nested_key.id, .Literal);
|
try testing.expectEqual(nested_key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "key2", tree.source[nested_key.start..nested_key.end]));
|
try testing.expectEqualStrings("key2", tree.source[nested_key.start..nested_key.end]);
|
||||||
|
|
||||||
const nested_v = nested.value.cast(Node.Value).?;
|
const nested_v = nested.value.?.cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[nested_v.start.?];
|
const leaf = tree.tokens[nested_v.base.start];
|
||||||
try testing.expectEqual(leaf.id, .Literal);
|
try testing.expectEqual(leaf.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "value2", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("value2", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
const elem = value.values.items[1].cast(Node.Map).?;
|
const elem = value.values.items[1].cast(Node.Map).?;
|
||||||
const nested = elem.values.items[0];
|
const nested = elem.values.items[0];
|
||||||
const nested_key = tree.tokens[nested.key];
|
const nested_key = tree.tokens[nested.key];
|
||||||
try testing.expectEqual(nested_key.id, .Literal);
|
try testing.expectEqual(nested_key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "key3", tree.source[nested_key.start..nested_key.end]));
|
try testing.expectEqualStrings("key3", tree.source[nested_key.start..nested_key.end]);
|
||||||
|
|
||||||
const nested_v = nested.value.cast(Node.Value).?;
|
const nested_v = nested.value.?.cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[nested_v.start.?];
|
const leaf = tree.tokens[nested_v.base.start];
|
||||||
try testing.expectEqual(leaf.id, .Literal);
|
try testing.expectEqual(leaf.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "value3", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("value3", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
const elem = value.values.items[2].cast(Node.Map).?;
|
const elem = value.values.items[2].cast(Node.Map).?;
|
||||||
const nested = elem.values.items[0];
|
const nested = elem.values.items[0];
|
||||||
const nested_key = tree.tokens[nested.key];
|
const nested_key = tree.tokens[nested.key];
|
||||||
try testing.expectEqual(nested_key.id, .Literal);
|
try testing.expectEqual(nested_key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "key4", tree.source[nested_key.start..nested_key.end]));
|
try testing.expectEqualStrings("key4", tree.source[nested_key.start..nested_key.end]);
|
||||||
|
|
||||||
const nested_v = nested.value.cast(Node.Value).?;
|
const nested_v = nested.value.?.cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[nested_v.start.?];
|
const leaf = tree.tokens[nested_v.base.start];
|
||||||
try testing.expectEqual(leaf.id, .Literal);
|
try testing.expectEqual(leaf.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "value4", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("value4", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -360,15 +338,15 @@ test "list of lists" {
|
|||||||
try testing.expectEqual(tree.docs.items.len, 1);
|
try testing.expectEqual(tree.docs.items.len, 1);
|
||||||
|
|
||||||
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
||||||
try testing.expectEqual(doc.start.?, 0);
|
try testing.expectEqual(doc.base.start, 0);
|
||||||
try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
|
||||||
|
|
||||||
try testing.expect(doc.value != null);
|
try testing.expect(doc.value != null);
|
||||||
try testing.expectEqual(doc.value.?.tag, .list);
|
try testing.expectEqual(doc.value.?.tag, .list);
|
||||||
|
|
||||||
const list = doc.value.?.cast(Node.List).?;
|
const list = doc.value.?.cast(Node.List).?;
|
||||||
try testing.expectEqual(list.start.?, 0);
|
try testing.expectEqual(list.base.start, 0);
|
||||||
try testing.expectEqual(list.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(list.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(list.values.items.len, 3);
|
try testing.expectEqual(list.values.items.len, 3);
|
||||||
|
|
||||||
{
|
{
|
||||||
@ -379,22 +357,22 @@ test "list of lists" {
|
|||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[0].tag, .value);
|
try testing.expectEqual(nested.values.items[0].tag, .value);
|
||||||
const value = nested.values.items[0].cast(Node.Value).?;
|
const value = nested.values.items[0].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("name", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[1].tag, .value);
|
try testing.expectEqual(nested.values.items[1].tag, .value);
|
||||||
const value = nested.values.items[1].cast(Node.Value).?;
|
const value = nested.values.items[1].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("hr", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[2].tag, .value);
|
try testing.expectEqual(nested.values.items[2].tag, .value);
|
||||||
const value = nested.values.items[2].cast(Node.Value).?;
|
const value = nested.values.items[2].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("avg", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -406,23 +384,23 @@ test "list of lists" {
|
|||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[0].tag, .value);
|
try testing.expectEqual(nested.values.items[0].tag, .value);
|
||||||
const value = nested.values.items[0].cast(Node.Value).?;
|
const value = nested.values.items[0].cast(Node.Value).?;
|
||||||
const start = tree.tokens[value.start.?];
|
const start = tree.tokens[value.base.start];
|
||||||
const end = tree.tokens[value.end.?];
|
const end = tree.tokens[value.base.end];
|
||||||
try testing.expect(mem.eql(u8, "Mark McGwire", tree.source[start.start..end.end]));
|
try testing.expectEqualStrings("Mark McGwire", tree.source[start.start..end.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[1].tag, .value);
|
try testing.expectEqual(nested.values.items[1].tag, .value);
|
||||||
const value = nested.values.items[1].cast(Node.Value).?;
|
const value = nested.values.items[1].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "65", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("65", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[2].tag, .value);
|
try testing.expectEqual(nested.values.items[2].tag, .value);
|
||||||
const value = nested.values.items[2].cast(Node.Value).?;
|
const value = nested.values.items[2].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "0.278", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("0.278", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -434,23 +412,23 @@ test "list of lists" {
|
|||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[0].tag, .value);
|
try testing.expectEqual(nested.values.items[0].tag, .value);
|
||||||
const value = nested.values.items[0].cast(Node.Value).?;
|
const value = nested.values.items[0].cast(Node.Value).?;
|
||||||
const start = tree.tokens[value.start.?];
|
const start = tree.tokens[value.base.start];
|
||||||
const end = tree.tokens[value.end.?];
|
const end = tree.tokens[value.base.end];
|
||||||
try testing.expect(mem.eql(u8, "Sammy Sosa", tree.source[start.start..end.end]));
|
try testing.expectEqualStrings("Sammy Sosa", tree.source[start.start..end.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[1].tag, .value);
|
try testing.expectEqual(nested.values.items[1].tag, .value);
|
||||||
const value = nested.values.items[1].cast(Node.Value).?;
|
const value = nested.values.items[1].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "63", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("63", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(nested.values.items[2].tag, .value);
|
try testing.expectEqual(nested.values.items[2].tag, .value);
|
||||||
const value = nested.values.items[2].cast(Node.Value).?;
|
const value = nested.values.items[2].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "0.288", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("0.288", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -467,36 +445,36 @@ test "inline list" {
|
|||||||
try testing.expectEqual(tree.docs.items.len, 1);
|
try testing.expectEqual(tree.docs.items.len, 1);
|
||||||
|
|
||||||
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
||||||
try testing.expectEqual(doc.start.?, 0);
|
try testing.expectEqual(doc.base.start, 0);
|
||||||
try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
|
||||||
|
|
||||||
try testing.expect(doc.value != null);
|
try testing.expect(doc.value != null);
|
||||||
try testing.expectEqual(doc.value.?.tag, .list);
|
try testing.expectEqual(doc.value.?.tag, .list);
|
||||||
|
|
||||||
const list = doc.value.?.cast(Node.List).?;
|
const list = doc.value.?.cast(Node.List).?;
|
||||||
try testing.expectEqual(list.start.?, 0);
|
try testing.expectEqual(list.base.start, 0);
|
||||||
try testing.expectEqual(list.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(list.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(list.values.items.len, 3);
|
try testing.expectEqual(list.values.items.len, 3);
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(list.values.items[0].tag, .value);
|
try testing.expectEqual(list.values.items[0].tag, .value);
|
||||||
const value = list.values.items[0].cast(Node.Value).?;
|
const value = list.values.items[0].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("name", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(list.values.items[1].tag, .value);
|
try testing.expectEqual(list.values.items[1].tag, .value);
|
||||||
const value = list.values.items[1].cast(Node.Value).?;
|
const value = list.values.items[1].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("hr", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(list.values.items[2].tag, .value);
|
try testing.expectEqual(list.values.items[2].tag, .value);
|
||||||
const value = list.values.items[2].cast(Node.Value).?;
|
const value = list.values.items[2].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("avg", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -514,45 +492,273 @@ test "inline list as mapping value" {
|
|||||||
try testing.expectEqual(tree.docs.items.len, 1);
|
try testing.expectEqual(tree.docs.items.len, 1);
|
||||||
|
|
||||||
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
const doc = tree.docs.items[0].cast(Node.Doc).?;
|
||||||
try testing.expectEqual(doc.start.?, 0);
|
try testing.expectEqual(doc.base.start, 0);
|
||||||
try testing.expectEqual(doc.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(doc.base.end, tree.tokens.len - 2);
|
||||||
|
|
||||||
try testing.expect(doc.value != null);
|
try testing.expect(doc.value != null);
|
||||||
try testing.expectEqual(doc.value.?.tag, .map);
|
try testing.expectEqual(doc.value.?.tag, .map);
|
||||||
|
|
||||||
const map = doc.value.?.cast(Node.Map).?;
|
const map = doc.value.?.cast(Node.Map).?;
|
||||||
try testing.expectEqual(map.start.?, 0);
|
try testing.expectEqual(map.base.start, 0);
|
||||||
try testing.expectEqual(map.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(map.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(map.values.items.len, 1);
|
try testing.expectEqual(map.values.items.len, 1);
|
||||||
|
|
||||||
const entry = map.values.items[0];
|
const entry = map.values.items[0];
|
||||||
const key = tree.tokens[entry.key];
|
const key = tree.tokens[entry.key];
|
||||||
try testing.expectEqual(key.id, .Literal);
|
try testing.expectEqual(key.id, .literal);
|
||||||
try testing.expect(mem.eql(u8, "key", tree.source[key.start..key.end]));
|
try testing.expectEqualStrings("key", tree.source[key.start..key.end]);
|
||||||
|
|
||||||
const list = entry.value.cast(Node.List).?;
|
const list = entry.value.?.cast(Node.List).?;
|
||||||
try testing.expectEqual(list.start.?, 4);
|
try testing.expectEqual(list.base.start, 4);
|
||||||
try testing.expectEqual(list.end.?, tree.tokens.len - 2);
|
try testing.expectEqual(list.base.end, tree.tokens.len - 2);
|
||||||
try testing.expectEqual(list.values.items.len, 3);
|
try testing.expectEqual(list.values.items.len, 3);
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(list.values.items[0].tag, .value);
|
try testing.expectEqual(list.values.items[0].tag, .value);
|
||||||
const value = list.values.items[0].cast(Node.Value).?;
|
const value = list.values.items[0].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("name", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(list.values.items[1].tag, .value);
|
try testing.expectEqual(list.values.items[1].tag, .value);
|
||||||
const value = list.values.items[1].cast(Node.Value).?;
|
const value = list.values.items[1].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("hr", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
try testing.expectEqual(list.values.items[2].tag, .value);
|
try testing.expectEqual(list.values.items[2].tag, .value);
|
||||||
const value = list.values.items[2].cast(Node.Value).?;
|
const value = list.values.items[2].cast(Node.Value).?;
|
||||||
const leaf = tree.tokens[value.start.?];
|
const leaf = tree.tokens[value.base.start];
|
||||||
try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end]));
|
try testing.expectEqualStrings("avg", tree.source[leaf.start..leaf.end]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn parseSuccess(comptime source: []const u8) !void {
|
||||||
|
var tree = Tree.init(testing.allocator);
|
||||||
|
defer tree.deinit();
|
||||||
|
try tree.parse(source);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parseError(comptime source: []const u8, err: parse.ParseError) !void {
|
||||||
|
var tree = Tree.init(testing.allocator);
|
||||||
|
defer tree.deinit();
|
||||||
|
try testing.expectError(err, tree.parse(source));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "empty doc with spaces and comments" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\
|
||||||
|
\\
|
||||||
|
\\ # this is a comment in a weird place
|
||||||
|
\\# and this one is too
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "comment between --- and ! in document start" {
|
||||||
|
try parseError(
|
||||||
|
\\--- # what is it?
|
||||||
|
\\!
|
||||||
|
, error.UnexpectedToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "correct doc start with tag" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\--- !some-tag
|
||||||
|
\\
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "doc close without explicit doc open" {
|
||||||
|
try parseError(
|
||||||
|
\\
|
||||||
|
\\
|
||||||
|
\\# something cool
|
||||||
|
\\...
|
||||||
|
, error.UnexpectedToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "doc open and close are ok" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\---
|
||||||
|
\\# first doc
|
||||||
|
\\
|
||||||
|
\\
|
||||||
|
\\---
|
||||||
|
\\# second doc
|
||||||
|
\\
|
||||||
|
\\
|
||||||
|
\\...
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "doc with a single string is ok" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\a string of some sort
|
||||||
|
\\
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "explicit doc with a single string is ok" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\--- !anchor
|
||||||
|
\\# nothing to see here except one string
|
||||||
|
\\ # not a lot to go on with
|
||||||
|
\\a single string
|
||||||
|
\\...
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "doc with two string is bad" {
|
||||||
|
try parseError(
|
||||||
|
\\first
|
||||||
|
\\second
|
||||||
|
\\# this should fail already
|
||||||
|
, error.UnexpectedToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "single quote string can have new lines" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\'what is this
|
||||||
|
\\ thing?'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "single quote string on one line is fine" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\'here''s an apostrophe'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "double quote string can have new lines" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\"what is this
|
||||||
|
\\ thing?"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "double quote string on one line is fine" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\"a newline\nand a\ttab"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "map with key and value literals" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\key1: val1
|
||||||
|
\\key2 : val2
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "map of maps" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\
|
||||||
|
\\# the first key
|
||||||
|
\\key1:
|
||||||
|
\\ # the first subkey
|
||||||
|
\\ key1_1: 0
|
||||||
|
\\ key1_2: 1
|
||||||
|
\\# the second key
|
||||||
|
\\key2:
|
||||||
|
\\ key2_1: -1
|
||||||
|
\\ key2_2: -2
|
||||||
|
\\# the end of map
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "map value indicator needs to be on the same line" {
|
||||||
|
try parseError(
|
||||||
|
\\a
|
||||||
|
\\ : b
|
||||||
|
, error.UnexpectedToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "value needs to be indented" {
|
||||||
|
try parseError(
|
||||||
|
\\a:
|
||||||
|
\\b
|
||||||
|
, error.MalformedYaml);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "comment between a key and a value is fine" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\a:
|
||||||
|
\\ # this is a value
|
||||||
|
\\ b
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple list" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\# first el
|
||||||
|
\\- a
|
||||||
|
\\# second el
|
||||||
|
\\- b
|
||||||
|
\\# third el
|
||||||
|
\\- c
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "list indentation matters" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\ - a
|
||||||
|
\\- b
|
||||||
|
);
|
||||||
|
|
||||||
|
try parseSuccess(
|
||||||
|
\\- a
|
||||||
|
\\ - b
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "unindented list is fine too" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\a:
|
||||||
|
\\- 0
|
||||||
|
\\- 1
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "empty values in a map" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\a:
|
||||||
|
\\b:
|
||||||
|
\\- 0
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "weirdly nested map of maps of lists" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\a:
|
||||||
|
\\ b:
|
||||||
|
\\ - 0
|
||||||
|
\\ - 1
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "square brackets denote a list" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\[ a,
|
||||||
|
\\ b, c ]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "empty list" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\[ ]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "comment within a bracketed list is an error" {
|
||||||
|
try parseError(
|
||||||
|
\\[ # something
|
||||||
|
\\]
|
||||||
|
, error.MalformedYaml);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "mixed ints with floats in a list" {
|
||||||
|
try parseSuccess(
|
||||||
|
\\[0, 1.0]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
@@ -2,8 +2,7 @@ const std = @import("std");
 const assert = std.debug.assert;
 const math = std.math;
 const mem = std.mem;
-const testing = std.testing;
-const log = std.log.scoped(.tapi);
+const log = std.log.scoped(.yaml);
 
 const Allocator = mem.Allocator;
 const ArenaAllocator = std.heap.ArenaAllocator;
@@ -17,22 +16,15 @@ const ParseError = parse.ParseError;
 
 pub const YamlError = error{
     UnexpectedNodeType,
+    DuplicateMapKey,
     OutOfMemory,
+    CannotEncodeValue,
 } || ParseError || std.fmt.ParseIntError;
 
-pub const ValueType = enum {
-    empty,
-    int,
-    float,
-    string,
-    list,
-    map,
-};
-
 pub const List = []Value;
-pub const Map = std.StringArrayHashMap(Value);
+pub const Map = std.StringHashMap(Value);
 
-pub const Value = union(ValueType) {
+pub const Value = union(enum) {
     empty,
     int: i64,
     float: f64,
@@ -70,9 +62,7 @@ pub const Value = union(ValueType) {
         should_inline_first_key: bool = false,
     };
 
-    pub const StringifyError = std.os.WriteError;
-
-    pub fn stringify(self: Value, writer: anytype, args: StringifyArgs) StringifyError!void {
+    pub fn stringify(self: Value, writer: anytype, args: StringifyArgs) anyerror!void {
         switch (self) {
             .empty => return,
             .int => |int| return writer.print("{}", .{int}),
@@ -83,7 +73,7 @@ pub const Value = union(ValueType) {
                 if (len == 0) return;
 
                 const first = list[0];
-                if (first.is_compound()) {
+                if (first.isCompound()) {
                     for (list, 0..) |elem, i| {
                         try writer.writeByteNTimes(' ', args.indentation);
                         try writer.writeAll("- ");
@@ -108,20 +98,23 @@ pub const Value = union(ValueType) {
                 try writer.writeAll(" ]");
             },
             .map => |map| {
-                const keys = map.keys();
-                const len = keys.len;
+                const len = map.count();
                 if (len == 0) return;
 
-                for (keys, 0..) |key, i| {
+                var i: usize = 0;
+                var it = map.iterator();
+                while (it.next()) |entry| {
+                    const key = entry.key_ptr.*;
+                    const value = entry.value_ptr.*;
+
                     if (!args.should_inline_first_key or i != 0) {
                         try writer.writeByteNTimes(' ', args.indentation);
                     }
                     try writer.print("{s}: ", .{key});
 
-                    const value = map.get(key) orelse unreachable;
                     const should_inline = blk: {
-                        if (!value.is_compound()) break :blk true;
-                        if (value == .list and value.list.len > 0 and !value.list[0].is_compound()) break :blk true;
+                        if (!value.isCompound()) break :blk true;
+                        if (value == .list and value.list.len > 0 and !value.list[0].isCompound()) break :blk true;
                         break :blk false;
                     };
 
@@ -137,35 +130,44 @@ pub const Value = union(ValueType) {
                     if (i < len - 1) {
                         try writer.writeByte('\n');
                     }
+
+                    i += 1;
                 }
             },
         }
    }
 
-    fn is_compound(self: Value) bool {
+    fn isCompound(self: Value) bool {
        return switch (self) {
            .list, .map => true,
            else => false,
        };
    }
 
-    fn fromNode(arena: Allocator, tree: *const Tree, node: *const Node, type_hint: ?ValueType) YamlError!Value {
+    fn fromNode(arena: Allocator, tree: *const Tree, node: *const Node) YamlError!Value {
        if (node.cast(Node.Doc)) |doc| {
            const inner = doc.value orelse {
                // empty doc
                return Value{ .empty = {} };
            };
-            return Value.fromNode(arena, tree, inner, null);
+            return Value.fromNode(arena, tree, inner);
        } else if (node.cast(Node.Map)) |map| {
-            var out_map = std.StringArrayHashMap(Value).init(arena);
-            try out_map.ensureUnusedCapacity(map.values.items.len);
+            // TODO use ContextAdapted HashMap and do not duplicate keys, intern
+            // in a contiguous string buffer.
+            var out_map = std.StringHashMap(Value).init(arena);
+            try out_map.ensureUnusedCapacity(math.cast(u32, map.values.items.len) orelse return error.Overflow);
 
            for (map.values.items) |entry| {
-                const key_tok = tree.tokens[entry.key];
-                const key = try arena.dupe(u8, tree.source[key_tok.start..key_tok.end]);
-                const value = try Value.fromNode(arena, tree, entry.value, null);
-
-                out_map.putAssumeCapacityNoClobber(key, value);
+                const key = try arena.dupe(u8, tree.getRaw(entry.key, entry.key));
+                const gop = out_map.getOrPutAssumeCapacity(key);
+                if (gop.found_existing) {
+                    return error.DuplicateMapKey;
+                }
+                const value = if (entry.value) |value|
+                    try Value.fromNode(arena, tree, value)
+                else
+                    .empty;
+                gop.value_ptr.* = value;
            }
 
            return Value{ .map = out_map };
@@ -173,56 +175,124 @@ pub const Value = union(ValueType) {
            var out_list = std.ArrayList(Value).init(arena);
            try out_list.ensureUnusedCapacity(list.values.items.len);
 
-            if (list.values.items.len > 0) {
-                const hint = if (list.values.items[0].cast(Node.Value)) |value| hint: {
-                    const start = tree.tokens[value.start.?];
-                    const end = tree.tokens[value.end.?];
-                    const raw = tree.source[start.start..end.end];
-                    _ = std.fmt.parseInt(i64, raw, 10) catch {
-                        _ = std.fmt.parseFloat(f64, raw) catch {
-                            break :hint ValueType.string;
-                        };
-                        break :hint ValueType.float;
-                    };
-                    break :hint ValueType.int;
-                } else null;
-
-                for (list.values.items) |elem| {
-                    const value = try Value.fromNode(arena, tree, elem, hint);
-                    out_list.appendAssumeCapacity(value);
-                }
+            for (list.values.items) |elem| {
+                const value = try Value.fromNode(arena, tree, elem);
+                out_list.appendAssumeCapacity(value);
            }
 
            return Value{ .list = try out_list.toOwnedSlice() };
        } else if (node.cast(Node.Value)) |value| {
-            const start = tree.tokens[value.start.?];
-            const end = tree.tokens[value.end.?];
-            const raw = tree.source[start.start..end.end];
+            const raw = tree.getRaw(node.start, node.end);
 
-            if (type_hint) |hint| {
-                return switch (hint) {
-                    .int => Value{ .int = try std.fmt.parseInt(i64, raw, 10) },
-                    .float => Value{ .float = try std.fmt.parseFloat(f64, raw) },
-                    .string => Value{ .string = try arena.dupe(u8, raw) },
-                    else => unreachable,
-                };
-            }
-
            try_int: {
                // TODO infer base for int
                const int = std.fmt.parseInt(i64, raw, 10) catch break :try_int;
                return Value{ .int = int };
            }
 
            try_float: {
                const float = std.fmt.parseFloat(f64, raw) catch break :try_float;
                return Value{ .float = float };
            }
-            return Value{ .string = try arena.dupe(u8, raw) };
+
+            return Value{ .string = try arena.dupe(u8, value.string_value.items) };
        } else {
            log.err("Unexpected node type: {}", .{node.tag});
            return error.UnexpectedNodeType;
        }
    }
 
+    fn encode(arena: Allocator, input: anytype) YamlError!?Value {
+        switch (@typeInfo(@TypeOf(input))) {
+            .ComptimeInt,
+            .Int,
+            => return Value{ .int = math.cast(i64, input) orelse return error.Overflow },
+
+            .Float => return Value{ .float = math.lossyCast(f64, input) },
+
+            .Struct => |info| if (info.is_tuple) {
+                var list = std.ArrayList(Value).init(arena);
+                errdefer list.deinit();
+                try list.ensureTotalCapacityPrecise(info.fields.len);
+
+                inline for (info.fields) |field| {
+                    if (try encode(arena, @field(input, field.name))) |value| {
+                        list.appendAssumeCapacity(value);
+                    }
+                }
+
+                return Value{ .list = try list.toOwnedSlice() };
+            } else {
+                var map = Map.init(arena);
+                errdefer map.deinit();
+                try map.ensureTotalCapacity(info.fields.len);
+
+                inline for (info.fields) |field| {
+                    if (try encode(arena, @field(input, field.name))) |value| {
+                        const key = try arena.dupe(u8, field.name);
+                        map.putAssumeCapacityNoClobber(key, value);
+                    }
+                }
+
+                return Value{ .map = map };
+            },
+
+            .Union => |info| if (info.tag_type) |tag_type| {
+                inline for (info.fields) |field| {
+                    if (@field(tag_type, field.name) == input) {
+                        return try encode(arena, @field(input, field.name));
+                    }
+                } else unreachable;
+            } else return error.UntaggedUnion,
+
+            .Array => return encode(arena, &input),
+
+            .Pointer => |info| switch (info.size) {
+                .One => switch (@typeInfo(info.child)) {
+                    .Array => |child_info| {
+                        const Slice = []const child_info.child;
+                        return encode(arena, @as(Slice, input));
+                    },
+                    else => {
+                        @compileError("Unhandled type: {s}" ++ @typeName(info.child));
+                    },
+                },
+                .Slice => {
+                    if (info.child == u8) {
+                        return Value{ .string = try arena.dupe(u8, input) };
+                    }
+
+                    var list = std.ArrayList(Value).init(arena);
+                    errdefer list.deinit();
+                    try list.ensureTotalCapacityPrecise(input.len);
+
+                    for (input) |elem| {
+                        if (try encode(arena, elem)) |value| {
+                            list.appendAssumeCapacity(value);
+                        } else {
+                            log.err("Could not encode value in a list: {any}", .{elem});
+                            return error.CannotEncodeValue;
+                        }
+                    }
+
+                    return Value{ .list = try list.toOwnedSlice() };
+                },
+                else => {
+                    @compileError("Unhandled type: {s}" ++ @typeName(@TypeOf(input)));
+                },
+            },
+
+            // TODO we should probably have an option to encode `null` and also
+            // allow for some default value too.
+            .Optional => return if (input) |val| encode(arena, val) else null,
+
+            .Null => return null,
+
+            else => {
+                @compileError("Unhandled type: {s}" ++ @typeName(@TypeOf(input)));
+            },
+        }
+    }
 };
|
|
||||||
pub const Yaml = struct {
|
pub const Yaml = struct {
|
||||||
@ -234,30 +304,18 @@ pub const Yaml = struct {
|
|||||||
self.arena.deinit();
|
self.arena.deinit();
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn stringify(self: Yaml, writer: anytype) !void {
|
|
||||||
for (self.docs.items) |doc| {
|
|
||||||
// if (doc.directive) |directive| {
|
|
||||||
// try writer.print("--- !{s}\n", .{directive});
|
|
||||||
// }
|
|
||||||
try doc.stringify(writer, .{});
|
|
||||||
// if (doc.directive != null) {
|
|
||||||
// try writer.writeAll("...\n");
|
|
||||||
// }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn load(allocator: Allocator, source: []const u8) !Yaml {
|
pub fn load(allocator: Allocator, source: []const u8) !Yaml {
|
||||||
var arena = ArenaAllocator.init(allocator);
|
var arena = ArenaAllocator.init(allocator);
|
||||||
const arena_allocator = arena.allocator();
|
errdefer arena.deinit();
|
||||||
|
|
||||||
var tree = Tree.init(arena_allocator);
|
var tree = Tree.init(arena.allocator());
|
||||||
try tree.parse(source);
|
try tree.parse(source);
|
||||||
|
|
||||||
var docs = std.ArrayList(Value).init(arena_allocator);
|
var docs = std.ArrayList(Value).init(arena.allocator());
|
||||||
try docs.ensureUnusedCapacity(tree.docs.items.len);
|
try docs.ensureTotalCapacityPrecise(tree.docs.items.len);
|
||||||
|
|
||||||
for (tree.docs.items) |node| {
|
for (tree.docs.items) |node| {
|
||||||
const value = try Value.fromNode(arena_allocator, &tree, node, null);
|
const value = try Value.fromNode(arena.allocator(), &tree, node);
|
||||||
docs.appendAssumeCapacity(value);
|
docs.appendAssumeCapacity(value);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -316,17 +374,19 @@ pub const Yaml = struct {
|
|||||||
|
|
||||||
fn parseValue(self: *Yaml, comptime T: type, value: Value) Error!T {
|
fn parseValue(self: *Yaml, comptime T: type, value: Value) Error!T {
|
||||||
return switch (@typeInfo(T)) {
|
return switch (@typeInfo(T)) {
|
||||||
.Int => math.cast(T, try value.asInt()) orelse error.Overflow,
|
.Int => math.cast(T, try value.asInt()) orelse return error.Overflow,
|
||||||
.Float => math.lossyCast(T, try value.asFloat()),
|
.Float => if (value.asFloat()) |float| {
|
||||||
|
return math.lossyCast(T, float);
|
||||||
|
} else |_| {
|
||||||
|
return math.lossyCast(T, try value.asInt());
|
||||||
|
},
|
||||||
.Struct => self.parseStruct(T, try value.asMap()),
|
.Struct => self.parseStruct(T, try value.asMap()),
|
||||||
.Union => self.parseUnion(T, value),
|
.Union => self.parseUnion(T, value),
|
||||||
.Array => self.parseArray(T, try value.asList()),
|
.Array => self.parseArray(T, try value.asList()),
|
||||||
.Pointer => {
|
.Pointer => if (value.asList()) |list| {
|
||||||
if (value.asList()) |list| {
|
return self.parsePointer(T, .{ .list = list });
|
||||||
return self.parsePointer(T, .{ .list = list });
|
} else |_| {
|
||||||
} else |_| {
|
return self.parsePointer(T, .{ .string = try value.asString() });
|
||||||
return self.parsePointer(T, .{ .string = try value.asString() });
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
.Void => error.TypeMismatch,
|
.Void => error.TypeMismatch,
|
||||||
.Optional => unreachable,
|
.Optional => unreachable,
|
||||||
@ -372,7 +432,7 @@ pub const Yaml = struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const unwrapped = value orelse {
|
const unwrapped = value orelse {
|
||||||
log.debug("missing struct field: {s}: {s}", .{ field.name, @typeName(field.type) });
|
log.err("missing struct field: {s}: {s}", .{ field.name, @typeName(field.type) });
|
||||||
return error.StructFieldMissing;
|
return error.StructFieldMissing;
|
||||||
};
|
};
|
||||||
@field(parsed, field.name) = try self.parseValue(field.type, unwrapped);
|
@field(parsed, field.name) = try self.parseValue(field.type, unwrapped);
|
||||||
@ -387,8 +447,7 @@ pub const Yaml = struct {
|
|||||||
|
|
||||||
switch (ptr_info.size) {
|
switch (ptr_info.size) {
|
||||||
.Slice => {
|
.Slice => {
|
||||||
const child_info = @typeInfo(ptr_info.child);
|
if (ptr_info.child == u8) {
|
||||||
if (child_info == .Int and child_info.Int.bits == 8) {
|
|
||||||
return value.asString();
|
return value.asString();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -413,315 +472,36 @@ pub const Yaml = struct {
|
|||||||
|
|
||||||
return parsed;
|
return parsed;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn stringify(self: Yaml, writer: anytype) !void {
|
||||||
|
for (self.docs.items, 0..) |doc, i| {
|
||||||
|
try writer.writeAll("---");
|
||||||
|
if (self.tree.?.getDirective(i)) |directive| {
|
||||||
|
try writer.print(" !{s}", .{directive});
|
||||||
|
}
|
||||||
|
try writer.writeByte('\n');
|
||||||
|
try doc.stringify(writer, .{});
|
||||||
|
try writer.writeByte('\n');
|
||||||
|
}
|
||||||
|
try writer.writeAll("...\n");
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
pub fn stringify(allocator: Allocator, input: anytype, writer: anytype) !void {
|
||||||
|
var arena = ArenaAllocator.init(allocator);
|
||||||
|
defer arena.deinit();
|
||||||
|
|
||||||
|
var maybe_value = try Value.encode(arena.allocator(), input);
|
||||||
|
|
||||||
|
if (maybe_value) |value| {
|
||||||
|
// TODO should we output as an explicit doc?
|
||||||
|
// How can allow the user to specify?
|
||||||
|
try value.stringify(writer, .{});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
test {
|
test {
|
||||||
testing.refAllDecls(@This());
|
std.testing.refAllDecls(Tokenizer);
|
||||||
}
|
std.testing.refAllDecls(parse);
|
||||||
|
_ = @import("yaml/test.zig");
|
||||||
test "simple list" {
|
|
||||||
const source =
|
|
||||||
\\- a
|
|
||||||
\\- b
|
|
||||||
\\- c
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const list = yaml.docs.items[0].list;
|
|
||||||
try testing.expectEqual(list.len, 3);
|
|
||||||
|
|
||||||
try testing.expect(mem.eql(u8, list[0].string, "a"));
|
|
||||||
try testing.expect(mem.eql(u8, list[1].string, "b"));
|
|
||||||
try testing.expect(mem.eql(u8, list[2].string, "c"));
|
|
||||||
}
|
|
||||||
|
|
||||||
test "simple list typed as array of strings" {
|
|
||||||
const source =
|
|
||||||
\\- a
|
|
||||||
\\- b
|
|
||||||
\\- c
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const arr = try yaml.parse([3][]const u8);
|
|
||||||
try testing.expectEqual(arr.len, 3);
|
|
||||||
try testing.expect(mem.eql(u8, arr[0], "a"));
|
|
||||||
try testing.expect(mem.eql(u8, arr[1], "b"));
|
|
||||||
try testing.expect(mem.eql(u8, arr[2], "c"));
|
|
||||||
}
|
|
||||||
|
|
||||||
test "simple list typed as array of ints" {
|
|
||||||
const source =
|
|
||||||
\\- 0
|
|
||||||
\\- 1
|
|
||||||
\\- 2
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const arr = try yaml.parse([3]u8);
|
|
||||||
try testing.expectEqual(arr.len, 3);
|
|
||||||
try testing.expectEqual(arr[0], 0);
|
|
||||||
try testing.expectEqual(arr[1], 1);
|
|
||||||
try testing.expectEqual(arr[2], 2);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "list of mixed sign integer" {
|
|
||||||
const source =
|
|
||||||
\\- 0
|
|
||||||
\\- -1
|
|
||||||
\\- 2
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const arr = try yaml.parse([3]i8);
|
|
||||||
try testing.expectEqual(arr.len, 3);
|
|
||||||
try testing.expectEqual(arr[0], 0);
|
|
||||||
try testing.expectEqual(arr[1], -1);
|
|
||||||
try testing.expectEqual(arr[2], 2);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "simple map untyped" {
|
|
||||||
const source =
|
|
||||||
\\a: 0
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const map = yaml.docs.items[0].map;
|
|
||||||
try testing.expect(map.contains("a"));
|
|
||||||
try testing.expectEqual(map.get("a").?.int, 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "simple map untyped with a list of maps" {
|
|
||||||
const source =
|
|
||||||
\\a: 0
|
|
||||||
\\b:
|
|
||||||
\\ - foo: 1
|
|
||||||
\\ bar: 2
|
|
||||||
\\ - foo: 3
|
|
||||||
\\ bar: 4
|
|
||||||
\\c: 1
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const map = yaml.docs.items[0].map;
|
|
||||||
try testing.expect(map.contains("a"));
|
|
||||||
try testing.expect(map.contains("b"));
|
|
||||||
try testing.expect(map.contains("c"));
|
|
||||||
try testing.expectEqual(map.get("a").?.int, 0);
|
|
||||||
try testing.expectEqual(map.get("c").?.int, 1);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[0].map.get("foo").?.int, 1);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[0].map.get("bar").?.int, 2);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[1].map.get("foo").?.int, 3);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[1].map.get("bar").?.int, 4);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "simple map untyped with a list of maps. no indent" {
|
|
||||||
const source =
|
|
||||||
\\b:
|
|
||||||
\\- foo: 1
|
|
||||||
\\c: 1
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const map = yaml.docs.items[0].map;
|
|
||||||
try testing.expect(map.contains("b"));
|
|
||||||
try testing.expect(map.contains("c"));
|
|
||||||
try testing.expectEqual(map.get("c").?.int, 1);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[0].map.get("foo").?.int, 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "simple map untyped with a list of maps. no indent 2" {
|
|
||||||
const source =
|
|
||||||
\\a: 0
|
|
||||||
\\b:
|
|
||||||
\\- foo: 1
|
|
||||||
\\ bar: 2
|
|
||||||
\\- foo: 3
|
|
||||||
\\ bar: 4
|
|
||||||
\\c: 1
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectEqual(yaml.docs.items.len, 1);
|
|
||||||
|
|
||||||
const map = yaml.docs.items[0].map;
|
|
||||||
try testing.expect(map.contains("a"));
|
|
||||||
try testing.expect(map.contains("b"));
|
|
||||||
try testing.expect(map.contains("c"));
|
|
||||||
try testing.expectEqual(map.get("a").?.int, 0);
|
|
||||||
try testing.expectEqual(map.get("c").?.int, 1);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[0].map.get("foo").?.int, 1);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[0].map.get("bar").?.int, 2);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[1].map.get("foo").?.int, 3);
|
|
||||||
try testing.expectEqual(map.get("b").?.list[1].map.get("bar").?.int, 4);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "simple map typed" {
|
|
||||||
const source =
|
|
||||||
\\a: 0
|
|
||||||
\\b: hello there
|
|
||||||
\\c: 'wait, what?'
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
const simple = try yaml.parse(struct { a: usize, b: []const u8, c: []const u8 });
|
|
||||||
try testing.expectEqual(simple.a, 0);
|
|
||||||
try testing.expect(mem.eql(u8, simple.b, "hello there"));
|
|
||||||
try testing.expect(mem.eql(u8, simple.c, "wait, what?"));
|
|
||||||
}
|
|
||||||
|
|
||||||
test "typed nested structs" {
|
|
||||||
const source =
|
|
||||||
\\a:
|
|
||||||
\\ b: hello there
|
|
||||||
\\ c: 'wait, what?'
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
const simple = try yaml.parse(struct {
|
|
||||||
a: struct {
|
|
||||||
b: []const u8,
|
|
||||||
c: []const u8,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
try testing.expect(mem.eql(u8, simple.a.b, "hello there"));
|
|
||||||
try testing.expect(mem.eql(u8, simple.a.c, "wait, what?"));
|
|
||||||
}
|
|
||||||
|
|
||||||
test "multidoc typed as a slice of structs" {
|
|
||||||
const source =
|
|
||||||
\\---
|
|
||||||
\\a: 0
|
|
||||||
\\---
|
|
||||||
\\a: 1
|
|
||||||
\\...
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
{
|
|
||||||
const result = try yaml.parse([2]struct { a: usize });
|
|
||||||
try testing.expectEqual(result.len, 2);
|
|
||||||
try testing.expectEqual(result[0].a, 0);
|
|
||||||
try testing.expectEqual(result[1].a, 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
const result = try yaml.parse([]struct { a: usize });
|
|
||||||
try testing.expectEqual(result.len, 2);
|
|
||||||
try testing.expectEqual(result[0].a, 0);
|
|
||||||
try testing.expectEqual(result[1].a, 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
test "multidoc typed as a struct is an error" {
|
|
||||||
const source =
|
|
||||||
\\---
|
|
||||||
\\a: 0
|
|
||||||
\\---
|
|
||||||
\\b: 1
|
|
||||||
\\...
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize }));
|
|
||||||
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { b: usize }));
|
|
||||||
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize, b: usize }));
|
|
||||||
}
|
|
||||||
|
|
||||||
test "multidoc typed as a slice of structs with optionals" {
|
|
||||||
const source =
|
|
||||||
\\---
|
|
||||||
\\a: 0
|
|
||||||
\\c: 1.0
|
|
||||||
\\---
|
|
||||||
\\a: 1
|
|
||||||
\\b: different field
|
|
||||||
\\...
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
const result = try yaml.parse([]struct { a: usize, b: ?[]const u8, c: ?f16 });
|
|
||||||
try testing.expectEqual(result.len, 2);
|
|
||||||
|
|
||||||
try testing.expectEqual(result[0].a, 0);
|
|
||||||
try testing.expect(result[0].b == null);
|
|
||||||
try testing.expect(result[0].c != null);
|
|
||||||
try testing.expectEqual(result[0].c.?, 1.0);
|
|
||||||
|
|
||||||
try testing.expectEqual(result[1].a, 1);
|
|
||||||
try testing.expect(result[1].b != null);
|
|
||||||
try testing.expect(mem.eql(u8, result[1].b.?, "different field"));
|
|
||||||
try testing.expect(result[1].c == null);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "empty yaml can be represented as void" {
|
|
||||||
const source = "";
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
const result = try yaml.parse(void);
|
|
||||||
try testing.expect(@TypeOf(result) == void);
|
|
||||||
}
|
|
||||||
|
|
||||||
test "nonempty yaml cannot be represented as void" {
|
|
||||||
const source =
|
|
||||||
\\a: b
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(void));
|
|
||||||
}
|
|
||||||
|
|
||||||
test "typed array size mismatch" {
|
|
||||||
const source =
|
|
||||||
\\- 0
|
|
||||||
\\- 0
|
|
||||||
;
|
|
||||||
|
|
||||||
var yaml = try Yaml.load(testing.allocator, source);
|
|
||||||
defer yaml.deinit();
|
|
||||||
|
|
||||||
try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([1]usize));
|
|
||||||
try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([5]usize));
|
|
||||||
}
|
}

475
src/link/tapi/yaml/test.zig
Normal file
@ -0,0 +1,475 @@

const std = @import("std");
|
||||||
|
const mem = std.mem;
|
||||||
|
const testing = std.testing;
|
||||||
|
|
||||||
|
const yaml_mod = @import("../yaml.zig");
|
||||||
|
const Yaml = yaml_mod.Yaml;
|
||||||
|
|
||||||
|
test "simple list" {
|
||||||
|
const source =
|
||||||
|
\\- a
|
||||||
|
\\- b
|
||||||
|
\\- c
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const list = yaml.docs.items[0].list;
|
||||||
|
try testing.expectEqual(list.len, 3);
|
||||||
|
|
||||||
|
try testing.expectEqualStrings("a", list[0].string);
|
||||||
|
try testing.expectEqualStrings("b", list[1].string);
|
||||||
|
try testing.expectEqualStrings("c", list[2].string);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple list typed as array of strings" {
|
||||||
|
const source =
|
||||||
|
\\- a
|
||||||
|
\\- b
|
||||||
|
\\- c
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const arr = try yaml.parse([3][]const u8);
|
||||||
|
try testing.expectEqual(3, arr.len);
|
||||||
|
try testing.expectEqualStrings("a", arr[0]);
|
||||||
|
try testing.expectEqualStrings("b", arr[1]);
|
||||||
|
try testing.expectEqualStrings("c", arr[2]);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple list typed as array of ints" {
|
||||||
|
const source =
|
||||||
|
\\- 0
|
||||||
|
\\- 1
|
||||||
|
\\- 2
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const arr = try yaml.parse([3]u8);
|
||||||
|
try testing.expectEqualSlices(u8, &[_]u8{ 0, 1, 2 }, &arr);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "list of mixed sign integer" {
|
||||||
|
const source =
|
||||||
|
\\- 0
|
||||||
|
\\- -1
|
||||||
|
\\- 2
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const arr = try yaml.parse([3]i8);
|
||||||
|
try testing.expectEqualSlices(i8, &[_]i8{ 0, -1, 2 }, &arr);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple map untyped" {
|
||||||
|
const source =
|
||||||
|
\\a: 0
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const map = yaml.docs.items[0].map;
|
||||||
|
try testing.expect(map.contains("a"));
|
||||||
|
try testing.expectEqual(@as(i64, 0), map.get("a").?.int);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple map untyped with a list of maps" {
|
||||||
|
const source =
|
||||||
|
\\a: 0
|
||||||
|
\\b:
|
||||||
|
\\ - foo: 1
|
||||||
|
\\ bar: 2
|
||||||
|
\\ - foo: 3
|
||||||
|
\\ bar: 4
|
||||||
|
\\c: 1
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const map = yaml.docs.items[0].map;
|
||||||
|
try testing.expect(map.contains("a"));
|
||||||
|
try testing.expect(map.contains("b"));
|
||||||
|
try testing.expect(map.contains("c"));
|
||||||
|
try testing.expectEqual(@as(i64, 0), map.get("a").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 1), map.get("c").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 1), map.get("b").?.list[0].map.get("foo").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 2), map.get("b").?.list[0].map.get("bar").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 3), map.get("b").?.list[1].map.get("foo").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 4), map.get("b").?.list[1].map.get("bar").?.int);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple map untyped with a list of maps. no indent" {
|
||||||
|
const source =
|
||||||
|
\\b:
|
||||||
|
\\- foo: 1
|
||||||
|
\\c: 1
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const map = yaml.docs.items[0].map;
|
||||||
|
try testing.expect(map.contains("b"));
|
||||||
|
try testing.expect(map.contains("c"));
|
||||||
|
try testing.expectEqual(@as(i64, 1), map.get("c").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 1), map.get("b").?.list[0].map.get("foo").?.int);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple map untyped with a list of maps. no indent 2" {
|
||||||
|
const source =
|
||||||
|
\\a: 0
|
||||||
|
\\b:
|
||||||
|
\\- foo: 1
|
||||||
|
\\ bar: 2
|
||||||
|
\\- foo: 3
|
||||||
|
\\ bar: 4
|
||||||
|
\\c: 1
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectEqual(yaml.docs.items.len, 1);
|
||||||
|
|
||||||
|
const map = yaml.docs.items[0].map;
|
||||||
|
try testing.expect(map.contains("a"));
|
||||||
|
try testing.expect(map.contains("b"));
|
||||||
|
try testing.expect(map.contains("c"));
|
||||||
|
try testing.expectEqual(@as(i64, 0), map.get("a").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 1), map.get("c").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 1), map.get("b").?.list[0].map.get("foo").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 2), map.get("b").?.list[0].map.get("bar").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 3), map.get("b").?.list[1].map.get("foo").?.int);
|
||||||
|
try testing.expectEqual(@as(i64, 4), map.get("b").?.list[1].map.get("bar").?.int);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "simple map typed" {
|
||||||
|
const source =
|
||||||
|
\\a: 0
|
||||||
|
\\b: hello there
|
||||||
|
\\c: 'wait, what?'
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
const simple = try yaml.parse(struct { a: usize, b: []const u8, c: []const u8 });
|
||||||
|
try testing.expectEqual(@as(usize, 0), simple.a);
|
||||||
|
try testing.expectEqualStrings("hello there", simple.b);
|
||||||
|
try testing.expectEqualStrings("wait, what?", simple.c);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "typed nested structs" {
|
||||||
|
const source =
|
||||||
|
\\a:
|
||||||
|
\\ b: hello there
|
||||||
|
\\ c: 'wait, what?'
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
const simple = try yaml.parse(struct {
|
||||||
|
a: struct {
|
||||||
|
b: []const u8,
|
||||||
|
c: []const u8,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
try testing.expectEqualStrings("hello there", simple.a.b);
|
||||||
|
try testing.expectEqualStrings("wait, what?", simple.a.c);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "single quoted string" {
|
||||||
|
const source =
|
||||||
|
\\- 'hello'
|
||||||
|
\\- 'here''s an escaped quote'
|
||||||
|
\\- 'newlines and tabs\nare not\tsupported'
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
const arr = try yaml.parse([3][]const u8);
|
||||||
|
try testing.expectEqual(arr.len, 3);
|
||||||
|
try testing.expectEqualStrings("hello", arr[0]);
|
||||||
|
try testing.expectEqualStrings("here's an escaped quote", arr[1]);
|
||||||
|
try testing.expectEqualStrings("newlines and tabs\\nare not\\tsupported", arr[2]);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "double quoted string" {
|
||||||
|
const source =
|
||||||
|
\\- "hello"
|
||||||
|
\\- "\"here\" are some escaped quotes"
|
||||||
|
\\- "newlines and tabs\nare\tsupported"
|
||||||
|
\\- "let's have
|
||||||
|
\\some fun!"
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
const arr = try yaml.parse([4][]const u8);
|
||||||
|
try testing.expectEqual(arr.len, 4);
|
||||||
|
try testing.expectEqualStrings("hello", arr[0]);
|
||||||
|
try testing.expectEqualStrings(
|
||||||
|
\\"here" are some escaped quotes
|
||||||
|
, arr[1]);
|
||||||
|
try testing.expectEqualStrings(
|
||||||
|
\\newlines and tabs
|
||||||
|
\\are supported
|
||||||
|
, arr[2]);
|
||||||
|
try testing.expectEqualStrings(
|
||||||
|
\\let's have
|
||||||
|
\\some fun!
|
||||||
|
, arr[3]);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "multidoc typed as a slice of structs" {
|
||||||
|
const source =
|
||||||
|
\\---
|
||||||
|
\\a: 0
|
||||||
|
\\---
|
||||||
|
\\a: 1
|
||||||
|
\\...
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
{
|
||||||
|
const result = try yaml.parse([2]struct { a: usize });
|
||||||
|
try testing.expectEqual(result.len, 2);
|
||||||
|
try testing.expectEqual(result[0].a, 0);
|
||||||
|
try testing.expectEqual(result[1].a, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const result = try yaml.parse([]struct { a: usize });
|
||||||
|
try testing.expectEqual(result.len, 2);
|
||||||
|
try testing.expectEqual(result[0].a, 0);
|
||||||
|
try testing.expectEqual(result[1].a, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test "multidoc typed as a struct is an error" {
|
||||||
|
const source =
|
||||||
|
\\---
|
||||||
|
\\a: 0
|
||||||
|
\\---
|
||||||
|
\\b: 1
|
||||||
|
\\...
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize }));
|
||||||
|
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { b: usize }));
|
||||||
|
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize, b: usize }));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "multidoc typed as a slice of structs with optionals" {
|
||||||
|
const source =
|
||||||
|
\\---
|
||||||
|
\\a: 0
|
||||||
|
\\c: 1.0
|
||||||
|
\\---
|
||||||
|
\\a: 1
|
||||||
|
\\b: different field
|
||||||
|
\\...
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
const result = try yaml.parse([]struct { a: usize, b: ?[]const u8, c: ?f16 });
|
||||||
|
try testing.expectEqual(result.len, 2);
|
||||||
|
|
||||||
|
try testing.expectEqual(result[0].a, 0);
|
||||||
|
try testing.expect(result[0].b == null);
|
||||||
|
try testing.expect(result[0].c != null);
|
||||||
|
try testing.expectEqual(result[0].c.?, 1.0);
|
||||||
|
|
||||||
|
try testing.expectEqual(result[1].a, 1);
|
||||||
|
try testing.expect(result[1].b != null);
|
||||||
|
try testing.expectEqualStrings("different field", result[1].b.?);
|
||||||
|
try testing.expect(result[1].c == null);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "empty yaml can be represented as void" {
|
||||||
|
const source = "";
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
const result = try yaml.parse(void);
|
||||||
|
try testing.expect(@TypeOf(result) == void);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "nonempty yaml cannot be represented as void" {
|
||||||
|
const source =
|
||||||
|
\\a: b
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(void));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "typed array size mismatch" {
|
||||||
|
const source =
|
||||||
|
\\- 0
|
||||||
|
\\- 0
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([1]usize));
|
||||||
|
try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([5]usize));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "comments" {
|
||||||
|
const source =
|
||||||
|
\\
|
||||||
|
\\key: # this is the key
|
||||||
|
\\# first value
|
||||||
|
\\
|
||||||
|
\\- val1
|
||||||
|
\\
|
||||||
|
\\# second value
|
||||||
|
\\- val2
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
const simple = try yaml.parse(struct {
|
||||||
|
key: []const []const u8,
|
||||||
|
});
|
||||||
|
try testing.expect(simple.key.len == 2);
|
||||||
|
try testing.expectEqualStrings("val1", simple.key[0]);
|
||||||
|
try testing.expectEqualStrings("val2", simple.key[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "promote ints to floats in a list mixed numeric types" {
|
||||||
|
const source =
|
||||||
|
\\a_list: [0, 1.0]
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
const simple = try yaml.parse(struct {
|
||||||
|
a_list: []const f64,
|
||||||
|
});
|
||||||
|
try testing.expectEqualSlices(f64, &[_]f64{ 0.0, 1.0 }, simple.a_list);
|
||||||
|
}
|
||||||
|
|
||||||
|
test "demoting floats to ints in a list is an error" {
|
||||||
|
const source =
|
||||||
|
\\a_list: [0, 1.0]
|
||||||
|
;
|
||||||
|
|
||||||
|
var yaml = try Yaml.load(testing.allocator, source);
|
||||||
|
defer yaml.deinit();
|
||||||
|
|
||||||
|
try testing.expectError(error.TypeMismatch, yaml.parse(struct {
|
||||||
|
a_list: []const u64,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "duplicate map keys" {
|
||||||
|
const source =
|
||||||
|
\\a: b
|
||||||
|
\\a: c
|
||||||
|
;
|
||||||
|
try testing.expectError(error.DuplicateMapKey, Yaml.load(testing.allocator, source));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn testStringify(expected: []const u8, input: anytype) !void {
    var output = std.ArrayList(u8).init(testing.allocator);
    defer output.deinit();

    try yaml_mod.stringify(testing.allocator, input, output.writer());
    try testing.expectEqualStrings(expected, output.items);
}

test "stringify an int" {
|
||||||
|
try testStringify("128", @as(u32, 128));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "stringify a simple struct" {
|
||||||
|
try testStringify(
|
||||||
|
\\a: 1
|
||||||
|
\\b: 2
|
||||||
|
\\c: 2.5
|
||||||
|
, struct { a: i64, b: f64, c: f64 }{ .a = 1, .b = 2.0, .c = 2.5 });
|
||||||
|
}
|
||||||
|
|
||||||
|
test "stringify a struct with an optional" {
|
||||||
|
try testStringify(
|
||||||
|
\\a: 1
|
||||||
|
\\b: 2
|
||||||
|
\\c: 2.5
|
||||||
|
, struct { a: i64, b: ?f64, c: f64 }{ .a = 1, .b = 2.0, .c = 2.5 });
|
||||||
|
|
||||||
|
try testStringify(
|
||||||
|
\\a: 1
|
||||||
|
\\c: 2.5
|
||||||
|
, struct { a: i64, b: ?f64, c: f64 }{ .a = 1, .b = null, .c = 2.5 });
|
||||||
|
}
|
||||||
|
|
||||||
|
test "stringify a struct with all optionals" {
|
||||||
|
try testStringify("", struct { a: ?i64, b: ?f64 }{ .a = null, .b = null });
|
||||||
|
}
|
||||||
|
|
||||||
|
test "stringify an optional" {
|
||||||
|
try testStringify("", null);
|
||||||
|
try testStringify("", @as(?u64, null));
|
||||||
|
}
|
||||||
|
|
||||||
|
test "stringify a union" {
|
||||||
|
const Dummy = union(enum) {
|
||||||
|
x: u64,
|
||||||
|
y: f64,
|
||||||
|
};
|
||||||
|
try testStringify("a: 1", struct { a: Dummy }{ .a = .{ .x = 1 } });
|
||||||
|
try testStringify("a: 2.1", struct { a: Dummy }{ .a = .{ .y = 2.1 } });
|
||||||
|
}
|
||||||
|
|
||||||
|
test "stringify a string" {
|
||||||
|
try testStringify("a: name", struct { a: []const u8 }{ .a = "name" });
|
||||||
|
try testStringify("name", "name");
|
||||||
|
}
|
||||||
|
|
||||||
|
test "stringify a list" {
|
||||||
|
try testStringify("[ 1, 2, 3 ]", @as([]const u64, &.{ 1, 2, 3 }));
|
||||||
|
try testStringify("[ 1, 2, 3 ]", .{ @as(i64, 1), 2, 3 });
|
||||||
|
try testStringify("[ 1, name, 3 ]", .{ 1, "name", 3 });
|
||||||
|
|
||||||
|
const arr: [3]i64 = .{ 1, 2, 3 };
|
||||||
|
try testStringify("[ 1, 2, 3 ]", arr);
|
||||||
|
}
|
||||||