mirror of https://github.com/ziglang/zig.git
synced 2026-02-17 14:59:14 +00:00

commit 70994b13df
Merge remote-tracking branch 'origin/master' into wrangle-writer-buffering
@@ -7987,7 +7987,7 @@ AsmInput <- COLON AsmInputList AsmClobbers?
 
 AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
 
-AsmClobbers <- COLON StringList
+AsmClobbers <- COLON Expr
 
 # *** Helper grammar ***
 BreakLabel <- COLON IDENTIFIER
@@ -292,12 +292,14 @@ pub fn main() !void {
     };
     defer depfile.close();
 
-    const depfile_writer = depfile.deprecatedWriter();
-    var depfile_buffered_writer = std.io.bufferedWriter(depfile_writer);
+    var depfile_buffer: [1024]u8 = undefined;
+    var depfile_writer = depfile.writer(&depfile_buffer);
     switch (options.depfile_fmt) {
         .json => {
-            var write_stream = std.json.writeStream(depfile_buffered_writer.writer(), .{ .whitespace = .indent_2 });
-            defer write_stream.deinit();
+            var write_stream: std.json.Stringify = .{
+                .writer = &depfile_writer.interface,
+                .options = .{ .whitespace = .indent_2 },
+            };
 
             try write_stream.beginArray();
             for (dependencies_list.items) |dep_path| {
@@ -306,7 +308,7 @@ pub fn main() !void {
             try write_stream.endArray();
         },
     }
-    try depfile_buffered_writer.flush();
+    try depfile_writer.interface.flush();
 }

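Taken together, the two hunks above show the migration pattern this branch is about: instead of wrapping a file writer in `std.io.bufferedWriter`, the caller now owns the buffer, and the concrete `std.fs.File.Writer` exposes a generic `*std.Io.Writer` through its `interface` field, which is also what gets flushed. A minimal standalone sketch of the same pattern (the file name and contents are hypothetical):

const std = @import("std");

pub fn main() !void {
    const file = try std.fs.cwd().createFile("example.txt", .{});
    defer file.close();

    var buf: [1024]u8 = undefined; // caller-owned buffer replaces std.io.bufferedWriter
    var file_writer = file.writer(&buf);
    const w = &file_writer.interface; // generic *std.Io.Writer

    try w.writeAll("hello\n");
    try w.flush(); // nothing reaches the file until the interface is flushed
}
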
@@ -10,10 +10,10 @@ pub const std_options: std.Options = .{
 };
 
 var log_err_count: usize = 0;
-var fba_buffer: [8192]u8 = undefined;
 var fba = std.heap.FixedBufferAllocator.init(&fba_buffer);
-var stdin_buffer: [std.heap.page_size_min]u8 align(std.heap.page_size_min) = undefined;
-var stdout_buffer: [std.heap.page_size_min]u8 align(std.heap.page_size_min) = undefined;
+var fba_buffer: [8192]u8 = undefined;
+var stdin_buffer: [4096]u8 = undefined;
+var stdout_buffer: [4096]u8 = undefined;
 
 const crippled = switch (builtin.zig_backend) {
     .stage2_powerpc,
@@ -68,8 +68,8 @@ pub fn main() void {
 
 fn mainServer() !void {
     @disableInstrumentation();
-    var stdin_reader = std.fs.File.stdin().reader(&stdin_buffer);
-    var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
+    var stdin_reader = std.fs.File.stdin().readerStreaming(&stdin_buffer);
+    var stdout_writer = std.fs.File.stdout().writerStreaming(&stdout_buffer);
     var server = try std.zig.Server.init(.{
         .in = &stdin_reader.interface,
         .out = &stdout_writer.interface,
@@ -104,7 +104,7 @@ fn mainServer() !void {
     defer testing.allocator.free(expected_panic_msgs);
 
     for (test_fns, names, expected_panic_msgs) |test_fn, *name, *expected_panic_msg| {
-        name.* = @as(u32, @intCast(string_bytes.items.len));
+        name.* = @intCast(string_bytes.items.len);
         try string_bytes.ensureUnusedCapacity(testing.allocator, test_fn.name.len + 1);
        string_bytes.appendSliceAssumeCapacity(test_fn.name);
        string_bytes.appendAssumeCapacity(0);

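The `readerStreaming`/`writerStreaming` variants used above differ from plain `reader`/`writer` in that they treat the handle as a non-seekable stream — an assumption based on the naming and on stdin/stdout being pipes here. A sketch of the setup in isolation, using the `takeDelimiterInclusive` helper that appears later in this diff:

const std = @import("std");

var stdin_buffer: [4096]u8 = undefined;
var stdout_buffer: [4096]u8 = undefined;

fn echoOneLine() !void {
    var stdin_reader = std.fs.File.stdin().readerStreaming(&stdin_buffer);
    var stdout_writer = std.fs.File.stdout().writerStreaming(&stdout_buffer);

    // All further I/O goes through the generic interfaces.
    const line = try stdin_reader.interface.takeDelimiterInclusive('\n');
    try stdout_writer.interface.writeAll(line);
    try stdout_writer.interface.flush();
}
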
@@ -161,17 +161,19 @@ pub fn formatEscapeString(path: Path, writer: *std.io.Writer) std.io.Writer.Erro
     }
 }
 
+/// Deprecated, use double quoted escape to print paths.
 pub fn fmtEscapeChar(path: Path) std.fmt.Formatter(Path, formatEscapeChar) {
     return .{ .data = path };
 }
 
+/// Deprecated, use double quoted escape to print paths.
 pub fn formatEscapeChar(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
     if (path.root_dir.path) |p| {
-        try std.zig.charEscape(p, writer);
-        if (path.sub_path.len > 0) try std.zig.charEscape(fs.path.sep_str, writer);
+        for (p) |byte| try std.zig.charEscape(byte, writer);
+        if (path.sub_path.len > 0) try writer.writeByte(fs.path.sep);
     }
     if (path.sub_path.len > 0) {
-        try std.zig.charEscape(path.sub_path, writer);
+        for (path.sub_path) |byte| try std.zig.charEscape(byte, writer);
     }
 }

@@ -1122,10 +1122,12 @@ fn runCommand(
             // Wine's excessive stderr logging is only situationally helpful. Disable it by default, but
             // allow the user to override it (e.g. with `WINEDEBUG=err+all`) if desired.
             if (env_map.get("WINEDEBUG") == null) {
-                // We don't own `env_map` at this point, so turn it into a copy before modifying it.
-                env_map = arena.create(EnvMap) catch @panic("OOM");
-                env_map.hash_map = try env_map.hash_map.cloneWithAllocator(arena);
-                try env_map.put("WINEDEBUG", "-all");
+                // We don't own `env_map` at this point, so create a copy in order to modify it.
+                const new_env_map = arena.create(EnvMap) catch @panic("OOM");
+                new_env_map.hash_map = try env_map.hash_map.cloneWithAllocator(arena);
+                try new_env_map.put("WINEDEBUG", "-all");
+
+                env_map = new_env_map;
             }
         } else {
             return failForeign(run, "-fwine", argv[0], exe);
@@ -1737,7 +1739,7 @@ fn sendMessage(file: std.fs.File, tag: std.zig.Client.Message.Tag) !void {
         .tag = tag,
         .bytes_len = 0,
     };
-    try file.writeAll(std.mem.asBytes(&header));
+    try file.writeAll(@ptrCast(&header));
 }
 
 fn sendRunTestMessage(file: std.fs.File, tag: std.zig.Client.Message.Tag, index: u32) !void {

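The `sendMessage` change above swaps `std.mem.asBytes` for a result-typed `@ptrCast`; both reinterpret the header struct as the `[]const u8` that `writeAll` expects. A sketch of the equivalence (this `Header` layout is hypothetical, chosen only for illustration):

const std = @import("std");

const Header = extern struct { tag: u32, bytes_len: u32 };

test "asBytes and @ptrCast produce the same byte view" {
    const header: Header = .{ .tag = 1, .bytes_len = 0 };
    const a: []const u8 = std.mem.asBytes(&header); // *[8]u8, coerces to a slice
    const b: []const u8 = @ptrCast(&header); // pointer cast with a slice result type
    try std.testing.expectEqualSlices(u8, a, b);
}
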
@@ -990,9 +990,9 @@ pub fn discardDelimiterLimit(r: *Reader, delimiter: u8, limit: Limit) DiscardDel
 /// Returns `error.EndOfStream` if and only if there are fewer than `n` bytes
 /// remaining.
 ///
-/// Asserts buffer capacity is at least `n`.
+/// If the end of stream is not encountered, asserts buffer capacity is at
+/// least `n`.
 pub fn fill(r: *Reader, n: usize) Error!void {
-    assert(n <= r.buffer.len);
     if (r.seek + n <= r.end) {
         @branchHint(.likely);
         return;
@@ -1108,9 +1108,9 @@ pub fn takeVarInt(r: *Reader, comptime Int: type, endian: std.builtin.Endian, n:
 /// Asserts the buffer was initialized with a capacity at least `@sizeOf(T)`.
 ///
 /// See also:
-/// * `peekStructReference`
+/// * `peekStructPointer`
 /// * `takeStruct`
-pub fn takeStructReference(r: *Reader, comptime T: type) Error!*align(1) T {
+pub fn takeStructPointer(r: *Reader, comptime T: type) Error!*align(1) T {
     // Only extern and packed structs have defined in-memory layout.
     comptime assert(@typeInfo(T).@"struct".layout != .auto);
     return @ptrCast(try r.takeArray(@sizeOf(T)));
@@ -1122,9 +1122,9 @@ pub fn takeStructReference(r: *Reader, comptime T: type) Error!*align(1) T {
 /// Asserts the buffer was initialized with a capacity at least `@sizeOf(T)`.
 ///
 /// See also:
-/// * `takeStructReference`
+/// * `takeStructPointer`
 /// * `peekStruct`
-pub fn peekStructReference(r: *Reader, comptime T: type) Error!*align(1) T {
+pub fn peekStructPointer(r: *Reader, comptime T: type) Error!*align(1) T {
     // Only extern and packed structs have defined in-memory layout.
     comptime assert(@typeInfo(T).@"struct".layout != .auto);
     return @ptrCast(try r.peekArray(@sizeOf(T)));
@@ -1136,19 +1136,19 @@ pub fn peekStructReference(r: *Reader, comptime T: type) Error!*align(1) T {
 /// when `endian` is comptime-known and matches the host endianness.
 ///
 /// See also:
-/// * `takeStructReference`
+/// * `takeStructPointer`
 /// * `peekStruct`
 pub inline fn takeStruct(r: *Reader, comptime T: type, endian: std.builtin.Endian) Error!T {
     switch (@typeInfo(T)) {
         .@"struct" => |info| switch (info.layout) {
             .auto => @compileError("ill-defined memory layout"),
             .@"extern" => {
-                var res = (try r.takeStructReference(T)).*;
+                var res = (try r.takeStructPointer(T)).*;
                 if (native_endian != endian) std.mem.byteSwapAllFields(T, &res);
                 return res;
             },
             .@"packed" => {
-                return takeInt(r, info.backing_integer.?, endian);
+                return @bitCast(try takeInt(r, info.backing_integer.?, endian));
             },
         },
         else => @compileError("not a struct"),
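In the `.@"packed"` branch above, `takeInt` yields the backing integer, and the fix wraps it in `@bitCast` so the function actually returns a value of the packed struct type; fields fill from the least significant bit upward. A small sketch of that relationship:

const std = @import("std");

test "packed struct round-trips through its backing integer" {
    const S = packed struct(u16) { a: u2, b: u6, c: u7, d: u1 };
    const raw: u16 = 0b1_0011001_110000_11;
    const s: S = @bitCast(raw); // fields are taken from the least significant bits
    try std.testing.expectEqual(@as(u2, 0b11), s.a);
    try std.testing.expectEqual(raw, @as(u16, @bitCast(s)));
}
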
@@ -1162,18 +1162,18 @@ pub inline fn takeStruct(r: *Reader, comptime T: type, endian: std.builtin.Endia
 ///
 /// See also:
 /// * `takeStruct`
-/// * `peekStructReference`
+/// * `peekStructPointer`
 pub inline fn peekStruct(r: *Reader, comptime T: type, endian: std.builtin.Endian) Error!T {
     switch (@typeInfo(T)) {
         .@"struct" => |info| switch (info.layout) {
             .auto => @compileError("ill-defined memory layout"),
             .@"extern" => {
-                var res = (try r.peekStructReference(T)).*;
+                var res = (try r.peekStructPointer(T)).*;
                 if (native_endian != endian) std.mem.byteSwapAllFields(T, &res);
                 return res;
             },
             .@"packed" => {
-                return peekInt(r, info.backing_integer.?, endian);
+                return @bitCast(try peekInt(r, info.backing_integer.?, endian));
             },
         },
         else => @compileError("not a struct"),
@@ -1557,27 +1557,27 @@ test takeVarInt {
     try testing.expectError(error.EndOfStream, r.takeVarInt(u16, .little, 1));
 }
 
-test takeStructReference {
+test takeStructPointer {
     var r: Reader = .fixed(&.{ 0x12, 0x00, 0x34, 0x56 });
     const S = extern struct { a: u8, b: u16 };
     switch (native_endian) {
-        .little => try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.takeStructReference(S)).*),
-        .big => try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.takeStructReference(S)).*),
+        .little => try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.takeStructPointer(S)).*),
+        .big => try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.takeStructPointer(S)).*),
     }
-    try testing.expectError(error.EndOfStream, r.takeStructReference(S));
+    try testing.expectError(error.EndOfStream, r.takeStructPointer(S));
 }
 
-test peekStructReference {
+test peekStructPointer {
     var r: Reader = .fixed(&.{ 0x12, 0x00, 0x34, 0x56 });
     const S = extern struct { a: u8, b: u16 };
     switch (native_endian) {
         .little => {
-            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.peekStructReference(S)).*);
-            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.peekStructReference(S)).*);
+            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.peekStructPointer(S)).*);
+            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.peekStructPointer(S)).*);
         },
         .big => {
-            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.peekStructReference(S)).*);
-            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.peekStructReference(S)).*);
+            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.peekStructPointer(S)).*);
+            try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.peekStructPointer(S)).*);
         },
     }
 }
@@ -1724,6 +1724,27 @@ test "takeDelimiterInclusive when it rebases" {
     }
 }
 
+test "takeStruct and peekStruct packed" {
+    var r: Reader = .fixed(&.{ 0b11110000, 0b00110011 });
+    const S = packed struct(u16) { a: u2, b: u6, c: u7, d: u1 };
+
+    try testing.expectEqual(@as(S, .{
+        .a = 0b11,
+        .b = 0b001100,
+        .c = 0b1110000,
+        .d = 0b1,
+    }), try r.peekStruct(S, .big));
+
+    try testing.expectEqual(@as(S, .{
+        .a = 0b11,
+        .b = 0b001100,
+        .c = 0b1110000,
+        .d = 0b1,
+    }), try r.takeStruct(S, .big));
+
+    try testing.expectError(error.EndOfStream, r.takeStruct(S, .little));
+}
+
 /// Provides a `Reader` implementation by passing data from an underlying
 /// reader through `Hasher.update`.
 ///

@@ -867,18 +867,11 @@ pub inline fn writeSliceEndian(
     }
 }
 
-/// Asserts that the buffer storage capacity is at least enough to store `@sizeOf(Elem)`
-///
-/// Asserts that the buffer is aligned enough for `@alignOf(Elem)`.
 pub fn writeSliceSwap(w: *Writer, Elem: type, slice: []const Elem) Error!void {
-    var i: usize = 0;
-    while (i < slice.len) {
-        const dest_bytes = try w.writableSliceGreedy(@sizeOf(Elem));
-        const dest: []Elem = @alignCast(@ptrCast(dest_bytes[0 .. dest_bytes.len - dest_bytes.len % @sizeOf(Elem)]));
-        const copy_len = @min(dest.len, slice.len - i);
-        @memcpy(dest[0..copy_len], slice[i..][0..copy_len]);
-        i += copy_len;
-        std.mem.byteSwapAllElements(Elem, dest);
+    for (slice) |elem| {
+        var tmp = elem;
+        std.mem.byteSwapAllFields(Elem, &tmp);
+        try w.writeAll(@ptrCast(&tmp));
     }
 }

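The rewritten `writeSliceSwap` no longer byte-swaps in place inside the writer's buffer; it copies each element to a stack temporary, swaps it with `std.mem.byteSwapAllFields`, and writes the bytes out, which is why the old capacity and alignment doc comments were dropped. A rough usage sketch, with the output checked for either host endianness:

const std = @import("std");

test "writeSliceSwap emits byte-swapped elements" {
    var buf: [4]u8 = undefined;
    var w: std.Io.Writer = .fixed(&buf);
    const vals: [2]u16 = .{ 0x1234, 0x5678 };
    try w.writeSliceSwap(u16, &vals);
    switch (@import("builtin").cpu.arch.endian()) {
        // On a little-endian host the swapped form is big-endian byte order.
        .little => try std.testing.expectEqualSlices(u8, &.{ 0x12, 0x34, 0x56, 0x78 }, &buf),
        .big => try std.testing.expectEqualSlices(u8, &.{ 0x34, 0x12, 0x78, 0x56 }, &buf),
    }
}
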
@@ -1141,8 +1134,8 @@ pub fn printValue(
         else => invalidFmtError(fmt, value),
     },
     't' => switch (@typeInfo(T)) {
-        .error_set => return w.writeAll(@errorName(value)),
-        .@"enum", .@"union" => return w.writeAll(@tagName(value)),
+        .error_set => return w.alignBufferOptions(@errorName(value), options),
+        .@"enum", .@"union" => return w.alignBufferOptions(@tagName(value), options),
         else => invalidFmtError(fmt, value),
     },
     else => {},
@@ -2152,6 +2145,14 @@ test "bytes.hex" {
     try testing.expectFmt("lowercase: 000ebabe\n", "lowercase: {x}\n", .{bytes_with_zeros});
 }
 
+test "padding" {
+    const foo: enum { foo } = .foo;
+    try testing.expectFmt("tag: |foo |\n", "tag: |{t:<4}|\n", .{foo});
+
+    const bar: error{bar} = error.bar;
+    try testing.expectFmt("error: |bar |\n", "error: |{t:<4}|\n", .{bar});
+}
+
 test fixed {
     {
         var buf: [255]u8 = undefined;
@@ -2650,9 +2651,10 @@ test writeStruct {
 }
 
 test writeSliceEndian {
-    var buffer: [4]u8 align(2) = undefined;
+    var buffer: [5]u8 align(2) = undefined;
     var w: Writer = .fixed(&buffer);
+    try w.writeByte('x');
     const array: [2]u16 = .{ 0x1234, 0x5678 };
     try writeSliceEndian(&w, u16, &array, .big);
-    try testing.expectEqualSlices(u8, &.{ 0x12, 0x34, 0x56, 0x78 }, &buffer);
+    try testing.expectEqualSlices(u8, &.{ 'x', 0x12, 0x34, 0x56, 0x78 }, &buffer);
 }

@@ -31,8 +31,6 @@ pub const CityHash64 = cityhash.CityHash64;
 const wyhash = @import("hash/wyhash.zig");
 pub const Wyhash = wyhash.Wyhash;
 
-pub const RapidHash = @import("hash/RapidHash.zig");
-
 const xxhash = @import("hash/xxhash.zig");
 pub const XxHash3 = xxhash.XxHash3;
 pub const XxHash64 = xxhash.XxHash64;

@@ -1,125 +0,0 @@
const std = @import("std");

const readInt = std.mem.readInt;
const assert = std.debug.assert;
const expect = std.testing.expect;
const expectEqual = std.testing.expectEqual;

const RAPID_SEED: u64 = 0xbdd89aa982704029;
const RAPID_SECRET: [3]u64 = .{ 0x2d358dccaa6c78a5, 0x8bb84b93962eacc9, 0x4b33a62ed433d4a3 };

pub fn hash(seed: u64, input: []const u8) u64 {
    const sc = RAPID_SECRET;
    const len = input.len;
    var a: u64 = 0;
    var b: u64 = 0;
    var k = input;
    var is: [3]u64 = .{ seed, 0, 0 };

    is[0] ^= mix(seed ^ sc[0], sc[1]) ^ len;

    if (len <= 16) {
        if (len >= 4) {
            const d: u64 = ((len & 24) >> @intCast(len >> 3));
            const e = len - 4;
            a = (r32(k) << 32) | r32(k[e..]);
            b = ((r32(k[d..]) << 32) | r32(k[(e - d)..]));
        } else if (len > 0)
            a = (@as(u64, k[0]) << 56) | (@as(u64, k[len >> 1]) << 32) | @as(u64, k[len - 1]);
    } else {
        var remain = len;
        if (len > 48) {
            is[1] = is[0];
            is[2] = is[0];
            while (remain >= 96) {
                inline for (0..6) |i| {
                    const m1 = r64(k[8 * i * 2 ..]);
                    const m2 = r64(k[8 * (i * 2 + 1) ..]);
                    is[i % 3] = mix(m1 ^ sc[i % 3], m2 ^ is[i % 3]);
                }
                k = k[96..];
                remain -= 96;
            }
            if (remain >= 48) {
                inline for (0..3) |i| {
                    const m1 = r64(k[8 * i * 2 ..]);
                    const m2 = r64(k[8 * (i * 2 + 1) ..]);
                    is[i] = mix(m1 ^ sc[i], m2 ^ is[i]);
                }
                k = k[48..];
                remain -= 48;
            }

            is[0] ^= is[1] ^ is[2];
        }

        if (remain > 16) {
            is[0] = mix(r64(k) ^ sc[2], r64(k[8..]) ^ is[0] ^ sc[1]);
            if (remain > 32) {
                is[0] = mix(r64(k[16..]) ^ sc[2], r64(k[24..]) ^ is[0]);
            }
        }

        a = r64(input[len - 16 ..]);
        b = r64(input[len - 8 ..]);
    }

    a ^= sc[1];
    b ^= is[0];
    mum(&a, &b);
    return mix(a ^ sc[0] ^ len, b ^ sc[1]);
}

test "RapidHash.hash" {
    const bytes: []const u8 = "abcdefgh" ** 128;

    const sizes: [13]u64 = .{ 0, 1, 2, 3, 4, 8, 16, 32, 64, 128, 256, 512, 1024 };

    const outcomes: [13]u64 = .{
        0x5a6ef77074ebc84b,
        0xc11328477bc0f5d1,
        0x5644ac035e40d569,
        0x347080fbf5fcd81,
        0x56b66b8dc802bcc,
        0xb6bf9055973aac7c,
        0xed56d62eead1e402,
        0xc19072d767da8ffb,
        0x89bb40a9928a4f0d,
        0xe0af7c5e7b6e29fd,
        0x9a3ed35fbedfa11a,
        0x4c684b2119ca19fb,
        0x4b575f5bf25600d6,
    };

    var success: bool = true;
    for (sizes, outcomes) |s, e| {
        const r = hash(RAPID_SEED, bytes[0..s]);

        expectEqual(e, r) catch |err| {
            std.debug.print("Failed on {d}: {!}\n", .{ s, err });
            success = false;
        };
    }
    try expect(success);
}

inline fn mum(a: *u64, b: *u64) void {
    const r = @as(u128, a.*) * b.*;
    a.* = @truncate(r);
    b.* = @truncate(r >> 64);
}

inline fn mix(a: u64, b: u64) u64 {
    var copy_a = a;
    var copy_b = b;
    mum(&copy_a, &copy_b);
    return copy_a ^ copy_b;
}

inline fn r64(p: []const u8) u64 {
    return readInt(u64, p[0..8], .little);
}

inline fn r32(p: []const u8) u64 {
    return readInt(u32, p[0..4], .little);
}
@@ -59,12 +59,6 @@ const hashes = [_]Hash{
         .ty = hash.crc.Crc32,
         .name = "crc32",
     },
-    Hash{
-        .ty = hash.RapidHash,
-        .name = "rapidhash",
-        .has_iterative_api = false,
-        .init_u64 = 0,
-    },
     Hash{
         .ty = hash.CityHash32,
         .name = "cityhash-32",

@@ -44,7 +44,7 @@ test Value {
 test Stringify {
     var out: std.io.Writer.Allocating = .init(testing.allocator);
     var write_stream: Stringify = .{
-        .writer = &out.interface,
+        .writer = &out.writer,
         .options = .{ .whitespace = .indent_2 },
     };
     defer out.deinit();
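`std.io.Writer.Allocating` grows its buffer as output arrives, and in this branch its generic writer is reached through the `.writer` field (the hunks here rename it from `.interface`). A sketch combining it with `Stringify`; the exact expected string assumes `indent_2` formatting:

const std = @import("std");

test "Stringify into an allocating writer" {
    var out: std.Io.Writer.Allocating = .init(std.testing.allocator);
    defer out.deinit();

    var write_stream: std.json.Stringify = .{
        .writer = &out.writer,
        .options = .{ .whitespace = .indent_2 },
    };
    try write_stream.beginArray();
    try write_stream.write(123);
    try write_stream.endArray();

    const rendered = try out.toOwnedSlice();
    defer std.testing.allocator.free(rendered);
    try std.testing.expectEqualStrings("[\n  123\n]", rendered);
}
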
@@ -66,18 +66,18 @@ pub const Value = @import("json/dynamic.zig").Value;
 
 pub const ArrayHashMap = @import("json/hashmap.zig").ArrayHashMap;
 
-pub const validate = @import("json/scanner.zig").validate;
-pub const Error = @import("json/scanner.zig").Error;
-pub const reader = @import("json/scanner.zig").reader;
-pub const default_buffer_size = @import("json/scanner.zig").default_buffer_size;
-pub const Token = @import("json/scanner.zig").Token;
-pub const TokenType = @import("json/scanner.zig").TokenType;
-pub const Diagnostics = @import("json/scanner.zig").Diagnostics;
-pub const AllocWhen = @import("json/scanner.zig").AllocWhen;
-pub const default_max_value_len = @import("json/scanner.zig").default_max_value_len;
-pub const Reader = @import("json/scanner.zig").Reader;
-pub const Scanner = @import("json/scanner.zig").Scanner;
-pub const isNumberFormattedLikeAnInteger = @import("json/scanner.zig").isNumberFormattedLikeAnInteger;
+pub const Scanner = @import("json/Scanner.zig");
+pub const validate = Scanner.validate;
+pub const Error = Scanner.Error;
+pub const reader = Scanner.reader;
+pub const default_buffer_size = Scanner.default_buffer_size;
+pub const Token = Scanner.Token;
+pub const TokenType = Scanner.TokenType;
+pub const Diagnostics = Scanner.Diagnostics;
+pub const AllocWhen = Scanner.AllocWhen;
+pub const default_max_value_len = Scanner.default_max_value_len;
+pub const Reader = Scanner.Reader;
+pub const isNumberFormattedLikeAnInteger = Scanner.isNumberFormattedLikeAnInteger;
 
 pub const ParseOptions = @import("json/static.zig").ParseOptions;
 pub const Parsed = @import("json/static.zig").Parsed;
@@ -101,10 +101,10 @@ pub fn fmt(value: anytype, options: Stringify.Options) Formatter(@TypeOf(value))
 
 test fmt {
     const expectFmt = std.testing.expectFmt;
-    try expectFmt("123", "{}", .{fmt(@as(u32, 123), .{})});
+    try expectFmt("123", "{f}", .{fmt(@as(u32, 123), .{})});
     try expectFmt(
         \\{"num":927,"msg":"hello","sub":{"mybool":true}}
-    , "{}", .{fmt(struct {
+    , "{f}", .{fmt(struct {
         num: u32,
         msg: []const u8,
         sub: struct {
@@ -123,14 +123,7 @@ pub fn Formatter(comptime T: type) type {
         value: T,
         options: Stringify.Options,
 
-        pub fn format(
-            self: @This(),
-            comptime fmt_spec: []const u8,
-            options: std.fmt.FormatOptions,
-            writer: *std.io.Writer,
-        ) !void {
-            comptime std.debug.assert(fmt_spec.len == 0);
-            _ = options;
+        pub fn format(self: @This(), writer: *std.Io.Writer) std.Io.Writer.Error!void {
             try Stringify.value(self.value, self.options, writer);
         }
     };
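With the new interface, a `format` method takes only the value and a `*std.Io.Writer`; format-string parsing is gone from the signature, and the `{f}` specifier (see the test changes above) is what selects the method. A sketch with a hypothetical `Point` type:

const std = @import("std");

const Point = struct {
    x: i32,
    y: i32,

    pub fn format(self: @This(), writer: *std.Io.Writer) std.Io.Writer.Error!void {
        try writer.print("({d}, {d})", .{ self.x, self.y });
    }
};

test "format method selected by {f}" {
    var buf: [32]u8 = undefined;
    var w: std.Io.Writer = .fixed(&buf);
    try w.print("{f}", .{Point{ .x = 1, .y = 2 }});
    try std.testing.expectEqualStrings("(1, 2)", w.buffered());
}
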
@@ -138,7 +131,7 @@ pub fn Formatter(comptime T: type) type {
 
 test {
     _ = @import("json/test.zig");
-    _ = @import("json/scanner.zig");
+    _ = Scanner;
     _ = @import("json/dynamic.zig");
     _ = @import("json/hashmap.zig");
     _ = @import("json/static.zig");

lib/std/json/Scanner.zig (new file, 1767 lines)
File diff suppressed because it is too large
@@ -248,7 +248,7 @@ test print {
     \\  ]
     \\}
     ;
-    try std.testing.expectEqualStrings(expected, out.getWritten());
+    try std.testing.expectEqualStrings(expected, out.buffered());
 }
 
 /// An alternative to calling `write` that allows you to write directly to the `.writer` field, e.g. with `.writer.writeAll()`.
@@ -577,7 +577,7 @@ pub fn value(v: anytype, options: Options, writer: *Writer) Error!void {
 
 test value {
     var out: std.io.Writer.Allocating = .init(std.testing.allocator);
-    const writer = &out.interface;
+    const writer = &out.writer;
     defer out.deinit();
 
     const T = struct { a: i32, b: []const u8 };
@@ -617,9 +617,8 @@ test value {
 /// Caller owns returned memory.
 pub fn valueAlloc(gpa: Allocator, v: anytype, options: Options) error{OutOfMemory}![]u8 {
     var aw: std.io.Writer.Allocating = .init(gpa);
-    const writer = &aw.interface;
     defer aw.deinit();
-    value(v, options, writer) catch return error.OutOfMemory;
+    value(v, options, &aw.writer) catch return error.OutOfMemory;
     return aw.toOwnedSlice();
 }

@@ -634,23 +633,23 @@ test valueAlloc {
     try std.testing.expectEqualStrings(expected, actual);
 }
 
-fn outputUnicodeEscape(codepoint: u21, bw: *Writer) Error!void {
+fn outputUnicodeEscape(codepoint: u21, w: *Writer) Error!void {
     if (codepoint <= 0xFFFF) {
         // If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
         // then it may be represented as a six-character sequence: a reverse solidus, followed
         // by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
-        try bw.writeAll("\\u");
-        try bw.printInt("x", .{ .width = 4, .fill = '0' }, codepoint);
+        try w.writeAll("\\u");
+        try w.printInt(codepoint, 16, .lower, .{ .width = 4, .fill = '0' });
     } else {
         assert(codepoint <= 0x10FFFF);
         // To escape an extended character that is not in the Basic Multilingual Plane,
         // the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
         const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
         const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
-        try bw.writeAll("\\u");
-        try bw.printInt("x", .{ .width = 4, .fill = '0' }, high);
-        try bw.writeAll("\\u");
-        try bw.printInt("x", .{ .width = 4, .fill = '0' }, low);
+        try w.writeAll("\\u");
+        try w.printInt(high, 16, .lower, .{ .width = 4, .fill = '0' });
+        try w.writeAll("\\u");
+        try w.printInt(low, 16, .lower, .{ .width = 4, .fill = '0' });
     }
 }

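The call sites above also document the reworked `printInt`: the value comes first, followed by the numeric base, the letter case, and the padding options, rather than a format-string fragment. An isolated sketch of the ordering as used in these hunks:

const std = @import("std");

test "printInt with width and fill" {
    var buf: [8]u8 = undefined;
    var w: std.Io.Writer = .fixed(&buf);
    // value, base, letter case, then format options:
    try w.printInt(0x2a, 16, .lower, .{ .width = 4, .fill = '0' });
    try std.testing.expectEqualStrings("002a", w.buffered());
}
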
@@ -723,8 +722,8 @@ test "json write stream" {
     try testBasicWriteStream(&w);
 }
 
-fn testBasicWriteStream(w: *Stringify) Error!void {
-    w.writer.reset();
+fn testBasicWriteStream(w: *Stringify) !void {
+    w.writer.end = 0;
 
     try w.beginObject();
 
@@ -755,19 +754,19 @@ fn testBasicWriteStream(w: *Stringify) Error!void {
     \\{
     \\  "object": {
     \\    "one": 1,
-    \\    "two": 2e0
+    \\    "two": 2
     \\  },
     \\  "string": "This is a string",
     \\  "array": [
     \\    "Another string",
     \\    1,
-    \\    3.5e0
+    \\    3.5
     \\  ],
     \\  "int": 10,
-    \\  "float": 3.5e0
+    \\  "float": 3.5
     \\}
     ;
-    try std.testing.expectEqualStrings(expected, w.writer.getWritten());
+    try std.testing.expectEqualStrings(expected, w.writer.buffered());
 }
 
 fn getJsonObject(allocator: std.mem.Allocator) !std.json.Value {
@@ -804,12 +803,12 @@ test "stringify basic types" {
     try testStringify("null", @as(?u8, null), .{});
     try testStringify("null", @as(?*u32, null), .{});
     try testStringify("42", 42, .{});
-    try testStringify("4.2e1", 42.0, .{});
+    try testStringify("42", 42.0, .{});
     try testStringify("42", @as(u8, 42), .{});
     try testStringify("42", @as(u128, 42), .{});
     try testStringify("9999999999999999", 9999999999999999, .{});
-    try testStringify("4.2e1", @as(f32, 42), .{});
-    try testStringify("4.2e1", @as(f64, 42), .{});
+    try testStringify("42", @as(f32, 42), .{});
+    try testStringify("42", @as(f64, 42), .{});
     try testStringify("\"ItBroke\"", @as(anyerror, error.ItBroke), .{});
     try testStringify("\"ItBroke\"", error.ItBroke, .{});
 }
@@ -970,9 +969,9 @@ test "stringify struct with custom stringifier" {
 
 fn testStringify(expected: []const u8, v: anytype, options: Options) !void {
     var buffer: [4096]u8 = undefined;
-    var bw: Writer = .fixed(&buffer);
-    try value(v, options, &bw);
-    try std.testing.expectEqualStrings(expected, bw.getWritten());
+    var w: Writer = .fixed(&buffer);
+    try value(v, options, &w);
+    try std.testing.expectEqualStrings(expected, w.buffered());
 }
 
 test "raw streaming" {
@@ -996,5 +995,5 @@ test "raw streaming" {
     \\  "long key": "long value"
     \\}
     ;
-    try std.testing.expectEqualStrings(expected, w.writer.getWritten());
+    try std.testing.expectEqualStrings(expected, w.writer.buffered());
 }

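`Writer.fixed` plus `buffered()` is the pattern that replaces `FixedBufferStream` with `getWritten()` throughout these tests: the writer targets a caller-supplied buffer, and `buffered()` returns the bytes written so far. Minimal sketch:

const std = @import("std");

test "fixed writer collects output in a user buffer" {
    var buf: [64]u8 = undefined;
    var w: std.Io.Writer = .fixed(&buf);
    try w.print("{d} + {d} = {d}", .{ 2, 2, 4 });
    // buffered() is the replacement for FixedBufferStream.getWritten().
    try std.testing.expectEqualStrings("2 + 2 = 4", w.buffered());
}
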
@@ -9,10 +9,7 @@ const json = std.json;
 const ParseOptions = @import("./static.zig").ParseOptions;
 const ParseError = @import("./static.zig").ParseError;
 
-const JsonScanner = @import("./scanner.zig").Scanner;
-const AllocWhen = @import("./scanner.zig").AllocWhen;
-const Token = @import("./scanner.zig").Token;
-const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;
+const isNumberFormattedLikeAnInteger = @import("Scanner.zig").isNumberFormattedLikeAnInteger;
 
 pub const ObjectMap = StringArrayHashMap(Value);
 pub const Array = ArrayList(Value);

@@ -16,8 +16,7 @@ const parseFromTokenSource = @import("static.zig").parseFromTokenSource;
 const parseFromValueLeaky = @import("static.zig").parseFromValueLeaky;
 const ParseOptions = @import("static.zig").ParseOptions;
 
-const jsonReader = @import("scanner.zig").reader;
-const JsonReader = @import("scanner.zig").Reader;
+const Scanner = @import("Scanner.zig");
 
 test "json.parser.dynamic" {
     const s =
@@ -99,8 +98,8 @@ test "write json then parse it" {
 
     try jw.endObject();
 
-    var fbs: std.io.FixedBufferStream = .{ .buffer = fixed_writer.getWritten() };
-    var json_reader = jsonReader(testing.allocator, fbs.reader());
+    var fbs: std.Io.Reader = .fixed(fixed_writer.buffered());
+    var json_reader: Scanner.Reader = .init(testing.allocator, &fbs);
     defer json_reader.deinit();
     var parsed = try parseFromTokenSource(Value, testing.allocator, &json_reader, .{});
     defer parsed.deinit();
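The reader side mirrors the writer changes: a `std.Io.Reader` constructed with `.fixed` over a byte slice feeds `Scanner.Reader`, replacing `FixedBufferStream` plus the comptime-sized `jsonReader`. A self-contained sketch of the whole pipeline (the one-key document is hypothetical):

const std = @import("std");

test "stream JSON tokens from a fixed reader" {
    const doc = "{\"a\": 1}";
    var fbs: std.Io.Reader = .fixed(doc);
    var json_reader: std.json.Scanner.Reader = .init(std.testing.allocator, &fbs);
    defer json_reader.deinit();

    var parsed = try std.json.parseFromTokenSource(std.json.Value, std.testing.allocator, &json_reader, .{});
    defer parsed.deinit();
    try std.testing.expectEqual(@as(i64, 1), parsed.value.object.get("a").?.integer);
}
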
@@ -263,7 +262,7 @@ test "Value.jsonStringify" {
     \\  }
     \\]
     ;
-    try testing.expectEqualStrings(expected, fixed_writer.getWritten());
+    try testing.expectEqualStrings(expected, fixed_writer.buffered());
 }
 
 test "parseFromValue(std.json.Value,...)" {
@@ -331,8 +330,8 @@ test "polymorphic parsing" {
 test "long object value" {
     const value = "01234567890123456789";
     const doc = "{\"key\":\"" ++ value ++ "\"}";
-    var fbs: std.io.FixedBufferStream = .{ .buffer = doc };
-    var reader = smallBufferJsonReader(testing.allocator, fbs.reader());
+    var fbs: std.Io.Reader = .fixed(doc);
+    var reader = smallBufferJsonReader(testing.allocator, &fbs);
     defer reader.deinit();
     var parsed = try parseFromTokenSource(Value, testing.allocator, &reader, .{});
     defer parsed.deinit();
@@ -364,8 +363,8 @@ test "many object keys" {
     \\  "k5": "v5"
     \\}
     ;
-    var fbs: std.io.FixedBufferStream = .{ .buffer = doc };
-    var reader = smallBufferJsonReader(testing.allocator, fbs.reader());
+    var fbs: std.Io.Reader = .fixed(doc);
+    var reader = smallBufferJsonReader(testing.allocator, &fbs);
     defer reader.deinit();
     var parsed = try parseFromTokenSource(Value, testing.allocator, &reader, .{});
     defer parsed.deinit();
@@ -379,8 +378,8 @@ test "many object keys" {
 
 test "negative zero" {
     const doc = "-0";
-    var fbs: std.io.FixedBufferStream = .{ .buffer = doc };
-    var reader = smallBufferJsonReader(testing.allocator, fbs.reader());
+    var fbs: std.Io.Reader = .fixed(doc);
+    var reader = smallBufferJsonReader(testing.allocator, &fbs);
     defer reader.deinit();
     var parsed = try parseFromTokenSource(Value, testing.allocator, &reader, .{});
     defer parsed.deinit();
@@ -388,6 +387,6 @@ test "negative zero" {
     try testing.expect(std.math.isNegativeZero(parsed.value.float));
 }
 
-fn smallBufferJsonReader(allocator: Allocator, io_reader: anytype) JsonReader(16, @TypeOf(io_reader)) {
-    return JsonReader(16, @TypeOf(io_reader)).init(allocator, io_reader);
+fn smallBufferJsonReader(allocator: Allocator, io_reader: anytype) Scanner.Reader {
+    return .init(allocator, io_reader);
 }

@@ -10,7 +10,7 @@ const parseFromTokenSource = @import("static.zig").parseFromTokenSource;
 const parseFromValue = @import("static.zig").parseFromValue;
 const Value = @import("dynamic.zig").Value;
 
-const jsonReader = @import("./scanner.zig").reader;
+const Scanner = @import("Scanner.zig");
 
 const T = struct {
     i: i32,
@@ -39,8 +39,8 @@ test "parse json hashmap while streaming" {
     \\  "xyz": {"i": 1, "s": "w"}
     \\}
     ;
-    var stream: std.io.FixedBufferStream = .{ .buffer = doc };
-    var json_reader = jsonReader(testing.allocator, stream.reader());
+    var stream: std.Io.Reader = .fixed(doc);
+    var json_reader: Scanner.Reader = .init(testing.allocator, &stream);
 
     var parsed = try parseFromTokenSource(
         ArrayHashMap(T),

File diff suppressed because it is too large
@@ -1,13 +1,11 @@
 const std = @import("std");
-const JsonScanner = @import("./scanner.zig").Scanner;
-const jsonReader = @import("./scanner.zig").reader;
-const JsonReader = @import("./scanner.zig").Reader;
-const Token = @import("./scanner.zig").Token;
-const TokenType = @import("./scanner.zig").TokenType;
-const Diagnostics = @import("./scanner.zig").Diagnostics;
-const Error = @import("./scanner.zig").Error;
-const validate = @import("./scanner.zig").validate;
-const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;
+const Scanner = @import("Scanner.zig");
+const Token = Scanner.Token;
+const TokenType = Scanner.TokenType;
+const Diagnostics = Scanner.Diagnostics;
+const Error = Scanner.Error;
+const validate = Scanner.validate;
+const isNumberFormattedLikeAnInteger = Scanner.isNumberFormattedLikeAnInteger;
 
 const example_document_str =
     \\{
@@ -36,7 +34,7 @@ fn expectPeekNext(scanner_or_reader: anytype, expected_token_type: TokenType, ex
 }
 
 test "token" {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, example_document_str);
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, example_document_str);
     defer scanner.deinit();
 
     try expectNext(&scanner, .object_begin);
@@ -138,23 +136,25 @@ fn testAllTypes(source: anytype, large_buffer: bool) !void {
 }
 
 test "peek all types" {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, all_types_test_case);
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, all_types_test_case);
     defer scanner.deinit();
     try testAllTypes(&scanner, true);
 
-    var stream: std.io.FixedBufferStream = .{ .buffer = all_types_test_case };
-    var json_reader = jsonReader(std.testing.allocator, stream.reader());
+    var stream: std.Io.Reader = .fixed(all_types_test_case);
+    var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
     defer json_reader.deinit();
     try testAllTypes(&json_reader, true);
 
-    var tiny_stream: std.io.FixedBufferStream = .{ .buffer = all_types_test_case };
-    var tiny_json_reader = JsonReader(1, @TypeOf(tiny_stream.reader())).init(std.testing.allocator, tiny_stream.reader());
+    var tiny_buffer: [1]u8 = undefined;
+    var tiny_stream: std.testing.Reader = .init(&tiny_buffer, &.{.{ .buffer = all_types_test_case }});
+    tiny_stream.artificial_limit = .limited(1);
+    var tiny_json_reader: Scanner.Reader = .init(std.testing.allocator, &tiny_stream.interface);
     defer tiny_json_reader.deinit();
     try testAllTypes(&tiny_json_reader, false);
 }
 
 test "token mismatched close" {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, "[102, 111, 111 }");
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, "[102, 111, 111 }");
     defer scanner.deinit();
     try expectNext(&scanner, .array_begin);
     try expectNext(&scanner, Token{ .number = "102" });
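Where the old code instantiated `JsonReader(1, ...)` to force tiny reads, the new code uses `std.testing.Reader` with an `artificial_limit` that throttles how much each read may return. A sketch of just that throttling, with the byte-at-a-time loop for illustration:

const std = @import("std");

test "testing.Reader delivers input in 1-byte steps" {
    var tiny_buffer: [1]u8 = undefined;
    var tiny_stream: std.testing.Reader = .init(&tiny_buffer, &.{.{ .buffer = "abc" }});
    tiny_stream.artificial_limit = .limited(1);

    var out: [3]u8 = undefined;
    for (&out) |*b| b.* = try tiny_stream.interface.takeByte();
    try std.testing.expectEqualStrings("abc", &out);
}
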
@@ -164,15 +164,15 @@ test "token mismatched close" {
 }
 
 test "token premature object close" {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, "{ \"key\": }");
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, "{ \"key\": }");
     defer scanner.deinit();
     try expectNext(&scanner, .object_begin);
     try expectNext(&scanner, Token{ .string = "key" });
     try std.testing.expectError(error.SyntaxError, scanner.next());
 }
 
-test "JsonScanner basic" {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, example_document_str);
+test "Scanner basic" {
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, example_document_str);
     defer scanner.deinit();
 
     while (true) {
@@ -181,10 +181,10 @@ test "JsonScanner basic" {
     }
 }
 
-test "JsonReader basic" {
-    var stream: std.io.FixedBufferStream = .{ .buffer = example_document_str };
+test "Scanner.Reader basic" {
+    var stream: std.Io.Reader = .fixed(example_document_str);
 
-    var json_reader = jsonReader(std.testing.allocator, stream.reader());
+    var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
     defer json_reader.deinit();
 
     while (true) {
@@ -215,7 +215,7 @@ const number_test_items = blk: {
 
 test "numbers" {
     for (number_test_items) |number_str| {
-        var scanner = JsonScanner.initCompleteInput(std.testing.allocator, number_str);
+        var scanner = Scanner.initCompleteInput(std.testing.allocator, number_str);
         defer scanner.deinit();
 
         const token = try scanner.next();
@@ -243,10 +243,10 @@ const string_test_cases = .{
 
 test "strings" {
     inline for (string_test_cases) |tuple| {
-        var stream: std.io.FixedBufferStream = .{ .buffer = "\"" ++ tuple[0] ++ "\"" };
+        var stream: std.Io.Reader = .fixed("\"" ++ tuple[0] ++ "\"");
         var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
         defer arena.deinit();
-        var json_reader = jsonReader(std.testing.allocator, stream.reader());
+        var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
         defer json_reader.deinit();
 
         const token = try json_reader.nextAlloc(arena.allocator(), .alloc_if_needed);
@@ -289,7 +289,7 @@ test "nesting" {
 }
 
 fn expectMaybeError(document_str: []const u8, maybe_error: ?Error) !void {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, document_str);
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, document_str);
     defer scanner.deinit();
 
     while (true) {
@@ -352,12 +352,12 @@ fn expectEqualTokens(expected_token: Token, actual_token: Token) !void {
 }
 
 fn testTinyBufferSize(document_str: []const u8) !void {
-    var tiny_stream: std.io.FixedBufferStream = .{ .buffer = document_str };
-    var normal_stream: std.io.FixedBufferStream = .{ .buffer = document_str };
+    var tiny_stream: std.Io.Reader = .fixed(document_str);
+    var normal_stream: std.Io.Reader = .fixed(document_str);
 
-    var tiny_json_reader = JsonReader(1, @TypeOf(tiny_stream.reader())).init(std.testing.allocator, tiny_stream.reader());
+    var tiny_json_reader: Scanner.Reader = .init(std.testing.allocator, &tiny_stream);
     defer tiny_json_reader.deinit();
-    var normal_json_reader = JsonReader(0x1000, @TypeOf(normal_stream.reader())).init(std.testing.allocator, normal_stream.reader());
+    var normal_json_reader: Scanner.Reader = .init(std.testing.allocator, &normal_stream);
     defer normal_json_reader.deinit();
 
     expectEqualStreamOfTokens(&normal_json_reader, &tiny_json_reader) catch |err| {
@@ -397,13 +397,13 @@ test "validate" {
 }
 
 fn testSkipValue(s: []const u8) !void {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, s);
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, s);
     defer scanner.deinit();
     try scanner.skipValue();
     try expectEqualTokens(.end_of_document, try scanner.next());
 
-    var stream: std.io.FixedBufferStream = .{ .buffer = s };
-    var json_reader = jsonReader(std.testing.allocator, stream.reader());
+    var stream: std.Io.Reader = .fixed(s);
+    var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
     defer json_reader.deinit();
     try json_reader.skipValue();
     try expectEqualTokens(.end_of_document, try json_reader.next());
@@ -441,7 +441,7 @@ fn testEnsureStackCapacity(do_ensure: bool) !void {
     try input_string.appendNTimes(std.testing.allocator, ']', nestings);
     defer input_string.deinit(std.testing.allocator);
 
-    var scanner = JsonScanner.initCompleteInput(failing_allocator, input_string.items);
+    var scanner = Scanner.initCompleteInput(failing_allocator, input_string.items);
     defer scanner.deinit();
 
     if (do_ensure) {
@@ -473,17 +473,17 @@ fn testDiagnosticsFromSource(expected_error: ?anyerror, line: u64, col: u64, byt
     try std.testing.expectEqual(byte_offset, diagnostics.getByteOffset());
 }
 fn testDiagnostics(expected_error: ?anyerror, line: u64, col: u64, byte_offset: u64, s: []const u8) !void {
-    var scanner = JsonScanner.initCompleteInput(std.testing.allocator, s);
+    var scanner = Scanner.initCompleteInput(std.testing.allocator, s);
     defer scanner.deinit();
     try testDiagnosticsFromSource(expected_error, line, col, byte_offset, &scanner);
 
-    var tiny_stream: std.io.FixedBufferStream = .{ .buffer = s };
-    var tiny_json_reader = JsonReader(1, @TypeOf(tiny_stream.reader())).init(std.testing.allocator, tiny_stream.reader());
+    var tiny_stream: std.Io.Reader = .fixed(s);
+    var tiny_json_reader: Scanner.Reader = .init(std.testing.allocator, &tiny_stream);
     defer tiny_json_reader.deinit();
     try testDiagnosticsFromSource(expected_error, line, col, byte_offset, &tiny_json_reader);
 
-    var medium_stream: std.io.FixedBufferStream = .{ .buffer = s };
-    var medium_json_reader = JsonReader(5, @TypeOf(medium_stream.reader())).init(std.testing.allocator, medium_stream.reader());
+    var medium_stream: std.Io.Reader = .fixed(s);
+    var medium_json_reader: Scanner.Reader = .init(std.testing.allocator, &medium_stream);
     defer medium_json_reader.deinit();
     try testDiagnosticsFromSource(expected_error, line, col, byte_offset, &medium_json_reader);
 }

@@ -4,11 +4,11 @@ const Allocator = std.mem.Allocator;
 const ArenaAllocator = std.heap.ArenaAllocator;
 const ArrayList = std.ArrayList;
 
-const Scanner = @import("./scanner.zig").Scanner;
-const Token = @import("./scanner.zig").Token;
-const AllocWhen = @import("./scanner.zig").AllocWhen;
-const default_max_value_len = @import("./scanner.zig").default_max_value_len;
-const isNumberFormattedLikeAnInteger = @import("./scanner.zig").isNumberFormattedLikeAnInteger;
+const Scanner = @import("Scanner.zig");
+const Token = Scanner.Token;
+const AllocWhen = Scanner.AllocWhen;
+const default_max_value_len = Scanner.default_max_value_len;
+const isNumberFormattedLikeAnInteger = Scanner.isNumberFormattedLikeAnInteger;
 
 const Value = @import("./dynamic.zig").Value;
 const Array = @import("./dynamic.zig").Array;

@@ -12,9 +12,7 @@ const parseFromValue = @import("./static.zig").parseFromValue;
 const parseFromValueLeaky = @import("./static.zig").parseFromValueLeaky;
 const ParseOptions = @import("./static.zig").ParseOptions;
 
-const JsonScanner = @import("./scanner.zig").Scanner;
-const jsonReader = @import("./scanner.zig").reader;
-const Diagnostics = @import("./scanner.zig").Diagnostics;
+const Scanner = @import("Scanner.zig");
 
 const Value = @import("./dynamic.zig").Value;

@@ -300,9 +298,9 @@ const subnamespaces_0_doc =
 fn testAllParseFunctions(comptime T: type, expected: T, doc: []const u8) !void {
     // First do the one with the debug info in case we get a SyntaxError or something.
     {
-        var scanner = JsonScanner.initCompleteInput(testing.allocator, doc);
+        var scanner = Scanner.initCompleteInput(testing.allocator, doc);
         defer scanner.deinit();
-        var diagnostics = Diagnostics{};
+        var diagnostics = Scanner.Diagnostics{};
         scanner.enableDiagnostics(&diagnostics);
         var parsed = parseFromTokenSource(T, testing.allocator, &scanner, .{}) catch |e| {
             std.debug.print("at line,col: {}:{}\n", .{ diagnostics.getLine(), diagnostics.getColumn() });
@@ -317,8 +315,8 @@ fn testAllParseFunctions(comptime T: type, expected: T, doc: []const u8) !void {
         try testing.expectEqualDeep(expected, parsed.value);
     }
     {
-        var stream: std.io.FixedBufferStream = .{ .buffer = doc };
-        var json_reader = jsonReader(std.testing.allocator, stream.reader());
+        var stream: std.Io.Reader = .fixed(doc);
+        var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
         defer json_reader.deinit();
         var parsed = try parseFromTokenSource(T, testing.allocator, &json_reader, .{});
         defer parsed.deinit();
@@ -331,13 +329,13 @@ fn testAllParseFunctions(comptime T: type, expected: T, doc: []const u8) !void {
         try testing.expectEqualDeep(expected, try parseFromSliceLeaky(T, arena.allocator(), doc, .{}));
     }
     {
-        var scanner = JsonScanner.initCompleteInput(testing.allocator, doc);
+        var scanner = Scanner.initCompleteInput(testing.allocator, doc);
         defer scanner.deinit();
         try testing.expectEqualDeep(expected, try parseFromTokenSourceLeaky(T, arena.allocator(), &scanner, .{}));
     }
     {
-        var stream: std.io.FixedBufferStream = .{ .buffer = doc };
-        var json_reader = jsonReader(std.testing.allocator, stream.reader());
+        var stream: std.Io.Reader = .fixed(doc);
+        var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
         defer json_reader.deinit();
         try testing.expectEqualDeep(expected, try parseFromTokenSourceLeaky(T, arena.allocator(), &json_reader, .{}));
     }
@@ -763,7 +761,7 @@ test "parse exponential into int" {
 
 test "parseFromTokenSource" {
     {
-        var scanner = JsonScanner.initCompleteInput(testing.allocator, "123");
+        var scanner = Scanner.initCompleteInput(testing.allocator, "123");
         defer scanner.deinit();
         var parsed = try parseFromTokenSource(u32, testing.allocator, &scanner, .{});
         defer parsed.deinit();
@@ -771,8 +769,8 @@ test "parseFromTokenSource" {
     }
 
     {
-        var stream: std.io.FixedBufferStream = .{ .buffer = "123" };
-        var json_reader = jsonReader(std.testing.allocator, stream.reader());
+        var stream: std.Io.Reader = .fixed("123");
+        var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
         defer json_reader.deinit();
         var parsed = try parseFromTokenSource(u32, testing.allocator, &json_reader, .{});
         defer parsed.deinit();
@@ -836,7 +834,7 @@ test "json parse partial" {
     \\}
     ;
     const allocator = testing.allocator;
-    var scanner = JsonScanner.initCompleteInput(allocator, str);
+    var scanner = Scanner.initCompleteInput(allocator, str);
     defer scanner.deinit();
 
     var arena = ArenaAllocator.init(allocator);
@@ -886,8 +884,8 @@ test "json parse allocate when streaming" {
     var arena = ArenaAllocator.init(allocator);
     defer arena.deinit();
 
-    var stream: std.io.FixedBufferStream = .{ .buffer = str };
-    var json_reader = jsonReader(std.testing.allocator, stream.reader());
+    var stream: std.Io.Reader = .fixed(str);
+    var json_reader: Scanner.Reader = .init(std.testing.allocator, &stream);
 
     const parsed = parseFromTokenSourceLeaky(T, arena.allocator(), &json_reader, .{}) catch |err| {
         json_reader.deinit();

@@ -2,8 +2,7 @@ const std = @import("std");
 const json = std.json;
 const testing = std.testing;
 const parseFromSlice = @import("./static.zig").parseFromSlice;
-const validate = @import("./scanner.zig").validate;
-const JsonScanner = @import("./scanner.zig").Scanner;
+const Scanner = @import("./Scanner.zig");
 const Value = @import("./dynamic.zig").Value;
 
 // Support for JSONTestSuite.zig
@@ -20,7 +19,7 @@ pub fn any(s: []const u8) !void {
     testHighLevelDynamicParser(s) catch {};
 }
 fn testLowLevelScanner(s: []const u8) !void {
-    var scanner = JsonScanner.initCompleteInput(testing.allocator, s);
+    var scanner = Scanner.initCompleteInput(testing.allocator, s);
     defer scanner.deinit();
     while (true) {
         const token = try scanner.next();
@@ -47,7 +46,7 @@ test "n_object_closed_missing_value" {
 }
 
 fn roundTrip(s: []const u8) !void {
-    try testing.expect(try validate(testing.allocator, s));
+    try testing.expect(try Scanner.validate(testing.allocator, s));
 
     var parsed = try parseFromSlice(Value, testing.allocator, s, .{});
     defer parsed.deinit();

@@ -446,8 +446,8 @@ pub fn fmtString(bytes: []const u8) std.fmt.Formatter([]const u8, stringEscape)
 }
 
 /// Return a formatter for escaping a single quoted Zig string.
-pub fn fmtChar(bytes: []const u8) std.fmt.Formatter([]const u8, charEscape) {
-    return .{ .data = bytes };
+pub fn fmtChar(c: u21) std.fmt.Formatter(u21, charEscape) {
+    return .{ .data = c };
 }
 
 test fmtString {
@@ -458,9 +458,7 @@ test fmtString {
 }
 
 test fmtChar {
-    try std.testing.expectFmt(
-        \\" \\ hi \x07 \x11 " derp \'"
-    , "\"{f}\"", .{fmtChar(" \\ hi \x07 \x11 \" derp '")});
+    try std.testing.expectFmt("c \\u{26a1}", "{f} {f}", .{ fmtChar('c'), fmtChar('⚡') });
 }
 
 /// Print the string as escaped contents of a double quoted string.
@@ -480,21 +478,26 @@ pub fn stringEscape(bytes: []const u8, w: *Writer) Writer.Error!void {
     };
 }
 
-/// Print the string as escaped contents of a single-quoted string.
-pub fn charEscape(bytes: []const u8, w: *Writer) Writer.Error!void {
-    for (bytes) |byte| switch (byte) {
+/// Print as escaped contents of a single-quoted string.
+pub fn charEscape(codepoint: u21, w: *Writer) Writer.Error!void {
+    switch (codepoint) {
         '\n' => try w.writeAll("\\n"),
         '\r' => try w.writeAll("\\r"),
         '\t' => try w.writeAll("\\t"),
         '\\' => try w.writeAll("\\\\"),
-        '"' => try w.writeByte('"'),
         '\'' => try w.writeAll("\\'"),
-        ' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(byte),
+        '"', ' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(@intCast(codepoint)),
         else => {
-            try w.writeAll("\\x");
-            try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' });
+            if (std.math.cast(u8, codepoint)) |byte| {
+                try w.writeAll("\\x");
+                try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' });
+            } else {
+                try w.writeAll("\\u{");
+                try w.printInt(codepoint, 16, .lower, .{});
+                try w.writeByte('}');
+            }
         },
-    };
+    }
 }
 
 pub fn isValidId(bytes: []const u8) bool {

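`fmtChar` now takes a single code point rather than a byte slice, and the new `charEscape` above defines the behavior per range. The expected outputs below follow that switch directly:

const std = @import("std");

test "escape a single code point" {
    // Printable ASCII passes through; non-printable u8 becomes \xNN;
    // anything wider becomes \u{...}.
    try std.testing.expectFmt("A", "{f}", .{std.zig.fmtChar('A')});
    try std.testing.expectFmt("\\x07", "{f}", .{std.zig.fmtChar(7)});
    try std.testing.expectFmt("\\u{26a1}", "{f}", .{std.zig.fmtChar('⚡')});
}
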
@@ -574,7 +574,7 @@ pub fn renderError(tree: Ast, parse_error: Error, w: *Writer) Writer.Error!void
             '/' => "comment",
             else => unreachable,
         },
-        std.zig.fmtChar(tok_slice[parse_error.extra.offset..][0..1]),
+        std.zig.fmtChar(tok_slice[parse_error.extra.offset]),
     });
 },

@@ -203,8 +203,8 @@ pub const TestMetadata = struct {
 
 pub fn serveTestMetadata(s: *Server, test_metadata: TestMetadata) !void {
     const header: OutMessage.TestMetadata = .{
-        .tests_len = @as(u32, @intCast(test_metadata.names.len)),
-        .string_bytes_len = @as(u32, @intCast(test_metadata.string_bytes.len)),
+        .tests_len = @intCast(test_metadata.names.len),
+        .string_bytes_len = @intCast(test_metadata.string_bytes.len),
     };
     const trailing = 2;
     const bytes_len = @sizeOf(OutMessage.TestMetadata) +

@@ -177,7 +177,7 @@ pub fn next(bc: *BitcodeReader) !?Item {
 
 pub fn skipBlock(bc: *BitcodeReader, block: Block) !void {
     assert(bc.bit_offset == 0);
-    try bc.reader.discardAll(4 * @as(u34, block.len));
+    try bc.reader.discardAll(4 * @as(usize, block.len));
     try bc.endBlock();
 }

@@ -38,6 +38,7 @@
 
 pub const parse = @import("zon/parse.zig");
 pub const stringify = @import("zon/stringify.zig");
+pub const Serializer = @import("zon/Serializer.zig");
 
 test {
     _ = parse;

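The new `std.zon.Serializer` (listed below) is a plain struct over a `*std.Io.Writer`, so it can be field-initialized directly; the module docs also mention a `serializer` constructor helper. A usage sketch — the expected text assumes the `fields > 2` wrapping heuristic shown in `ContainerOptions`:

const std = @import("std");

test "serialize an anonymous struct to ZON" {
    var buf: [64]u8 = undefined;
    var w: std.Io.Writer = .fixed(&buf);

    var s: std.zon.Serializer = .{ .writer = &w };
    try s.value(.{ .x = 1, .y = 2 }, .{});

    // Two fields stay on one line under the default wrapping rule.
    try std.testing.expectEqualStrings(".{ .x = 1, .y = 2 }", w.buffered());
}
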
lib/std/zon/Serializer.zig (new file, 929 lines)
@@ -0,0 +1,929 @@
//! Lower level control over serialization, you can create a new instance with `serializer`.
//!
//! Useful when you want control over which fields are serialized, how they're represented,
//! or want to write a ZON object that does not exist in memory.
//!
//! You can serialize values with `value`. To serialize recursive types, the following are provided:
//! * `valueMaxDepth`
//! * `valueArbitraryDepth`
//!
//! You can also serialize values using specific notations:
//! * `int`
//! * `float`
//! * `codePoint`
//! * `tuple`
//! * `tupleMaxDepth`
//! * `tupleArbitraryDepth`
//! * `string`
//! * `multilineString`
//!
//! For manual serialization of containers, see:
//! * `beginStruct`
//! * `beginTuple`

options: Options = .{},
indent_level: u8 = 0,
writer: *Writer,

const Serializer = @This();
const std = @import("std");
const assert = std.debug.assert;
const Writer = std.Io.Writer;

pub const Error = Writer.Error;
pub const DepthError = Error || error{ExceededMaxDepth};

pub const Options = struct {
    /// If false, only syntactically necessary whitespace is emitted.
    whitespace: bool = true,
};

/// Options for manual serialization of container types.
pub const ContainerOptions = struct {
    /// The whitespace style that should be used for this container. Ignored if whitespace is off.
    whitespace_style: union(enum) {
        /// If true, wrap every field. If false do not.
        wrap: bool,
        /// Automatically decide whether to wrap or not based on the number of fields. Following
        /// the standard rule of thumb, containers with more than two fields are wrapped.
        fields: usize,
    } = .{ .wrap = true },

    fn shouldWrap(self: ContainerOptions) bool {
        return switch (self.whitespace_style) {
            .wrap => |wrap| wrap,
            .fields => |fields| fields > 2,
        };
    }
};

/// Options for serialization of an individual value.
///
/// See `SerializeOptions` for more information on these options.
pub const ValueOptions = struct {
    emit_codepoint_literals: EmitCodepointLiterals = .never,
    emit_strings_as_containers: bool = false,
    emit_default_optional_fields: bool = true,
};

/// Determines when to emit Unicode code point literals as opposed to integer literals.
pub const EmitCodepointLiterals = enum {
    /// Never emit Unicode code point literals.
    never,
    /// Emit Unicode code point literals for any `u8` in the printable ASCII range.
    printable_ascii,
    /// Emit Unicode code point literals for any unsigned integer with 21 bits or fewer
    /// whose value is a valid non-surrogate code point.
    always,

    /// If the value should be emitted as a Unicode codepoint, return it as a u21.
    fn emitAsCodepoint(self: @This(), val: anytype) ?u21 {
        // Rule out incompatible integer types
        switch (@typeInfo(@TypeOf(val))) {
            .int => |int_info| if (int_info.signedness == .signed or int_info.bits > 21) {
                return null;
            },
            .comptime_int => {},
            else => comptime unreachable,
        }

        // Return null if the value shouldn't be printed as a Unicode codepoint, or the value casted
        // to a u21 if it should.
        switch (self) {
            .always => {
                const c = std.math.cast(u21, val) orelse return null;
                if (!std.unicode.utf8ValidCodepoint(c)) return null;
                return c;
            },
            .printable_ascii => {
                const c = std.math.cast(u8, val) orelse return null;
                if (!std.ascii.isPrint(c)) return null;
                return c;
            },
            .never => {
                return null;
            },
        }
    }
};
|
||||
/// Serialize a value, similar to `serialize`.
|
||||
pub fn value(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
|
||||
comptime assert(!typeIsRecursive(@TypeOf(val)));
|
||||
return self.valueArbitraryDepth(val, options);
|
||||
}
|
||||
|
||||
/// Serialize a value, similar to `serializeMaxDepth`.
|
||||
/// Can return `error.ExceededMaxDepth`.
|
||||
pub fn valueMaxDepth(self: *Serializer, val: anytype, options: ValueOptions, depth: usize) DepthError!void {
|
||||
try checkValueDepth(val, depth);
|
||||
return self.valueArbitraryDepth(val, options);
|
||||
}
|
||||
|
||||
/// Serialize a value, similar to `serializeArbitraryDepth`.
|
||||
pub fn valueArbitraryDepth(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
|
||||
comptime assert(canSerializeType(@TypeOf(val)));
|
||||
switch (@typeInfo(@TypeOf(val))) {
|
||||
.int, .comptime_int => if (options.emit_codepoint_literals.emitAsCodepoint(val)) |c| {
|
||||
self.codePoint(c) catch |err| switch (err) {
|
||||
error.InvalidCodepoint => unreachable, // Already validated
|
||||
else => |e| return e,
|
||||
};
|
||||
} else {
|
||||
try self.int(val);
|
||||
},
|
||||
.float, .comptime_float => try self.float(val),
|
||||
.bool, .null => try self.writer.print("{}", .{val}),
|
||||
.enum_literal => try self.ident(@tagName(val)),
|
||||
.@"enum" => try self.ident(@tagName(val)),
|
||||
.pointer => |pointer| {
|
||||
// Try to serialize as a string
|
||||
const item: ?type = switch (@typeInfo(pointer.child)) {
|
||||
.array => |array| array.child,
|
||||
else => if (pointer.size == .slice) pointer.child else null,
|
||||
};
|
||||
if (item == u8 and
|
||||
(pointer.sentinel() == null or pointer.sentinel() == 0) and
|
||||
!options.emit_strings_as_containers)
|
||||
{
|
||||
return try self.string(val);
|
||||
}
|
||||
|
||||
// Serialize as either a tuple or as the child type
|
||||
switch (pointer.size) {
|
||||
.slice => try self.tupleImpl(val, options),
|
||||
.one => try self.valueArbitraryDepth(val.*, options),
|
||||
else => comptime unreachable,
|
||||
}
|
||||
},
|
||||
.array => {
|
||||
var container = try self.beginTuple(
|
||||
.{ .whitespace_style = .{ .fields = val.len } },
|
||||
);
|
||||
for (val) |item_val| {
|
||||
try container.fieldArbitraryDepth(item_val, options);
|
||||
}
|
||||
try container.end();
|
||||
},
|
||||
.@"struct" => |@"struct"| if (@"struct".is_tuple) {
|
||||
var container = try self.beginTuple(
|
||||
.{ .whitespace_style = .{ .fields = @"struct".fields.len } },
|
||||
);
|
||||
inline for (val) |field_value| {
|
||||
try container.fieldArbitraryDepth(field_value, options);
|
||||
}
|
||||
try container.end();
|
||||
} else {
|
||||
// Decide which fields to emit
|
||||
const fields, const skipped: [@"struct".fields.len]bool = if (options.emit_default_optional_fields) b: {
|
||||
break :b .{ @"struct".fields.len, @splat(false) };
|
||||
} else b: {
|
||||
var fields = @"struct".fields.len;
|
||||
var skipped: [@"struct".fields.len]bool = @splat(false);
|
||||
inline for (@"struct".fields, &skipped) |field_info, *skip| {
|
||||
if (field_info.default_value_ptr) |ptr| {
|
||||
const default: *const field_info.type = @ptrCast(@alignCast(ptr));
|
||||
const field_value = @field(val, field_info.name);
|
||||
if (std.meta.eql(field_value, default.*)) {
|
||||
skip.* = true;
|
||||
fields -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
break :b .{ fields, skipped };
|
||||
};
|
||||
|
||||
// Emit those fields
|
||||
var container = try self.beginStruct(
|
||||
.{ .whitespace_style = .{ .fields = fields } },
|
||||
);
|
||||
inline for (@"struct".fields, skipped) |field_info, skip| {
|
||||
if (!skip) {
|
||||
try container.fieldArbitraryDepth(
|
||||
field_info.name,
|
||||
@field(val, field_info.name),
|
||||
options,
|
||||
);
|
||||
}
|
||||
}
|
||||
try container.end();
|
||||
},
|
||||
.@"union" => |@"union"| {
|
||||
comptime assert(@"union".tag_type != null);
|
||||
switch (val) {
|
||||
inline else => |pl, tag| if (@TypeOf(pl) == void)
|
||||
try self.writer.print(".{s}", .{@tagName(tag)})
|
||||
else {
|
||||
var container = try self.beginStruct(.{ .whitespace_style = .{ .fields = 1 } });
|
||||
|
||||
try container.fieldArbitraryDepth(
|
||||
@tagName(tag),
|
||||
pl,
|
||||
options,
|
||||
);
|
||||
|
||||
try container.end();
|
||||
},
|
||||
}
|
||||
},
|
||||
.optional => if (val) |inner| {
|
||||
try self.valueArbitraryDepth(inner, options);
|
||||
} else {
|
||||
try self.writer.writeAll("null");
|
||||
},
|
||||
.vector => |vector| {
|
||||
var container = try self.beginTuple(
|
||||
.{ .whitespace_style = .{ .fields = vector.len } },
|
||||
);
|
||||
for (0..vector.len) |i| {
|
||||
try container.fieldArbitraryDepth(val[i], options);
|
||||
}
|
||||
try container.end();
|
||||
},
|
||||
|
||||
else => comptime unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
/// Serialize an integer.
|
||||
pub fn int(self: *Serializer, val: anytype) Error!void {
|
||||
try self.writer.printInt(val, 10, .lower, .{});
|
||||
}
|
||||
|
||||
/// Serialize a float.
|
||||
pub fn float(self: *Serializer, val: anytype) Error!void {
|
||||
switch (@typeInfo(@TypeOf(val))) {
|
||||
.float => if (std.math.isNan(val)) {
|
||||
return self.writer.writeAll("nan");
|
||||
} else if (std.math.isPositiveInf(val)) {
|
||||
return self.writer.writeAll("inf");
|
||||
} else if (std.math.isNegativeInf(val)) {
|
||||
return self.writer.writeAll("-inf");
|
||||
} else if (std.math.isNegativeZero(val)) {
|
||||
return self.writer.writeAll("-0.0");
|
||||
} else {
|
||||
try self.writer.print("{d}", .{val});
|
||||
},
|
||||
.comptime_float => if (val == 0) {
|
||||
return self.writer.writeAll("0");
|
||||
} else {
|
||||
try self.writer.print("{d}", .{val});
|
||||
},
|
||||
else => comptime unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
/// Serialize `name` as an identifier prefixed with `.`.
|
||||
///
|
||||
/// Escapes the identifier if necessary.
|
||||
pub fn ident(self: *Serializer, name: []const u8) Error!void {
|
||||
try self.writer.print(".{f}", .{std.zig.fmtIdPU(name)});
|
||||
}
|
||||
|
||||
pub const CodePointError = Error || error{InvalidCodepoint};
|
||||
|
||||
/// Serialize `val` as a Unicode codepoint.
|
||||
///
|
||||
/// Returns `error.InvalidCodepoint` if `val` is not a valid Unicode codepoint.
|
||||
pub fn codePoint(self: *Serializer, val: u21) CodePointError!void {
|
||||
try self.writer.print("'{f}'", .{std.zig.fmtChar(val)});
|
||||
}
|
||||
|
||||
/// Like `value`, but always serializes `val` as a tuple.
|
||||
///
|
||||
/// Will fail at comptime if `val` is not a tuple, array, pointer to an array, or slice.
|
||||
pub fn tuple(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
|
||||
comptime assert(!typeIsRecursive(@TypeOf(val)));
|
||||
try self.tupleArbitraryDepth(val, options);
|
||||
}
|
||||
|
||||
/// Like `tuple`, but recursive types are allowed.
|
||||
///
|
||||
/// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
|
||||
pub fn tupleMaxDepth(
|
||||
self: *Serializer,
|
||||
val: anytype,
|
||||
options: ValueOptions,
|
||||
depth: usize,
|
||||
) DepthError!void {
|
||||
try checkValueDepth(val, depth);
|
||||
try self.tupleArbitraryDepth(val, options);
|
||||
}
|
||||
|
||||
/// Like `tuple`, but recursive types are allowed.
|
||||
///
|
||||
/// It is the caller's responsibility to ensure that `val` does not contain cycles.
|
||||
pub fn tupleArbitraryDepth(
|
||||
self: *Serializer,
|
||||
val: anytype,
|
||||
options: ValueOptions,
|
||||
) Error!void {
|
||||
try self.tupleImpl(val, options);
|
||||
}
|
||||
|
||||
fn tupleImpl(self: *Serializer, val: anytype, options: ValueOptions) Error!void {
|
||||
comptime assert(canSerializeType(@TypeOf(val)));
|
||||
switch (@typeInfo(@TypeOf(val))) {
|
||||
.@"struct" => {
|
||||
var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } });
|
||||
inline for (val) |item_val| {
|
||||
try container.fieldArbitraryDepth(item_val, options);
|
||||
}
|
||||
try container.end();
|
||||
},
|
||||
.pointer, .array => {
|
||||
var container = try self.beginTuple(.{ .whitespace_style = .{ .fields = val.len } });
|
||||
for (val) |item_val| {
|
||||
try container.fieldArbitraryDepth(item_val, options);
|
||||
}
|
||||
try container.end();
|
||||
},
|
||||
else => comptime unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
/// Like `value`, but always serializes `val` as a string.
|
||||
pub fn string(self: *Serializer, val: []const u8) Error!void {
|
||||
try self.writer.print("\"{f}\"", .{std.zig.fmtString(val)});
|
||||
}
|
||||
|
||||
/// Options for formatting multiline strings.
|
||||
pub const MultilineStringOptions = struct {
|
||||
/// If top level is true, whitespace before and after the multiline string is elided.
|
||||
/// If it is true, a newline is printed, then the value, followed by a newline, and if
|
||||
/// whitespace is true any necessary indentation follows.
|
||||
top_level: bool = false,
|
||||
};
|
||||
|
||||
pub const MultilineStringError = Error || error{InnerCarriageReturn};
|
||||
|
||||
/// Like `value`, but always serializes to a multiline string literal.
|
||||
///
|
||||
/// Returns `error.InnerCarriageReturn` if `val` contains a CR not followed by a newline,
|
||||
/// since multiline strings cannot represent CR without a following newline.
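///
/// For example (an illustrative sketch), serializing "foo\nbar" at the top level
/// writes:
///
///     \\foo
///     \\bar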
pub fn multilineString(
    self: *Serializer,
    val: []const u8,
    options: MultilineStringOptions,
) MultilineStringError!void {
    // Make sure the string does not contain any carriage returns not followed by a newline
    var i: usize = 0;
    while (i < val.len) : (i += 1) {
        if (val[i] == '\r') {
            if (i + 1 < val.len) {
                if (val[i + 1] == '\n') {
                    i += 1;
                    continue;
                }
            }
            return error.InnerCarriageReturn;
        }
    }

    if (!options.top_level) {
        try self.newline();
        try self.indent();
    }

    try self.writer.writeAll("\\\\");
    for (val) |c| {
        if (c != '\r') {
            try self.writer.writeByte(c); // We write newlines here even if whitespace off
            if (c == '\n') {
                try self.indent();
                try self.writer.writeAll("\\\\");
            }
        }
    }

    if (!options.top_level) {
        try self.writer.writeByte('\n'); // Even if whitespace off
        try self.indent();
    }
}

/// Creates a `Struct` for writing ZON structs field by field.
pub fn beginStruct(self: *Serializer, options: ContainerOptions) Error!Struct {
    return Struct.begin(self, options);
}

/// Creates a `Tuple` for writing ZON tuples field by field.
pub fn beginTuple(self: *Serializer, options: ContainerOptions) Error!Tuple {
    return Tuple.begin(self, options);
}

fn indent(self: *Serializer) Error!void {
    if (self.options.whitespace) {
        try self.writer.splatByteAll(' ', 4 * self.indent_level);
    }
}

fn newline(self: *Serializer) Error!void {
    if (self.options.whitespace) {
        try self.writer.writeByte('\n');
    }
}

fn newlineOrSpace(self: *Serializer, len: usize) Error!void {
    if (self.containerShouldWrap(len)) {
        try self.newline();
    } else {
        try self.space();
    }
}

fn space(self: *Serializer) Error!void {
    if (self.options.whitespace) {
        try self.writer.writeByte(' ');
    }
}

/// Writes ZON tuples field by field.
pub const Tuple = struct {
    container: Container,

    fn begin(parent: *Serializer, options: ContainerOptions) Error!Tuple {
        return .{
            .container = try Container.begin(parent, .anon, options),
        };
    }

    /// Finishes serializing the tuple.
    ///
    /// Prints a trailing comma as configured when appropriate, and the closing bracket.
    pub fn end(self: *Tuple) Error!void {
        try self.container.end();
        self.* = undefined;
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`.
    pub fn field(
        self: *Tuple,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.field(null, val, options);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`.
    /// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
    pub fn fieldMaxDepth(
        self: *Tuple,
        val: anytype,
        options: ValueOptions,
        depth: usize,
    ) DepthError!void {
        try self.container.fieldMaxDepth(null, val, options, depth);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by
    /// `valueArbitraryDepth`.
    pub fn fieldArbitraryDepth(
        self: *Tuple,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.fieldArbitraryDepth(null, val, options);
    }

    /// Starts a field with a struct as a value. Returns the struct.
    pub fn beginStructField(
        self: *Tuple,
        options: ContainerOptions,
    ) Error!Struct {
        try self.fieldPrefix();
        return self.container.serializer.beginStruct(options);
    }

    /// Starts a field with a tuple as a value. Returns the tuple.
    pub fn beginTupleField(
        self: *Tuple,
        options: ContainerOptions,
    ) Error!Tuple {
        try self.fieldPrefix();
        return self.container.serializer.beginTuple(options);
    }

    /// Print a field prefix. This prints any necessary commas, and whitespace as
    /// configured. Useful if you want to serialize the field value yourself.
    pub fn fieldPrefix(self: *Tuple) Error!void {
        try self.container.fieldPrefix(null);
    }
};

/// Writes ZON structs field by field.
pub const Struct = struct {
    container: Container,

    fn begin(parent: *Serializer, options: ContainerOptions) Error!Struct {
        return .{
            .container = try Container.begin(parent, .named, options),
        };
    }

    /// Finishes serializing the struct.
    ///
    /// Prints a trailing comma as configured when appropriate, and the closing bracket.
    pub fn end(self: *Struct) Error!void {
        try self.container.end();
        self.* = undefined;
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`.
    pub fn field(
        self: *Struct,
        name: []const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.field(name, val, options);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`.
    /// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
    pub fn fieldMaxDepth(
        self: *Struct,
        name: []const u8,
        val: anytype,
        options: ValueOptions,
        depth: usize,
    ) DepthError!void {
        try self.container.fieldMaxDepth(name, val, options, depth);
    }

    /// Serialize a field. Equivalent to calling `fieldPrefix` followed by
    /// `valueArbitraryDepth`.
    pub fn fieldArbitraryDepth(
        self: *Struct,
        name: []const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.container.fieldArbitraryDepth(name, val, options);
    }

    /// Starts a field with a struct as a value. Returns the struct.
    pub fn beginStructField(
        self: *Struct,
        name: []const u8,
        options: ContainerOptions,
    ) Error!Struct {
        try self.fieldPrefix(name);
        return self.container.serializer.beginStruct(options);
    }

    /// Starts a field with a tuple as a value. Returns the tuple.
    pub fn beginTupleField(
        self: *Struct,
        name: []const u8,
        options: ContainerOptions,
    ) Error!Tuple {
        try self.fieldPrefix(name);
        return self.container.serializer.beginTuple(options);
    }

    /// Print a field prefix. This prints any necessary commas, the field name (escaped if
    /// necessary) and whitespace as configured. Useful if you want to serialize the field
    /// value yourself.
    pub fn fieldPrefix(self: *Struct, name: []const u8) Error!void {
        try self.container.fieldPrefix(name);
    }
};

const Container = struct {
    const FieldStyle = enum { named, anon };

    serializer: *Serializer,
    field_style: FieldStyle,
    options: ContainerOptions,
    empty: bool,

    fn begin(
        sz: *Serializer,
        field_style: FieldStyle,
        options: ContainerOptions,
    ) Error!Container {
        if (options.shouldWrap()) sz.indent_level +|= 1;
        try sz.writer.writeAll(".{");
        return .{
            .serializer = sz,
            .field_style = field_style,
            .options = options,
            .empty = true,
        };
    }

    fn end(self: *Container) Error!void {
        if (self.options.shouldWrap()) self.serializer.indent_level -|= 1;
        if (!self.empty) {
            if (self.options.shouldWrap()) {
                if (self.serializer.options.whitespace) {
                    try self.serializer.writer.writeByte(',');
                }
                try self.serializer.newline();
                try self.serializer.indent();
            } else if (!self.shouldElideSpaces()) {
                try self.serializer.space();
            }
        }
        try self.serializer.writer.writeByte('}');
        self.* = undefined;
    }

    fn fieldPrefix(self: *Container, name: ?[]const u8) Error!void {
        if (!self.empty) {
            try self.serializer.writer.writeByte(',');
        }
        self.empty = false;
        if (self.options.shouldWrap()) {
            try self.serializer.newline();
        } else if (!self.shouldElideSpaces()) {
            try self.serializer.space();
        }
        if (self.options.shouldWrap()) try self.serializer.indent();
        if (name) |n| {
            try self.serializer.ident(n);
            try self.serializer.space();
            try self.serializer.writer.writeByte('=');
            try self.serializer.space();
        }
    }

    fn field(
        self: *Container,
        name: ?[]const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        comptime assert(!typeIsRecursive(@TypeOf(val)));
        try self.fieldArbitraryDepth(name, val, options);
    }

    /// Returns `error.ExceededMaxDepth` if `depth` is exceeded.
    fn fieldMaxDepth(
        self: *Container,
        name: ?[]const u8,
        val: anytype,
        options: ValueOptions,
        depth: usize,
    ) DepthError!void {
        try checkValueDepth(val, depth);
        try self.fieldArbitraryDepth(name, val, options);
    }

    fn fieldArbitraryDepth(
        self: *Container,
        name: ?[]const u8,
        val: anytype,
        options: ValueOptions,
    ) Error!void {
        try self.fieldPrefix(name);
        try self.serializer.valueArbitraryDepth(val, options);
    }

    fn shouldElideSpaces(self: *const Container) bool {
        return switch (self.options.whitespace_style) {
            .fields => |fields| self.field_style != .named and fields == 1,
            else => false,
        };
    }
};

test Serializer {
    var discarding: Writer.Discarding = .init(&.{});
    var s: Serializer = .{ .writer = &discarding.writer };
    var vec2 = try s.beginStruct(.{});
    try vec2.field("x", 1.5, .{});
    try vec2.fieldPrefix("prefix");
    try s.value(2.5, .{});
    try vec2.end();
}
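
// An additional illustrative sketch (not part of the upstream test): `value`
// serializes an entire Zig value in one call, using the same writer setup as above.
test "value sketch" {
    var discarding: Writer.Discarding = .init(&.{});
    var s: Serializer = .{ .writer = &discarding.writer };
    try s.value(.{ .x = 1.5, .y = 2.5 }, .{});
}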

inline fn typeIsRecursive(comptime T: type) bool {
    return comptime typeIsRecursiveInner(T, &.{});
}

fn typeIsRecursiveInner(comptime T: type, comptime prev_visited: []const type) bool {
    for (prev_visited) |V| {
        if (V == T) return true;
    }
    const visited = prev_visited ++ .{T};

    return switch (@typeInfo(T)) {
        .pointer => |pointer| typeIsRecursiveInner(pointer.child, visited),
        .optional => |optional| typeIsRecursiveInner(optional.child, visited),
        .array => |array| typeIsRecursiveInner(array.child, visited),
        .vector => |vector| typeIsRecursiveInner(vector.child, visited),
        .@"struct" => |@"struct"| for (@"struct".fields) |field| {
            if (typeIsRecursiveInner(field.type, visited)) break true;
        } else false,
        .@"union" => |@"union"| inline for (@"union".fields) |field| {
            if (typeIsRecursiveInner(field.type, visited)) break true;
        } else false,
        else => false,
    };
}

test typeIsRecursive {
    try std.testing.expect(!typeIsRecursive(bool));
    try std.testing.expect(!typeIsRecursive(struct { x: i32, y: i32 }));
    try std.testing.expect(!typeIsRecursive(struct { i32, i32 }));
    try std.testing.expect(typeIsRecursive(struct { x: i32, y: i32, z: *@This() }));
    try std.testing.expect(typeIsRecursive(struct {
        a: struct {
            const A = @This();
            b: struct {
                c: *struct {
                    a: ?A,
                },
            },
        },
    }));
    try std.testing.expect(typeIsRecursive(struct {
        a: [3]*@This(),
    }));
    try std.testing.expect(typeIsRecursive(struct {
        a: union { a: i32, b: *@This() },
    }));
}

fn checkValueDepth(val: anytype, depth: usize) error{ExceededMaxDepth}!void {
    if (depth == 0) return error.ExceededMaxDepth;
    const child_depth = depth - 1;

    switch (@typeInfo(@TypeOf(val))) {
        .pointer => |pointer| switch (pointer.size) {
            .one => try checkValueDepth(val.*, child_depth),
            .slice => for (val) |item| {
                try checkValueDepth(item, child_depth);
            },
            .c, .many => {},
        },
        .array => for (val) |item| {
            try checkValueDepth(item, child_depth);
        },
        .@"struct" => |@"struct"| inline for (@"struct".fields) |field_info| {
            try checkValueDepth(@field(val, field_info.name), child_depth);
        },
        .@"union" => |@"union"| if (@"union".tag_type == null) {
            return;
        } else switch (val) {
            inline else => |payload| {
                return checkValueDepth(payload, child_depth);
            },
        },
        .optional => if (val) |inner| try checkValueDepth(inner, child_depth),
        else => {},
    }
}

fn expectValueDepthEquals(expected: usize, v: anytype) !void {
    try checkValueDepth(v, expected);
    try std.testing.expectError(error.ExceededMaxDepth, checkValueDepth(v, expected - 1));
}

test checkValueDepth {
    try expectValueDepthEquals(1, 10);
    try expectValueDepthEquals(2, .{ .x = 1, .y = 2 });
    try expectValueDepthEquals(2, .{ 1, 2 });
    try expectValueDepthEquals(3, .{ 1, .{ 2, 3 } });
    try expectValueDepthEquals(3, .{ .{ 1, 2 }, 3 });
    try expectValueDepthEquals(3, .{ .x = 0, .y = 1, .z = .{ .x = 3 } });
    try expectValueDepthEquals(3, .{ .x = 0, .y = .{ .x = 1 }, .z = 2 });
    try expectValueDepthEquals(3, .{ .x = .{ .x = 0 }, .y = 1, .z = 2 });
    try expectValueDepthEquals(2, @as(?u32, 1));
    try expectValueDepthEquals(1, @as(?u32, null));
    try expectValueDepthEquals(1, null);
    try expectValueDepthEquals(2, &1);
    try expectValueDepthEquals(3, &@as(?u32, 1));

    const Union = union(enum) {
        x: u32,
        y: struct { x: u32 },
    };
    try expectValueDepthEquals(2, Union{ .x = 1 });
    try expectValueDepthEquals(3, Union{ .y = .{ .x = 1 } });

    const Recurse = struct { r: ?*const @This() };
    try expectValueDepthEquals(2, Recurse{ .r = null });
    try expectValueDepthEquals(5, Recurse{ .r = &Recurse{ .r = null } });
    try expectValueDepthEquals(8, Recurse{ .r = &Recurse{ .r = &Recurse{ .r = null } } });

    try expectValueDepthEquals(2, @as([]const u8, &.{ 1, 2, 3 }));
    try expectValueDepthEquals(3, @as([]const []const u8, &.{&.{ 1, 2, 3 }}));
}

inline fn canSerializeType(T: type) bool {
    comptime return canSerializeTypeInner(T, &.{}, false);
}

fn canSerializeTypeInner(
    T: type,
    /// Visited structs and unions, to avoid infinite recursion.
    /// Tracking more types is unnecessary, and a little complex due to optional nesting.
    visited: []const type,
    parent_is_optional: bool,
) bool {
    return switch (@typeInfo(T)) {
        .bool,
        .int,
        .float,
        .comptime_float,
        .comptime_int,
        .null,
        .enum_literal,
        => true,

        .noreturn,
        .void,
        .type,
        .undefined,
        .error_union,
        .error_set,
        .@"fn",
        .frame,
        .@"anyframe",
        .@"opaque",
        => false,

        .@"enum" => |@"enum"| @"enum".is_exhaustive,

        .pointer => |pointer| switch (pointer.size) {
            .one => canSerializeTypeInner(pointer.child, visited, parent_is_optional),
            .slice => canSerializeTypeInner(pointer.child, visited, false),
            .many, .c => false,
        },

        .optional => |optional| if (parent_is_optional)
            false
        else
            canSerializeTypeInner(optional.child, visited, true),

        .array => |array| canSerializeTypeInner(array.child, visited, false),
        .vector => |vector| canSerializeTypeInner(vector.child, visited, false),

        .@"struct" => |@"struct"| {
            for (visited) |V| if (T == V) return true;
            const new_visited = visited ++ .{T};
            for (@"struct".fields) |field| {
                if (!canSerializeTypeInner(field.type, new_visited, false)) return false;
            }
            return true;
        },
        .@"union" => |@"union"| {
            for (visited) |V| if (T == V) return true;
            const new_visited = visited ++ .{T};
            if (@"union".tag_type == null) return false;
            for (@"union".fields) |field| {
                if (field.type != void and !canSerializeTypeInner(field.type, new_visited, false)) {
                    return false;
                }
            }
            return true;
        },
    };
}

test canSerializeType {
    try std.testing.expect(!comptime canSerializeType(void));
    try std.testing.expect(!comptime canSerializeType(struct { f: [*]u8 }));
    try std.testing.expect(!comptime canSerializeType(struct { error{foo} }));
    try std.testing.expect(!comptime canSerializeType(union(enum) { a: void, f: [*c]u8 }));
    try std.testing.expect(!comptime canSerializeType(@Vector(0, [*c]u8)));
    try std.testing.expect(!comptime canSerializeType(*?[*c]u8));
    try std.testing.expect(!comptime canSerializeType(enum(u8) { _ }));
    try std.testing.expect(!comptime canSerializeType(union { foo: void }));
    try std.testing.expect(comptime canSerializeType(union(enum) { foo: void }));
    try std.testing.expect(comptime canSerializeType(comptime_float));
    try std.testing.expect(comptime canSerializeType(comptime_int));
    try std.testing.expect(!comptime canSerializeType(struct { comptime foo: ??u8 = null }));
    try std.testing.expect(comptime canSerializeType(@TypeOf(.foo)));
    try std.testing.expect(comptime canSerializeType(?u8));
    try std.testing.expect(comptime canSerializeType(*?*u8));
    try std.testing.expect(comptime canSerializeType(?struct {
        foo: ?struct {
            ?union(enum) {
                a: ?@Vector(0, ?*u8),
            },
            ?struct {
                f: ?[]?u8,
            },
        },
    }));
    try std.testing.expect(!comptime canSerializeType(??u8));
    try std.testing.expect(!comptime canSerializeType(?*?u8));
    try std.testing.expect(!comptime canSerializeType(*?*?*u8));
    try std.testing.expect(comptime canSerializeType(struct { x: comptime_int = 2 }));
    try std.testing.expect(comptime canSerializeType(struct { x: comptime_float = 2 }));
    try std.testing.expect(comptime canSerializeType(struct { comptime_int }));
    try std.testing.expect(comptime canSerializeType(struct { comptime x: @TypeOf(.foo) = .foo }));
    const Recursive = struct { foo: ?*@This() };
    try std.testing.expect(comptime canSerializeType(Recursive));

    // Make sure we validate nested optional before we early out due to already having seen
    // a type recursion!
    try std.testing.expect(!comptime canSerializeType(struct {
        add_to_visited: ?u8,
        retrieve_from_visited: ??u8,
    }));
}

@ -64,14 +64,14 @@ pub const Error = union(enum) {
        }
    };

    fn formatMessage(self: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
    fn formatMessage(self: []const u8, w: *std.Io.Writer) std.Io.Writer.Error!void {
        // Just writes the string for now, but we're keeping this behind a formatter so we have
        // the option to extend it in the future to print more advanced messages (like `Error`
        // does) without breaking the API.
        try w.writeAll(self);
    }

    pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Formatter([]const u8, Note.formatMessage) {
    pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Alt([]const u8, Note.formatMessage) {
        return .{ .data = switch (self) {
            .zoir => |note| note.msg.get(diag.zoir),
            .type_check => |note| note.msg,
@ -147,14 +147,14 @@ pub const Error = union(enum) {
        diag: *const Diagnostics,
    };

    fn formatMessage(self: FormatMessage, w: *std.io.Writer) std.io.Writer.Error!void {
    fn formatMessage(self: FormatMessage, w: *std.Io.Writer) std.Io.Writer.Error!void {
        switch (self.err) {
            .zoir => |err| try w.writeAll(err.msg.get(self.diag.zoir)),
            .type_check => |tc| try w.writeAll(tc.message),
        }
    }

    pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Formatter(FormatMessage, formatMessage) {
    pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Alt(FormatMessage, formatMessage) {
        return .{ .data = .{
            .err = self,
            .diag = diag,
@ -226,7 +226,7 @@ pub const Diagnostics = struct {
        return .{ .diag = self };
    }

    pub fn format(self: *const @This(), w: *std.io.Writer) std.io.Writer.Error!void {
    pub fn format(self: *const @This(), w: *std.Io.Writer) std.Io.Writer.Error!void {
        var errors = self.iterateErrors();
        while (errors.next()) |err| {
            const loc = err.getLocation(self);

File diff suppressed because it is too large

@ -2821,7 +2821,7 @@ pub fn loadZirCache(gpa: Allocator, cache_file: std.fs.File) !Zir {
    var buffer: [2000]u8 = undefined;
    var file_reader = cache_file.reader(&buffer);
    return result: {
        const header = file_reader.interface.takeStructReference(Zir.Header) catch |err| break :result err;
        const header = file_reader.interface.takeStructPointer(Zir.Header) catch |err| break :result err;
        break :result loadZirCacheBody(gpa, header.*, &file_reader.interface);
    } catch |err| switch (err) {
        error.ReadFailed => return file_reader.err.?,

@ -349,7 +349,7 @@ fn loadZirZoirCache(
    const cache_br = &cache_fr.interface;

    // First we read the header to determine the lengths of arrays.
    const header = (cache_br.takeStructReference(Header) catch |err| switch (err) {
    const header = (cache_br.takeStructPointer(Header) catch |err| switch (err) {
        error.ReadFailed => return cache_fr.err.?,
        // This can happen if Zig bails out of this function between creating
        // the cached file and writing it.

@ -2438,7 +2438,10 @@ pub const DeclGen = struct {
    const ty = val.typeOf(zcu);
    return .{ .data = .{
        .dg = dg,
        .int_info = ty.intInfo(zcu),
        .int_info = if (ty.zigTypeTag(zcu) == .@"union" and ty.containerLayout(zcu) == .@"packed")
            .{ .signedness = .unsigned, .bits = @intCast(ty.bitSize(zcu)) }
        else
            ty.intInfo(zcu),
        .kind = kind,
        .ctype = try dg.ctypeFromType(ty, kind),
        .val = val,

@ -6385,6 +6385,9 @@ pub const FuncGen = struct {
    // * https://github.com/llvm/llvm-project/blob/56905dab7da50bccfcceaeb496b206ff476127e1/llvm/test/MC/WebAssembly/blockaddress.ll
    if (zcu.comp.getTarget().cpu.arch.isWasm()) break :jmp_table null;

    // Workaround for https://github.com/ziglang/zig/issues/24383:
    if (self.ng.ownerModule().optimize_mode == .ReleaseSafe) break :jmp_table null;

    // On a 64-bit target, 1024 pointers in our jump table is about 8 KiB of pointers. This seems
    // just about acceptable - it won't fill L1d cache on most CPUs.
    const max_table_len = 1024;

@ -346,8 +346,9 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
    } else if (mem.eql(u8, cmd, "targets")) {
        dev.check(.targets_command);
        const host = std.zig.resolveTargetQueryOrFatal(.{});
        const stdout = fs.File.stdout().deprecatedWriter();
        return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, &host);
        var stdout_writer = fs.File.stdout().writer(&stdout_buffer);
        try @import("print_targets.zig").cmdTargets(arena, cmd_args, &stdout_writer.interface, &host);
        return stdout_writer.interface.flush();
    } else if (mem.eql(u8, cmd, "version")) {
        dev.check(.version_command);
        try fs.File.stdout().writeAll(build_options.version ++ "\n");
@ -358,7 +359,9 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
    } else if (mem.eql(u8, cmd, "env")) {
        dev.check(.env_command);
        verifyLibcxxCorrectlyLinked();
        return @import("print_env.zig").cmdEnv(arena, cmd_args);
        var stdout_writer = fs.File.stdout().writer(&stdout_buffer);
        try @import("print_env.zig").cmdEnv(arena, &stdout_writer.interface);
        return stdout_writer.interface.flush();
    } else if (mem.eql(u8, cmd, "reduce")) {
        return jitCmd(gpa, arena, cmd_args, .{
            .cmd_name = "reduce",

@ -4,8 +4,7 @@ const introspect = @import("introspect.zig");
const Allocator = std.mem.Allocator;
const fatal = std.process.fatal;

pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void {
    _ = args;
pub fn cmdEnv(arena: Allocator, out: *std.Io.Writer) !void {
    const cwd_path = try introspect.getResolvedCwd(arena);
    const self_exe_path = try std.fs.selfExePathAlloc(arena);

@ -21,41 +20,21 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void {
    const host = try std.zig.system.resolveTargetQuery(.{});
    const triple = try host.zigTriple(arena);

    var buffer: [1024]u8 = undefined;
    var stdout_writer = std.fs.File.stdout().writer(&buffer);
    const w = &stdout_writer.interface();
    var jws: std.json.Stringify = .{ .writer = w, .options = .{ .whitespace = .indent_1 } };
    var serializer: std.zon.Serializer = .{ .writer = out };
    var root = try serializer.beginStruct(.{});

    try jws.beginObject();

    try jws.objectField("zig_exe");
    try jws.write(self_exe_path);

    try jws.objectField("lib_dir");
    try jws.write(zig_lib_directory.path.?);

    try jws.objectField("std_dir");
    try jws.write(zig_std_dir);

    try jws.objectField("global_cache_dir");
    try jws.write(global_cache_dir);

    try jws.objectField("version");
    try jws.write(build_options.version);

    try jws.objectField("target");
    try jws.write(triple);

    try jws.objectField("env");
    try jws.beginObject();
    try root.field("zig_exe", self_exe_path, .{});
    try root.field("lib_dir", zig_lib_directory.path.?, .{});
    try root.field("std_dir", zig_std_dir, .{});
    try root.field("global_cache_dir", global_cache_dir, .{});
    try root.field("version", build_options.version, .{});
    try root.field("target", triple, .{});
    var env = try root.beginStructField("env", .{});
    inline for (@typeInfo(std.zig.EnvVar).@"enum".fields) |field| {
        try jws.objectField(field.name);
        try jws.write(try @field(std.zig.EnvVar, field.name).get(arena));
        try env.field(field.name, try @field(std.zig.EnvVar, field.name).get(arena), .{});
    }
    try jws.endObject();
    try env.end();
    try root.end();

    try jws.endObject();
    try w.writeByte('\n');

    try w.flush();
    try out.writeByte('\n');
}
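
With this change, `zig env` emits ZON instead of JSON. A rough sketch of the new output shape (field names come from the hunk above; the values and the single env var shown are hypothetical):

.{
    .zig_exe = "/path/to/zig",
    .lib_dir = "/path/to/lib/zig",
    .std_dir = "/path/to/lib/zig/std",
    .global_cache_dir = "/path/to/global/cache",
    .version = "0.15.0-dev",
    .target = "x86_64-linux-gnu",
    .env = .{
        .ZIG_GLOBAL_CACHE_DIR = null,
    },
}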

@ -10,38 +10,37 @@ const target = @import("target.zig");
const assert = std.debug.assert;
const glibc = @import("libs/glibc.zig");
const introspect = @import("introspect.zig");
const Writer = std.io.Writer;

pub fn cmdTargets(arena: Allocator, args: []const []const u8) !void {
pub fn cmdTargets(
    allocator: Allocator,
    args: []const []const u8,
    out: *std.Io.Writer,
    native_target: *const Target,
) !void {
    _ = args;
    const host = std.zig.resolveTargetQueryOrFatal(.{});
    var buffer: [1024]u8 = undefined;
    var bw = fs.File.stdout().writer().buffered(&buffer);
    try print(arena, &bw, host);
    try bw.flush();
}

fn print(arena: Allocator, output: *Writer, host: *const Target) Writer.Error!void {
    var zig_lib_directory = introspect.findZigLibDir(arena) catch |err| {
    var zig_lib_directory = introspect.findZigLibDir(allocator) catch |err| {
        fatal("unable to find zig installation directory: {s}\n", .{@errorName(err)});
    };
    defer zig_lib_directory.handle.close();
    defer allocator.free(zig_lib_directory.path.?);

    const abilists_contents = zig_lib_directory.handle.readFileAlloc(
        allocator,
        glibc.abilists_path,
        arena,
        .limited(glibc.abilists_max_size),
        glibc.abilists_max_size,
    ) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        else => fatal("unable to read " ++ glibc.abilists_path ++ ": {s}", .{@errorName(err)}),
    };
    defer allocator.free(abilists_contents);

    const glibc_abi = try glibc.loadMetaData(arena, abilists_contents);
    const glibc_abi = try glibc.loadMetaData(allocator, abilists_contents);
    defer glibc_abi.destroy(allocator);

    var sz: std.zon.stringify.Serializer = .{ .writer = output };
    var serializer: std.zon.Serializer = .{ .writer = out };

    {
        var root_obj = try sz.beginStruct(.{});
        var root_obj = try serializer.beginStruct(.{});

        try root_obj.field("arch", meta.fieldNames(Target.Cpu.Arch), .{});
        try root_obj.field("os", meta.fieldNames(Target.Os.Tag), .{});
@ -50,9 +49,10 @@ fn print(arena: Allocator, output: *Writer, host: *const Target) Writer.Error!void
        {
            var libc_obj = try root_obj.beginTupleField("libc", .{});
            for (std.zig.target.available_libcs) |libc| {
                const tmp = try std.fmt.allocPrint(arena, "{s}-{s}-{s}", .{
                const tmp = try std.fmt.allocPrint(allocator, "{s}-{s}-{s}", .{
                    @tagName(libc.arch), @tagName(libc.os), @tagName(libc.abi),
                });
                defer allocator.free(tmp);
                try libc_obj.field(tmp, .{});
            }
            try libc_obj.end();
@ -61,7 +61,8 @@ fn print(arena: Allocator, output: *Writer, host: *const Target) Writer.Error!void
        {
            var glibc_obj = try root_obj.beginTupleField("glibc", .{});
            for (glibc_abi.all_versions) |ver| {
                const tmp = try std.fmt.allocPrint(arena, "{f}", .{ver});
                const tmp = try std.fmt.allocPrint(allocator, "{f}", .{ver});
                defer allocator.free(tmp);
                try glibc_obj.field(tmp, .{});
            }
            try glibc_obj.end();
@ -101,20 +102,21 @@ fn print(arena: Allocator, output: *Writer, host: *const Target) Writer.Error!void
        {
            var native_obj = try root_obj.beginStructField("native", .{});
            {
                const triple = try host.zigTriple(arena);
                const triple = try native_target.zigTriple(allocator);
                defer allocator.free(triple);
                try native_obj.field("triple", triple, .{});
            }
            {
                var cpu_obj = try native_obj.beginStructField("cpu", .{});
                try cpu_obj.field("arch", @tagName(host.cpu.arch), .{});
                try cpu_obj.field("arch", @tagName(native_target.cpu.arch), .{});

                try cpu_obj.field("name", host.cpu.model.name, .{});
                try cpu_obj.field("name", native_target.cpu.model.name, .{});

                {
                    var features = try native_obj.beginTupleField("features", .{});
                    for (host.cpu.arch.allFeaturesList(), 0..) |feature, i_usize| {
                    for (native_target.cpu.arch.allFeaturesList(), 0..) |feature, i_usize| {
                        const index = @as(Target.Cpu.Feature.Set.Index, @intCast(i_usize));
                        if (host.cpu.features.isEnabled(index)) {
                        if (native_target.cpu.features.isEnabled(index)) {
                            try features.field(feature.name, .{});
                        }
                    }
@ -123,13 +125,13 @@ fn print(arena: Allocator, output: *Writer, host: *const Target) Writer.Error!void
                try cpu_obj.end();
            }

            try native_obj.field("os", @tagName(host.os.tag), .{});
            try native_obj.field("abi", @tagName(host.abi), .{});
            try native_obj.field("os", @tagName(native_target.os.tag), .{});
            try native_obj.field("abi", @tagName(native_target.abi), .{});
            try native_obj.end();
        }

        try root_obj.end();
    }

    try output.writeByte('\n');
    try out.writeByte('\n');
}

@ -3338,7 +3338,7 @@ fn transPredefinedExpr(c: *Context, scope: *Scope, expr: *const clang.Predefined

fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!Node {
    return Tag.char_literal.create(c.arena, if (narrow)
        try std.fmt.allocPrint(c.arena, "'{f}'", .{std.zig.fmtChar(&.{@as(u8, @intCast(val))})})
        try std.fmt.allocPrint(c.arena, "'{f}'", .{std.zig.fmtChar(@intCast(val))})
    else
        try std.fmt.allocPrint(c.arena, "'\\u{{{x}}}'", .{val}));
}

@ -539,20 +539,6 @@ fn add(a: i32, b: i32) i32 {
    return a + b;
}

test "type info for async frames" {
    if (true) {
        // https://github.com/ziglang/zig/issues/6025
        return error.SkipZigTest;
    }

    switch (@typeInfo(@Frame(add))) {
        .frame => |frame| {
            try expect(@as(@TypeOf(add), @ptrCast(frame.function)) == add);
        },
        else => unreachable,
    }
}

test "Declarations are returned in declaration order" {
    if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;

@ -1,14 +0,0 @@
export fn entry() void {
    var frame: @Frame(func) = undefined;
    _ = &frame;
}
fn func(comptime T: type) void {
    var x: T = undefined;
    _ = &x;
}

// error
// backend=stage1
// target=native
//
// tmp.zig:2:16: error: @Frame() of generic function

@ -1,13 +0,0 @@
export fn entry() void {
    var ptr: fn () callconv(.@"async") void = func;
    var bytes: [64]u8 = undefined;
    _ = @asyncCall(&bytes, {}, ptr, .{});
    _ = &ptr;
}
fn func() callconv(.@"async") void {}

// error
// backend=stage1
// target=aarch64-linux-none
//
// tmp.zig:4:21: error: expected type '[]align(8) u8', found '*[64]u8'

@ -1,7 +0,0 @@
export fn foo() callconv(.@"async") void {}

// error
// backend=stage1
// target=native
//
// tmp.zig:1:1: error: exported function cannot be async

@ -1,11 +0,0 @@
var handle_undef: anyframe = undefined;
var handle_dummy: anyframe = @frame();
export fn entry() bool {
    return handle_undef == handle_dummy;
}

// error
// backend=stage1
// target=native
//
// tmp.zig:2:30: error: @frame() called outside of function definition

@ -1,13 +0,0 @@
export fn entry() void {
    func();
}
fn func() void {
    _ = @frame();
}

// error
// backend=stage1
// target=native
//
// tmp.zig:1:1: error: function with calling convention 'C' cannot be async
// tmp.zig:5:9: note: @frame() causes function to be async

@ -1,15 +0,0 @@
export fn a() void {
    var non_async_fn: fn () void = undefined;
    non_async_fn = func;
}
fn func() void {
    suspend {}
}

// error
// backend=stage1
// target=native
//
// tmp.zig:5:1: error: 'func' cannot be async
// tmp.zig:3:20: note: required to be non-async here
// tmp.zig:6:5: note: suspends here

@ -1,13 +0,0 @@
export fn entry() void {
    var ptr = afunc;
    var bytes: [100]u8 align(16) = undefined;
    _ = @asyncCall(&bytes, {}, ptr, .{});
    _ = &ptr;
}
fn afunc() void {}

// error
// backend=stage1
// target=native
//
// tmp.zig:4:32: error: expected async function, found 'fn () void'

@ -1,24 +0,0 @@
export fn a() void {
    var x: anyframe = undefined;
    var y: anyframe->i32 = x;
    _ = .{ &x, &y };
}
export fn b() void {
    var x: i32 = undefined;
    var y: anyframe->i32 = x;
    _ = .{ &x, &y };
}
export fn c() void {
    var x: @Frame(func) = undefined;
    var y: anyframe->i32 = &x;
    _ = .{ &x, &y };
}
fn func() void {}

// error
// backend=stage1
// target=native
//
// :3:28: error: expected type 'anyframe->i32', found 'anyframe'
// :8:28: error: expected type 'anyframe->i32', found 'i32'
// tmp.zig:13:29: error: expected type 'anyframe->i32', found '*@Frame(func)'

@ -1,14 +0,0 @@
export fn entry1() void {
    var frame: @Frame(foo) = undefined;
    @asyncCall(&frame, {}, foo, {});
}

fn foo() i32 {
    return 0;
}

// error
// backend=stage1
// target=native
//
// tmp.zig:3:33: error: expected tuple or struct, found 'void'

@ -1,20 +0,0 @@
const std = @import("std");

pub fn panic(message: []const u8, stack_trace: ?*std.builtin.StackTrace, _: ?usize) noreturn {
    _ = message;
    _ = stack_trace;
    std.process.exit(0);
}
pub fn main() !void {
    _ = nosuspend add(101, 100);
    return error.TestFailed;
}
fn add(a: i32, b: i32) i32 {
    if (a > 100) {
        suspend {}
    }
    return a + b;
}
// run
// backend=stage1
// target=native