Mirror of https://github.com/ziglang/zig.git

Merge pull request #11789 from Vexu/stage2
Stage2 fixes towards `zig2 build test-std` working

Commit 43db697b46
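Most of the standard-library test changes below follow one recurring pattern: the self-hosted (stage2) compiler no longer accepts taking the address of, or holding a reader/parser handle into, a temporary value, so one-liners such as `try skipValue(&TokenStream.init("false"))` or `std.io.fixedBufferStream(&buf).reader()` are rewritten to bind the value to a local `var` first. A minimal sketch of the rewrite (the `use` helper is illustrative, not part of this commit):

const std = @import("std");

fn use(tokens: *std.json.TokenStream) void {
    _ = tokens;
}

test "bind a temporary to a var before taking its address" {
    // Old style, accepted by stage1:
    //     use(&std.json.TokenStream.init("null"));
    // New style used throughout this commit: give the value a stable
    // address first, then pass a pointer to the mutable local.
    var ts = std.json.TokenStream.init("null");
    use(&ts);
}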
@@ -147,7 +147,8 @@ pub fn __log2x(a: f80) callconv(.C) f80 {
}

pub fn log2q(a: f128) callconv(.C) f128 {
return math.log2(a);
// TODO: more correct implementation
return log2(@floatCast(f64, a));
}

pub fn log2l(x: c_longdouble) callconv(.C) c_longdouble {
@@ -5,6 +5,7 @@ pub const gzip = @import("compress/gzip.zig");
pub const zlib = @import("compress/zlib.zig");

test {
if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
_ = deflate;
_ = gzip;
_ = zlib;
@@ -34,12 +34,14 @@ pub fn rejectNonCanonical(s: CompressedScalar) NonCanonicalError!void {

/// Reduce a scalar to the field size.
pub fn reduce(s: CompressedScalar) CompressedScalar {
return Scalar.fromBytes(s).toBytes();
var scalar = Scalar.fromBytes(s);
return scalar.toBytes();
}

/// Reduce a 64-bytes scalar to the field size.
pub fn reduce64(s: [64]u8) CompressedScalar {
return ScalarDouble.fromBytes64(s).toBytes();
var scalar = ScalarDouble.fromBytes64(s);
return scalar.toBytes();
}

/// Perform the X25519 "clamping" operation.
@@ -106,12 +108,14 @@ pub const Scalar = struct {

/// Unpack a 32-byte representation of a scalar
pub fn fromBytes(bytes: CompressedScalar) Scalar {
return ScalarDouble.fromBytes32(bytes).reduce(5);
var scalar = ScalarDouble.fromBytes32(bytes);
return scalar.reduce(5);
}

/// Unpack a 64-byte representation of a scalar
pub fn fromBytes64(bytes: [64]u8) Scalar {
return ScalarDouble.fromBytes64(bytes).reduce(5);
var scalar = ScalarDouble.fromBytes64(bytes);
return scalar.reduce(5);
}

/// Pack a scalar into bytes
@@ -679,9 +679,12 @@ fn testBlake3(hasher: *Blake3, input_len: usize, expected_hex: [262]u8) !void {
}

test "BLAKE3 reference test cases" {
var hash = &Blake3.init(.{});
var keyed_hash = &Blake3.init(.{ .key = reference_test.key.* });
var derive_key = &Blake3.initKdf(reference_test.context_string, .{});
var hash_state = Blake3.init(.{});
const hash = &hash_state;
var keyed_hash_state = Blake3.init(.{ .key = reference_test.key.* });
const keyed_hash = &keyed_hash_state;
var derive_key_state = Blake3.initKdf(reference_test.context_string, .{});
const derive_key = &derive_key_state;

for (reference_test.cases) |t| {
try testBlake3(hash, t.input_len, t.hash.*);
@@ -128,18 +128,16 @@ fn keccakF(comptime F: usize, d: *[F / 8]u8) void {
r.* = mem.readIntLittle(u64, d[8 * i ..][0..8]);
}

comptime var x: usize = 0;
comptime var y: usize = 0;
for (RC[0..no_rounds]) |round| {
// theta
x = 0;
comptime var x: usize = 0;
inline while (x < 5) : (x += 1) {
c[x] = s[x] ^ s[x + 5] ^ s[x + 10] ^ s[x + 15] ^ s[x + 20];
}
x = 0;
inline while (x < 5) : (x += 1) {
t[0] = c[M5[x + 4]] ^ math.rotl(u64, c[M5[x + 1]], @as(usize, 1));
y = 0;
comptime var y: usize = 0;
inline while (y < 5) : (y += 1) {
s[x + y * 5] ^= t[0];
}
@@ -155,7 +153,7 @@ fn keccakF(comptime F: usize, d: *[F / 8]u8) void {
}

// chi
y = 0;
comptime var y: usize = 0;
inline while (y < 5) : (y += 1) {
x = 0;
inline while (x < 5) : (x += 1) {
@@ -109,6 +109,7 @@ pub fn Batch(
}

test "std.event.Batch" {
if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
var count: usize = 0;
var batch = Batch(void, 2, .auto_async).init();
batch.add(&async sleepALittle(&count));
@@ -2111,6 +2111,7 @@ test "slice" {
}

test "escape non-printable" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest;
try expectFmt("abc", "{s}", .{fmtSliceEscapeLower("abc")});
try expectFmt("ab\\xffc", "{s}", .{fmtSliceEscapeLower("ab\xffc")});
try expectFmt("ab\\xFFc", "{s}", .{fmtSliceEscapeUpper("ab\xffc")});
@@ -2122,6 +2123,7 @@ test "pointer" {
try expectFmt("pointer: i32@deadbeef\n", "pointer: {}\n", .{value});
try expectFmt("pointer: i32@deadbeef\n", "pointer: {*}\n", .{value});
}
if (builtin.zig_backend != .stage1) return error.SkipZigTest;
{
const value = @intToPtr(fn () void, 0xdeadbeef);
try expectFmt("pointer: fn() void@deadbeef\n", "pointer: {}\n", .{value});
@@ -2146,6 +2148,7 @@ test "cstr" {
}

test "filesize" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest;
try expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeDec(42)});
try expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeBin(42)});
try expectFmt("file size: 63MB\n", "file size: {}\n", .{fmtIntSizeDec(63 * 1000 * 1000)});
@@ -2445,6 +2448,7 @@ test "struct.zero-size" {
}

test "bytes.hex" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest;
const some_bytes = "\xCA\xFE\xBA\xBE";
try expectFmt("lowercase: cafebabe\n", "lowercase: {x}\n", .{fmtSliceHexLower(some_bytes)});
try expectFmt("uppercase: CAFEBABE\n", "uppercase: {X}\n", .{fmtSliceHexUpper(some_bytes)});
@@ -2476,6 +2480,7 @@ pub fn hexToBytes(out: []u8, input: []const u8) ![]u8 {
}

test "hexToBytes" {
if (builtin.zig_backend != .stage1) return error.SkipZigTest;
var buf: [32]u8 = undefined;
try expectFmt("90" ** 32, "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "90" ** 32))});
try expectFmt("ABCD", "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "ABCD"))});
@@ -1210,7 +1210,8 @@ pub fn testAllocatorAlignedShrink(base_allocator: mem.Allocator) !void {
const allocator = validationAllocator.allocator();

var debug_buffer: [1000]u8 = undefined;
const debug_allocator = FixedBufferAllocator.init(&debug_buffer).allocator();
var fib = FixedBufferAllocator.init(&debug_buffer);
const debug_allocator = fib.allocator();

const alloc_size = mem.page_size * 2 + 50;
var slice = try allocator.alignedAlloc(u8, 16, alloc_size);
@@ -91,7 +91,8 @@ test "LogToWriterAllocator" {

var allocator_buf: [10]u8 = undefined;
var fixedBufferAllocator = std.mem.validationWrap(std.heap.FixedBufferAllocator.init(&allocator_buf));
const allocator = logToWriterAllocator(fixedBufferAllocator.allocator(), fbs.writer()).allocator();
var allocator_state = logToWriterAllocator(fixedBufferAllocator.allocator(), fbs.writer());
const allocator = allocator_state.allocator();

var a = try allocator.alloc(u8, 10);
a = allocator.shrink(a, 5);
@@ -344,7 +344,8 @@ pub fn Reader(

test "Reader" {
var buf = "a\x02".*;
const reader = std.io.fixedBufferStream(&buf).reader();
var fis = std.io.fixedBufferStream(&buf);
const reader = fis.reader();
try testing.expect((try reader.readByte()) == 'a');
try testing.expect((try reader.readEnum(enum(u8) {
a = 0,
@@ -356,13 +357,15 @@ test "Reader" {
}

test "Reader.isBytes" {
const reader = std.io.fixedBufferStream("foobar").reader();
var fis = std.io.fixedBufferStream("foobar");
const reader = fis.reader();
try testing.expectEqual(true, try reader.isBytes("foo"));
try testing.expectEqual(false, try reader.isBytes("qux"));
}

test "Reader.skipBytes" {
const reader = std.io.fixedBufferStream("foobar").reader();
var fis = std.io.fixedBufferStream("foobar");
const reader = fis.reader();
try reader.skipBytes(3, .{});
try testing.expect(try reader.isBytes("bar"));
try reader.skipBytes(0, .{});
@@ -374,7 +377,8 @@ test "Reader.readUntilDelimiterArrayList returns ArrayLists with bytes read unti
var list = std.ArrayList(u8).init(a);
defer list.deinit();

const reader = std.io.fixedBufferStream("0000\n1234\n").reader();
var fis = std.io.fixedBufferStream("0000\n1234\n");
const reader = fis.reader();

try reader.readUntilDelimiterArrayList(&list, '\n', 5);
try std.testing.expectEqualStrings("0000", list.items);
@@ -388,7 +392,8 @@ test "Reader.readUntilDelimiterArrayList returns an empty ArrayList" {
var list = std.ArrayList(u8).init(a);
defer list.deinit();

const reader = std.io.fixedBufferStream("\n").reader();
var fis = std.io.fixedBufferStream("\n");
const reader = fis.reader();

try reader.readUntilDelimiterArrayList(&list, '\n', 5);
try std.testing.expectEqualStrings("", list.items);
@@ -399,7 +404,8 @@ test "Reader.readUntilDelimiterArrayList returns StreamTooLong, then an ArrayLis
var list = std.ArrayList(u8).init(a);
defer list.deinit();

const reader = std.io.fixedBufferStream("1234567\n").reader();
var fis = std.io.fixedBufferStream("1234567\n");
const reader = fis.reader();

try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterArrayList(&list, '\n', 5));
try std.testing.expectEqualStrings("12345", list.items);
@@ -412,7 +418,8 @@ test "Reader.readUntilDelimiterArrayList returns EndOfStream" {
var list = std.ArrayList(u8).init(a);
defer list.deinit();

const reader = std.io.fixedBufferStream("1234").reader();
var fis = std.io.fixedBufferStream("1234");
const reader = fis.reader();

try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiterArrayList(&list, '\n', 5));
try std.testing.expectEqualStrings("1234", list.items);
@@ -421,7 +428,8 @@ test "Reader.readUntilDelimiterArrayList returns EndOfStream" {
test "Reader.readUntilDelimiterAlloc returns ArrayLists with bytes read until the delimiter, then EndOfStream" {
const a = std.testing.allocator;

const reader = std.io.fixedBufferStream("0000\n1234\n").reader();
var fis = std.io.fixedBufferStream("0000\n1234\n");
const reader = fis.reader();

{
var result = try reader.readUntilDelimiterAlloc(a, '\n', 5);
@@ -441,7 +449,8 @@ test "Reader.readUntilDelimiterAlloc returns ArrayLists with bytes read until th
test "Reader.readUntilDelimiterAlloc returns an empty ArrayList" {
const a = std.testing.allocator;

const reader = std.io.fixedBufferStream("\n").reader();
var fis = std.io.fixedBufferStream("\n");
const reader = fis.reader();

{
var result = try reader.readUntilDelimiterAlloc(a, '\n', 5);
@@ -453,7 +462,8 @@ test "Reader.readUntilDelimiterAlloc returns an empty ArrayList" {
test "Reader.readUntilDelimiterAlloc returns StreamTooLong, then an ArrayList with bytes read until the delimiter" {
const a = std.testing.allocator;

const reader = std.io.fixedBufferStream("1234567\n").reader();
var fis = std.io.fixedBufferStream("1234567\n");
const reader = fis.reader();

try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterAlloc(a, '\n', 5));
@@ -465,67 +475,77 @@ test "Reader.readUntilDelimiterAlloc returns StreamTooLong, then an ArrayList wi
test "Reader.readUntilDelimiterAlloc returns EndOfStream" {
const a = std.testing.allocator;

const reader = std.io.fixedBufferStream("1234").reader();
var fis = std.io.fixedBufferStream("1234");
const reader = fis.reader();

try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiterAlloc(a, '\n', 5));
}

test "Reader.readUntilDelimiter returns bytes read until the delimiter" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("0000\n1234\n").reader();
var fis = std.io.fixedBufferStream("0000\n1234\n");
const reader = fis.reader();
try std.testing.expectEqualStrings("0000", try reader.readUntilDelimiter(&buf, '\n'));
try std.testing.expectEqualStrings("1234", try reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter returns an empty string" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("\n").reader();
var fis = std.io.fixedBufferStream("\n");
const reader = fis.reader();
try std.testing.expectEqualStrings("", try reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter returns StreamTooLong, then an empty string" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("12345\n").reader();
var fis = std.io.fixedBufferStream("12345\n");
const reader = fis.reader();
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n'));
try std.testing.expectEqualStrings("", try reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter returns StreamTooLong, then bytes read until the delimiter" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("1234567\n").reader();
var fis = std.io.fixedBufferStream("1234567\n");
const reader = fis.reader();
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n'));
try std.testing.expectEqualStrings("67", try reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter returns EndOfStream" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("").reader();
var fis = std.io.fixedBufferStream("");
const reader = fis.reader();
try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter returns bytes read until delimiter, then EndOfStream" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("1234\n").reader();
var fis = std.io.fixedBufferStream("1234\n");
const reader = fis.reader();
try std.testing.expectEqualStrings("1234", try reader.readUntilDelimiter(&buf, '\n'));
try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter returns EndOfStream" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("1234").reader();
var fis = std.io.fixedBufferStream("1234");
const reader = fis.reader();
try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter returns StreamTooLong, then EndOfStream" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("12345").reader();
var fis = std.io.fixedBufferStream("12345");
const reader = fis.reader();
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n'));
try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n'));
}

test "Reader.readUntilDelimiter writes all bytes read to the output buffer" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("0000\n12345").reader();
var fis = std.io.fixedBufferStream("0000\n12345");
const reader = fis.reader();
_ = try reader.readUntilDelimiter(&buf, '\n');
try std.testing.expectEqualStrings("0000\n", &buf);
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n'));
@@ -535,7 +555,8 @@ test "Reader.readUntilDelimiter writes all bytes read to the output buffer" {
test "Reader.readUntilDelimiterOrEofAlloc returns ArrayLists with bytes read until the delimiter, then EndOfStream" {
const a = std.testing.allocator;

const reader = std.io.fixedBufferStream("0000\n1234\n").reader();
var fis = std.io.fixedBufferStream("0000\n1234\n");
const reader = fis.reader();

{
var result = (try reader.readUntilDelimiterOrEofAlloc(a, '\n', 5)).?;
@@ -555,7 +576,8 @@ test "Reader.readUntilDelimiterOrEofAlloc returns ArrayLists with bytes read unt
test "Reader.readUntilDelimiterOrEofAlloc returns an empty ArrayList" {
const a = std.testing.allocator;

const reader = std.io.fixedBufferStream("\n").reader();
var fis = std.io.fixedBufferStream("\n");
const reader = fis.reader();

{
var result = (try reader.readUntilDelimiterOrEofAlloc(a, '\n', 5)).?;
@@ -567,7 +589,8 @@ test "Reader.readUntilDelimiterOrEofAlloc returns an empty ArrayList" {
test "Reader.readUntilDelimiterOrEofAlloc returns StreamTooLong, then an ArrayList with bytes read until the delimiter" {
const a = std.testing.allocator;

const reader = std.io.fixedBufferStream("1234567\n").reader();
var fis = std.io.fixedBufferStream("1234567\n");
const reader = fis.reader();

try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEofAlloc(a, '\n', 5));

@@ -578,60 +601,69 @@ test "Reader.readUntilDelimiterOrEofAlloc returns StreamTooLong, then an ArrayLi

test "Reader.readUntilDelimiterOrEof returns bytes read until the delimiter" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("0000\n1234\n").reader();
var fis = std.io.fixedBufferStream("0000\n1234\n");
const reader = fis.reader();
try std.testing.expectEqualStrings("0000", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
try std.testing.expectEqualStrings("1234", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
}

test "Reader.readUntilDelimiterOrEof returns an empty string" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("\n").reader();
var fis = std.io.fixedBufferStream("\n");
const reader = fis.reader();
try std.testing.expectEqualStrings("", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
}

test "Reader.readUntilDelimiterOrEof returns StreamTooLong, then an empty string" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("12345\n").reader();
var fis = std.io.fixedBufferStream("12345\n");
const reader = fis.reader();
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n'));
try std.testing.expectEqualStrings("", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
}

test "Reader.readUntilDelimiterOrEof returns StreamTooLong, then bytes read until the delimiter" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("1234567\n").reader();
var fis = std.io.fixedBufferStream("1234567\n");
const reader = fis.reader();
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n'));
try std.testing.expectEqualStrings("67", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
}

test "Reader.readUntilDelimiterOrEof returns null" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("").reader();
var fis = std.io.fixedBufferStream("");
const reader = fis.reader();
try std.testing.expect((try reader.readUntilDelimiterOrEof(&buf, '\n')) == null);
}

test "Reader.readUntilDelimiterOrEof returns bytes read until delimiter, then null" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("1234\n").reader();
var fis = std.io.fixedBufferStream("1234\n");
const reader = fis.reader();
try std.testing.expectEqualStrings("1234", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
try std.testing.expect((try reader.readUntilDelimiterOrEof(&buf, '\n')) == null);
}

test "Reader.readUntilDelimiterOrEof returns bytes read until end-of-stream" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("1234").reader();
var fis = std.io.fixedBufferStream("1234");
const reader = fis.reader();
try std.testing.expectEqualStrings("1234", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
}

test "Reader.readUntilDelimiterOrEof returns StreamTooLong, then bytes read until end-of-stream" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("1234567").reader();
var fis = std.io.fixedBufferStream("1234567");
const reader = fis.reader();
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n'));
try std.testing.expectEqualStrings("67", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?);
}

test "Reader.readUntilDelimiterOrEof writes all bytes read to the output buffer" {
var buf: [5]u8 = undefined;
const reader = std.io.fixedBufferStream("0000\n12345").reader();
var fis = std.io.fixedBufferStream("0000\n12345");
const reader = fis.reader();
_ = try reader.readUntilDelimiterOrEof(&buf, '\n');
try std.testing.expectEqualStrings("0000\n", &buf);
try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n'));
lib/std/json.zig (219 changed lines)
@@ -1506,42 +1506,46 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {
}

test "skipValue" {
try skipValue(&TokenStream.init("false"));
try skipValue(&TokenStream.init("true"));
try skipValue(&TokenStream.init("null"));
try skipValue(&TokenStream.init("42"));
try skipValue(&TokenStream.init("42.0"));
try skipValue(&TokenStream.init("\"foo\""));
try skipValue(&TokenStream.init("[101, 111, 121]"));
try skipValue(&TokenStream.init("{}"));
try skipValue(&TokenStream.init("{\"foo\": \"bar\"}"));
var ts = TokenStream.init("false");
try skipValue(&ts);
ts = TokenStream.init("true");
try skipValue(&ts);
ts = TokenStream.init("null");
try skipValue(&ts);
ts = TokenStream.init("42");
try skipValue(&ts);
ts = TokenStream.init("42.0");
try skipValue(&ts);
ts = TokenStream.init("\"foo\"");
try skipValue(&ts);
ts = TokenStream.init("[101, 111, 121]");
try skipValue(&ts);
ts = TokenStream.init("{}");
try skipValue(&ts);
ts = TokenStream.init("{\"foo\": \"bar\"}");
try skipValue(&ts);

{ // An absurd number of nestings
const nestings = StreamingParser.default_max_nestings + 1;

try testing.expectError(
error.TooManyNestedItems,
skipValue(&TokenStream.init("[" ** nestings ++ "]" ** nestings)),
);
ts = TokenStream.init("[" ** nestings ++ "]" ** nestings);
try testing.expectError(error.TooManyNestedItems, skipValue(&ts));
}

{ // Would a number token cause problems in a deeply-nested array?
const nestings = StreamingParser.default_max_nestings;
const deeply_nested_array = "[" ** nestings ++ "0.118, 999, 881.99, 911.9, 725, 3" ++ "]" ** nestings;

try skipValue(&TokenStream.init(deeply_nested_array));
ts = TokenStream.init(deeply_nested_array);
try skipValue(&ts);

try testing.expectError(
error.TooManyNestedItems,
skipValue(&TokenStream.init("[" ++ deeply_nested_array ++ "]")),
);
ts = TokenStream.init("[" ++ deeply_nested_array ++ "]");
try testing.expectError(error.TooManyNestedItems, skipValue(&ts));
}

// Mismatched brace/square bracket
try testing.expectError(
error.UnexpectedClosingBrace,
skipValue(&TokenStream.init("[102, 111, 111}")),
);
ts = TokenStream.init("[102, 111, 111}");
try testing.expectError(error.UnexpectedClosingBrace, skipValue(&ts));

{ // should fail if no value found (e.g. immediate close of object)
var empty_object = TokenStream.init("{}");
@@ -1980,18 +1984,29 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
}

test "parse" {
try testing.expectEqual(false, try parse(bool, &TokenStream.init("false"), ParseOptions{}));
try testing.expectEqual(true, try parse(bool, &TokenStream.init("true"), ParseOptions{}));
try testing.expectEqual(@as(u1, 1), try parse(u1, &TokenStream.init("1"), ParseOptions{}));
try testing.expectError(error.Overflow, parse(u1, &TokenStream.init("50"), ParseOptions{}));
try testing.expectEqual(@as(u64, 42), try parse(u64, &TokenStream.init("42"), ParseOptions{}));
try testing.expectEqual(@as(f64, 42), try parse(f64, &TokenStream.init("42.0"), ParseOptions{}));
try testing.expectEqual(@as(?bool, null), try parse(?bool, &TokenStream.init("null"), ParseOptions{}));
try testing.expectEqual(@as(?bool, true), try parse(?bool, &TokenStream.init("true"), ParseOptions{}));
var ts = TokenStream.init("false");
try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{}));
ts = TokenStream.init("true");
try testing.expectEqual(true, try parse(bool, &ts, ParseOptions{}));
ts = TokenStream.init("1");
try testing.expectEqual(@as(u1, 1), try parse(u1, &ts, ParseOptions{}));
ts = TokenStream.init("50");
try testing.expectError(error.Overflow, parse(u1, &ts, ParseOptions{}));
ts = TokenStream.init("42");
try testing.expectEqual(@as(u64, 42), try parse(u64, &ts, ParseOptions{}));
ts = TokenStream.init("42.0");
try testing.expectEqual(@as(f64, 42), try parse(f64, &ts, ParseOptions{}));
ts = TokenStream.init("null");
try testing.expectEqual(@as(?bool, null), try parse(?bool, &ts, ParseOptions{}));
ts = TokenStream.init("true");
try testing.expectEqual(@as(?bool, true), try parse(?bool, &ts, ParseOptions{}));

try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &TokenStream.init("\"foo\""), ParseOptions{}));
try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &TokenStream.init("[102, 111, 111]"), ParseOptions{}));
try testing.expectEqual(@as([0]u8, undefined), try parse([0]u8, &TokenStream.init("[]"), ParseOptions{}));
ts = TokenStream.init("\"foo\"");
try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
ts = TokenStream.init("[102, 111, 111]");
try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
ts = TokenStream.init("[]");
try testing.expectEqual(@as([0]u8, undefined), try parse([0]u8, &ts, ParseOptions{}));
}

test "parse into enum" {
@@ -2000,36 +2015,48 @@ test "parse into enum" {
Bar,
@"with\\escape",
};
try testing.expectEqual(@as(T, .Foo), try parse(T, &TokenStream.init("\"Foo\""), ParseOptions{}));
try testing.expectEqual(@as(T, .Foo), try parse(T, &TokenStream.init("42"), ParseOptions{}));
try testing.expectEqual(@as(T, .@"with\\escape"), try parse(T, &TokenStream.init("\"with\\\\escape\""), ParseOptions{}));
try testing.expectError(error.InvalidEnumTag, parse(T, &TokenStream.init("5"), ParseOptions{}));
try testing.expectError(error.InvalidEnumTag, parse(T, &TokenStream.init("\"Qux\""), ParseOptions{}));
var ts = TokenStream.init("\"Foo\"");
try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
ts = TokenStream.init("42");
try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
ts = TokenStream.init("\"with\\\\escape\"");
try testing.expectEqual(@as(T, .@"with\\escape"), try parse(T, &ts, ParseOptions{}));
ts = TokenStream.init("5");
try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
ts = TokenStream.init("\"Qux\"");
try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
}

test "parse with trailing data" {
try testing.expectEqual(false, try parse(bool, &TokenStream.init("falsed"), ParseOptions{ .allow_trailing_data = true }));
try testing.expectError(error.InvalidTopLevelTrailing, parse(bool, &TokenStream.init("falsed"), ParseOptions{ .allow_trailing_data = false }));
var ts = TokenStream.init("falsed");
try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = true }));
ts = TokenStream.init("falsed");
try testing.expectError(error.InvalidTopLevelTrailing, parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
// trailing whitespace is okay
try testing.expectEqual(false, try parse(bool, &TokenStream.init("false \n"), ParseOptions{ .allow_trailing_data = false }));
ts = TokenStream.init("false \n");
try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
}

test "parse into that allocates a slice" {
try testing.expectError(error.AllocatorRequired, parse([]u8, &TokenStream.init("\"foo\""), ParseOptions{}));
var ts = TokenStream.init("\"foo\"");
try testing.expectError(error.AllocatorRequired, parse([]u8, &ts, ParseOptions{}));

const options = ParseOptions{ .allocator = testing.allocator };
{
const r = try parse([]u8, &TokenStream.init("\"foo\""), options);
ts = TokenStream.init("\"foo\"");
const r = try parse([]u8, &ts, options);
defer parseFree([]u8, r, options);
try testing.expectEqualSlices(u8, "foo", r);
}
{
const r = try parse([]u8, &TokenStream.init("[102, 111, 111]"), options);
ts = TokenStream.init("[102, 111, 111]");
const r = try parse([]u8, &ts, options);
defer parseFree([]u8, r, options);
try testing.expectEqualSlices(u8, "foo", r);
}
{
const r = try parse([]u8, &TokenStream.init("\"with\\\\escape\""), options);
ts = TokenStream.init("\"with\\\\escape\"");
const r = try parse([]u8, &ts, options);
defer parseFree([]u8, r, options);
try testing.expectEqualSlices(u8, "with\\escape", r);
}
@@ -2042,7 +2069,8 @@ test "parse into tagged union" {
float: f64,
string: []const u8,
};
try testing.expectEqual(T{ .float = 1.5 }, try parse(T, &TokenStream.init("1.5"), ParseOptions{}));
var ts = TokenStream.init("1.5");
try testing.expectEqual(T{ .float = 1.5 }, try parse(T, &ts, ParseOptions{}));
}

{ // failing allocations should be bubbled up instantly without trying next member
@@ -2053,7 +2081,8 @@ test "parse into tagged union" {
string: []const u8,
array: [3]u8,
};
try testing.expectError(error.OutOfMemory, parse(T, &TokenStream.init("[1,2,3]"), options));
var ts = TokenStream.init("[1,2,3]");
try testing.expectError(error.OutOfMemory, parse(T, &ts, options));
}

{
@@ -2062,7 +2091,8 @@ test "parse into tagged union" {
x: u8,
y: u8,
};
try testing.expectEqual(T{ .x = 42 }, try parse(T, &TokenStream.init("42"), ParseOptions{}));
var ts = TokenStream.init("42");
try testing.expectEqual(T{ .x = 42 }, try parse(T, &ts, ParseOptions{}));
}

{ // needs to back out when first union member doesn't match
@@ -2070,7 +2100,8 @@ test "parse into tagged union" {
A: struct { x: u32 },
B: struct { y: u32 },
};
try testing.expectEqual(T{ .B = .{ .y = 42 } }, try parse(T, &TokenStream.init("{\"y\":42}"), ParseOptions{}));
var ts = TokenStream.init("{\"y\":42}");
try testing.expectEqual(T{ .B = .{ .y = 42 } }, try parse(T, &ts, ParseOptions{}));
}
}

@@ -2080,7 +2111,8 @@ test "parse union bubbles up AllocatorRequired" {
string: []const u8,
int: i32,
};
try testing.expectError(error.AllocatorRequired, parse(T, &TokenStream.init("42"), ParseOptions{}));
var ts = TokenStream.init("42");
try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
}

{ // string member not first in union (and matching)
@@ -2089,7 +2121,8 @@ test "parse union bubbles up AllocatorRequired" {
float: f64,
string: []const u8,
};
try testing.expectError(error.AllocatorRequired, parse(T, &TokenStream.init("\"foo\""), ParseOptions{}));
var ts = TokenStream.init("\"foo\"");
try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
}
}

@@ -2102,7 +2135,8 @@ test "parseFree descends into tagged union" {
string: []const u8,
};
// use a string with unicode escape so we know result can't be a reference to global constant
const r = try parse(T, &TokenStream.init("\"with\\u0105unicode\""), options);
var ts = TokenStream.init("\"with\\u0105unicode\"");
const r = try parse(T, &ts, options);
try testing.expectEqual(std.meta.Tag(T).string, @as(std.meta.Tag(T), r));
try testing.expectEqualSlices(u8, "withąunicode", r.string);
try testing.expectEqual(@as(usize, 0), fail_alloc.deallocations);
@@ -2116,12 +2150,13 @@ test "parse with comptime field" {
comptime a: i32 = 0,
b: bool,
};
try testing.expectEqual(T{ .a = 0, .b = true }, try parse(T, &TokenStream.init(
var ts = TokenStream.init(
\\{
\\  "a": 0,
\\  "b": true
\\}
), ParseOptions{}));
);
try testing.expectEqual(T{ .a = 0, .b = true }, try parse(T, &ts, ParseOptions{}));
}

{ // string comptime values currently require an allocator
@@ -2140,12 +2175,13 @@ test "parse with comptime field" {
.allocator = std.testing.allocator,
};

const r = try parse(T, &TokenStream.init(
var ts = TokenStream.init(
\\{
\\  "kind": "float",
\\  "b": 1.0
\\}
), options);
);
const r = try parse(T, &ts, options);

// check that parseFree doesn't try to free comptime fields
parseFree(T, r, options);
@@ -2154,7 +2190,8 @@ test "parse with comptime field" {

test "parse into struct with no fields" {
const T = struct {};
try testing.expectEqual(T{}, try parse(T, &TokenStream.init("{}"), ParseOptions{}));
var ts = TokenStream.init("{}");
try testing.expectEqual(T{}, try parse(T, &ts, ParseOptions{}));
}

test "parse into struct with misc fields" {
@@ -2186,7 +2223,7 @@ test "parse into struct with misc fields" {
string: []const u8,
};
};
const r = try parse(T, &TokenStream.init(
var ts = TokenStream.init(
\\{
\\  "int": 420,
\\  "float": 3.14,
@@ -2208,7 +2245,8 @@ test "parse into struct with misc fields" {
\\  ],
\\  "a_union": 100000
\\}
), options);
);
const r = try parse(T, &ts, options);
defer parseFree(T, r, options);
try testing.expectEqual(@as(i64, 420), r.int);
try testing.expectEqual(@as(f64, 3.14), r.float);
@@ -2239,14 +2277,15 @@ test "parse into struct with strings and arrays with sentinels" {
data: [:99]const i32,
simple_data: []const i32,
};
const r = try parse(T, &TokenStream.init(
var ts = TokenStream.init(
\\{
\\  "language": "zig",
\\  "language_without_sentinel": "zig again!",
\\  "data": [1, 2, 3],
\\  "simple_data": [4, 5, 6]
\\}
), options);
);
const r = try parse(T, &ts, options);
defer parseFree(T, r, options);

try testing.expectEqualSentinel(u8, 0, "zig", r.language);
@@ -2275,19 +2314,25 @@ test "parse into struct with duplicate field" {

const T1 = struct { a: *u64 };
// both .UseFirst and .UseLast should fail because second "a" value isn't a u64
try testing.expectError(error.InvalidNumber, parse(T1, &TokenStream.init(str), options_first));
try testing.expectError(error.InvalidNumber, parse(T1, &TokenStream.init(str), options_last));
var ts = TokenStream.init(str);
try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_first));
ts = TokenStream.init(str);
try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_last));

const T2 = struct { a: f64 };
try testing.expectEqual(T2{ .a = 1.0 }, try parse(T2, &TokenStream.init(str), options_first));
try testing.expectEqual(T2{ .a = 0.25 }, try parse(T2, &TokenStream.init(str), options_last));
ts = TokenStream.init(str);
try testing.expectEqual(T2{ .a = 1.0 }, try parse(T2, &ts, options_first));
ts = TokenStream.init(str);
try testing.expectEqual(T2{ .a = 0.25 }, try parse(T2, &ts, options_last));

const T3 = struct { comptime a: f64 = 1.0 };
// .UseFirst should succeed because second "a" value is unconditionally ignored (even though != 1.0)
const t3 = T3{ .a = 1.0 };
try testing.expectEqual(t3, try parse(T3, &TokenStream.init(str), options_first));
ts = TokenStream.init(str);
try testing.expectEqual(t3, try parse(T3, &ts, options_first));
// .UseLast should fail because second "a" value is 0.25 which is not equal to default value of 1.0
try testing.expectError(error.UnexpectedValue, parse(T3, &TokenStream.init(str), options_last));
ts = TokenStream.init(str);
try testing.expectError(error.UnexpectedValue, parse(T3, &ts, options_last));
}

test "parse into struct ignoring unknown fields" {
@@ -2301,7 +2346,7 @@ test "parse into struct ignoring unknown fields" {
.ignore_unknown_fields = true,
};

const r = try parse(T, &std.json.TokenStream.init(
var ts = TokenStream.init(
\\{
\\  "int": 420,
\\  "float": 3.14,
@@ -2323,7 +2368,8 @@ test "parse into struct ignoring unknown fields" {
\\  "a_union": 100000,
\\  "language": "zig"
\\}
), ops);
);
const r = try parse(T, &ts, ops);
defer parseFree(T, r, ops);

try testing.expectEqual(@as(i64, 420), r.int);
@@ -2341,7 +2387,8 @@ test "parse into recursive union definition" {
};
const ops = ParseOptions{ .allocator = testing.allocator };

const r = try parse(T, &std.json.TokenStream.init("{\"values\":[58]}"), ops);
var ts = TokenStream.init("{\"values\":[58]}");
const r = try parse(T, &ts, ops);
defer parseFree(T, r, ops);

try testing.expectEqual(@as(i64, 58), r.values.array[0].integer);
@@ -2363,7 +2410,8 @@ test "parse into double recursive union definition" {
};
const ops = ParseOptions{ .allocator = testing.allocator };

const r = try parse(T, &std.json.TokenStream.init("{\"values\":[[58]]}"), ops);
var ts = TokenStream.init("{\"values\":[[58]]}");
const r = try parse(T, &ts, ops);
defer parseFree(T, r, ops);

try testing.expectEqual(@as(i64, 58), r.values.array[0].array[0].integer);
@@ -2806,10 +2854,13 @@ test "integer after float has proper type" {

test "parse exponential into int" {
const T = struct { int: i64 };
const r = try parse(T, &TokenStream.init("{ \"int\": 4.2e2 }"), ParseOptions{});
var ts = TokenStream.init("{ \"int\": 4.2e2 }");
const r = try parse(T, &ts, ParseOptions{});
try testing.expectEqual(@as(i64, 420), r.int);
try testing.expectError(error.InvalidNumber, parse(T, &TokenStream.init("{ \"int\": 0.042e2 }"), ParseOptions{}));
try testing.expectError(error.Overflow, parse(T, &TokenStream.init("{ \"int\": 18446744073709551616.0 }"), ParseOptions{}));
ts = TokenStream.init("{ \"int\": 0.042e2 }");
try testing.expectError(error.InvalidNumber, parse(T, &ts, ParseOptions{}));
ts = TokenStream.init("{ \"int\": 18446744073709551616.0 }");
try testing.expectError(error.Overflow, parse(T, &ts, ParseOptions{}));
}

test "escaped characters" {
@@ -2858,10 +2909,12 @@ test "string copy option" {
defer arena_allocator.deinit();
const allocator = arena_allocator.allocator();

const tree_nocopy = try Parser.init(allocator, false).parse(input);
var parser = Parser.init(allocator, false);
const tree_nocopy = try parser.parse(input);
const obj_nocopy = tree_nocopy.root.Object;

const tree_copy = try Parser.init(allocator, true).parse(input);
parser = Parser.init(allocator, true);
const tree_copy = try parser.parse(input);
const obj_copy = tree_copy.root.Object;

for ([_][]const u8{ "noescape", "simple", "unicode", "surrogatepair" }) |field_name| {
@@ -3376,14 +3429,12 @@ test "stringify null optional fields" {
StringifyOptions{ .emit_null_optional_fields = false },
);

try std.testing.expect(try parsesTo(
MyStruct,
MyStruct{},
&TokenStream.init(
\\{"required":"something","another_required":"something else"}
),
.{ .allocator = std.testing.allocator },
));
var ts = TokenStream.init(
\\{"required":"something","another_required":"something else"}
);
try std.testing.expect(try parsesTo(MyStruct, MyStruct{}, &ts, .{
.allocator = std.testing.allocator,
}));
}

// Same as `stringify` but accepts an Allocator and stores result in dynamically allocated memory instead of using a Writer.
@@ -311,7 +311,10 @@ pub fn assumeSentinel(p: anytype, comptime sentinel_val: Elem(@TypeOf(p))) Senti
const ReturnType = Sentinel(T, sentinel_val);
switch (@typeInfo(T)) {
.Pointer => |info| switch (info.size) {
.Slice => return @bitCast(ReturnType, p),
.Slice => if (@import("builtin").zig_backend == .stage1)
return @bitCast(ReturnType, p)
else
return @ptrCast(ReturnType, p),
.Many, .One => return @ptrCast(ReturnType, p),
.C => {},
},
@@ -1141,18 +1141,20 @@ fn linuxLookupNameFromHosts(
};
defer file.close();

const stream = std.io.bufferedReader(file.reader()).reader();
var buffered_reader = std.io.bufferedReader(file.reader());
const reader = buffered_reader.reader();
var line_buf: [512]u8 = undefined;
while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
while (reader.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
error.StreamTooLong => blk: {
// Skip to the delimiter in the stream, to fix parsing
try stream.skipUntilDelimiterOrEof('\n');
// Skip to the delimiter in the reader, to fix parsing
try reader.skipUntilDelimiterOrEof('\n');
// Use the truncated line. A truncated comment or hostname will be handled correctly.
break :blk &line_buf;
},
else => |e| return e,
}) |line| {
const no_comment_line = mem.split(u8, line, "#").next().?;
var split_it = mem.split(u8, line, "#");
const no_comment_line = split_it.next().?;

var line_it = mem.tokenize(u8, no_comment_line, " \t");
const ip_text = line_it.next() orelse continue;
@@ -391,7 +391,10 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
}

test "SegmentedList basic usage" {
try testSegmentedList(0);
if (@import("builtin").zig_backend == .stage1) {
// https://github.com/ziglang/zig/issues/11787
try testSegmentedList(0);
}
try testSegmentedList(1);
try testSegmentedList(2);
try testSegmentedList(4);
@@ -804,6 +804,7 @@ pub fn fmtUtf16le(utf16le: []const u16) std.fmt.Formatter(formatUtf16le) {
}

test "fmtUtf16le" {
if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
const expectFmt = std.testing.expectFmt;
try expectFmt("", "{}", .{fmtUtf16le(utf8ToUtf16LeStringLiteral(""))});
try expectFmt("foo", "{}", .{fmtUtf16le(utf8ToUtf16LeStringLiteral("foo"))});
@@ -13,6 +13,7 @@ pub const net = struct {
};

test {
if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
inline for (.{ os, net }) |module| {
std.testing.refAllDecls(module);
}
@@ -8,10 +8,19 @@ pub fn cast(comptime DestType: type, target: anytype) DestType {
// this function should behave like transCCast in translate-c, except it's for macros
const SourceType = @TypeOf(target);
switch (@typeInfo(DestType)) {
.Fn, .Pointer => return castToPtr(DestType, SourceType, target),
.Fn => if (@import("builtin").zig_backend == .stage1)
return castToPtr(DestType, SourceType, target)
else
return castToPtr(*const DestType, SourceType, target),
.Pointer => return castToPtr(DestType, SourceType, target),
.Optional => |dest_opt| {
if (@typeInfo(dest_opt.child) == .Pointer or @typeInfo(dest_opt.child) == .Fn) {
if (@typeInfo(dest_opt.child) == .Pointer) {
return castToPtr(DestType, SourceType, target);
} else if (@typeInfo(dest_opt.child) == .Fn) {
if (@import("builtin").zig_backend == .stage1)
return castToPtr(DestType, SourceType, target)
else
return castToPtr(?*const dest_opt.child, SourceType, target);
}
},
.Int => {
@@ -124,7 +133,10 @@ test "cast" {
try testing.expect(cast(?*anyopaque, -1) == @intToPtr(?*anyopaque, @bitCast(usize, @as(isize, -1))));
try testing.expect(cast(?*anyopaque, foo) == @intToPtr(?*anyopaque, @bitCast(usize, @as(isize, -1))));

const FnPtr = ?fn (*anyopaque) void;
const FnPtr = if (@import("builtin").zig_backend == .stage1)
?fn (*anyopaque) void
else
?*const fn (*anyopaque) void;
try testing.expect(cast(FnPtr, 0) == @intToPtr(FnPtr, @as(usize, 0)));
try testing.expect(cast(FnPtr, foo) == @intToPtr(FnPtr, @bitCast(usize, @as(isize, -1))));
}
@@ -135,9 +147,14 @@ pub fn sizeof(target: anytype) usize {
switch (@typeInfo(T)) {
.Float, .Int, .Struct, .Union, .Array, .Bool, .Vector => return @sizeOf(T),
.Fn => {
// sizeof(main) returns 1, sizeof(&main) returns pointer size.
// We cannot distinguish those types in Zig, so use pointer size.
return @sizeOf(T);
if (@import("builtin").zig_backend == .stage1) {
// sizeof(main) returns 1, sizeof(&main) returns pointer size.
// We cannot distinguish those types in Zig, so use pointer size.
return @sizeOf(T);
}

// sizeof(main) in C returns 1
return 1;
},
.Null => return @sizeOf(*anyopaque),
.Void => {
@@ -233,7 +250,12 @@ test "sizeof" {
try testing.expect(sizeof(*const *const [4:0]u8) == ptr_size);
try testing.expect(sizeof(*const [4]u8) == ptr_size);

try testing.expect(sizeof(sizeof) == @sizeOf(@TypeOf(sizeof)));
if (@import("builtin").zig_backend == .stage1) {
try testing.expect(sizeof(sizeof) == @sizeOf(@TypeOf(sizeof)));
} else if (false) { // TODO
try testing.expect(sizeof(&sizeof) == @sizeOf(@TypeOf(&sizeof)));
try testing.expect(sizeof(sizeof) == 1);
}

try testing.expect(sizeof(void) == 1);
try testing.expect(sizeof(anyopaque) == 1);
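The translate-c helper changes above (`cast`, `sizeof`, and the `FnPtr` test) reflect that under the self-hosted backends a bare `fn` type is comptime-only, so a runtime C-style callback must be spelled as a pointer to a function. A small sketch of the two spellings (the callback name is illustrative and this assumes a compiler from this stage1/stage2 transition period):

const std = @import("std");

// stage1 spelling of an optional C callback type:
//     const FnPtr = ?fn (*anyopaque) void;
// self-hosted (stage2) spelling, where runtime function values are pointers:
const FnPtr = ?*const fn (*anyopaque) void;

fn onEvent(ctx: *anyopaque) void {
    _ = ctx;
}

test "optional function pointer under the self-hosted compiler" {
    const cb: FnPtr = &onEvent; // take the function's address explicitly
    try std.testing.expect(cb != null);
}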
src/Sema.zig (45 changed lines)
@@ -3598,7 +3598,7 @@ fn zirValidateArrayInit(
// any ZIR instructions at comptime; we need to do that here.
if (array_ty.sentinel()) |sentinel_val| {
const array_len_ref = try sema.addIntUnsigned(Type.usize, array_len);
const sentinel_ptr = try sema.elemPtrArray(block, init_src, array_ptr, init_src, array_len_ref);
const sentinel_ptr = try sema.elemPtrArray(block, init_src, array_ptr, init_src, array_len_ref, true);
const sentinel = try sema.addConstant(array_ty.childType(), sentinel_val);
try sema.storePtr2(block, init_src, sentinel_ptr, init_src, sentinel, init_src, .store);
}
@@ -6654,7 +6654,11 @@ fn zirFunc(
src_locs = sema.code.extraData(Zir.Inst.Func.SrcLocs, extra_index).data;
}

const cc: std.builtin.CallingConvention = if (sema.owner_decl.is_exported)
// If this instruction has a body it means it's the type of the `owner_decl`
// otherwise it's a function type without a `callconv` attribute and should
// never be `.C`.
// NOTE: revisit when doing #1717
const cc: std.builtin.CallingConvention = if (sema.owner_decl.is_exported and has_body)
.C
else
.Unspecified;
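The `zirFunc` change above narrows when an exported declaration's calling convention is inferred as `.C`: only the function instruction that actually has a body (the exported function itself) picks up the implicit C calling convention, while a bare function type encountered during that declaration's analysis stays `.Unspecified`. A rough illustration of the distinction the comment describes (hypothetical user code, not from this commit):

// The exported function itself has a body, so its unspecified calling
// convention may be inferred as .C because of the export.
export fn entry() void {
    // This is only a function *type*, with no body and no callconv
    // attribute; after this change it is no longer implicitly treated as .C.
    const Callback = fn () void;
    _ = Callback;
}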
@@ -7540,7 +7544,7 @@ fn zirElemPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
const bin_inst = sema.code.instructions.items(.data)[inst].bin;
const array_ptr = try sema.resolveInst(bin_inst.lhs);
const elem_index = try sema.resolveInst(bin_inst.rhs);
return sema.elemPtr(block, sema.src, array_ptr, elem_index, sema.src);
return sema.elemPtr(block, sema.src, array_ptr, elem_index, sema.src, false);
}

fn zirElemPtrNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@@ -7553,7 +7557,7 @@ fn zirElemPtrNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const array_ptr = try sema.resolveInst(extra.lhs);
const elem_index = try sema.resolveInst(extra.rhs);
return sema.elemPtr(block, src, array_ptr, elem_index, elem_index_src);
return sema.elemPtr(block, src, array_ptr, elem_index, elem_index_src, false);
}

fn zirElemPtrImm(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@@ -7565,7 +7569,7 @@ fn zirElemPtrImm(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
const extra = sema.code.extraData(Zir.Inst.ElemPtrImm, inst_data.payload_index).data;
const array_ptr = try sema.resolveInst(extra.ptr);
const elem_index = try sema.addIntUnsigned(Type.usize, extra.index);
return sema.elemPtr(block, src, array_ptr, elem_index, src);
return sema.elemPtr(block, src, array_ptr, elem_index, src, true);
}

fn zirSliceStart(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@@ -11547,7 +11551,7 @@ fn zirSizeOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const ty = try sema.resolveType(block, operand_src, inst_data.operand);
switch (ty.zigTypeTag()) {
.Fn => unreachable,
.Fn,
.NoReturn,
.Undefined,
.Null,
@@ -13465,7 +13469,12 @@ fn zirStructInit(
}

if (is_ref) {
const alloc = try block.addTy(.alloc, resolved_ty);
const target = sema.mod.getTarget();
const alloc_ty = try Type.ptr(sema.arena, sema.mod, .{
.pointee_type = resolved_ty,
.@"addrspace" = target_util.defaultAddressSpace(target, .local),
});
const alloc = try block.addTy(.alloc, alloc_ty);
const field_ptr = try sema.unionFieldPtr(block, field_src, alloc, field_name, field_src, resolved_ty);
try sema.storePtr(block, src, field_ptr, init_inst);
return alloc;
@@ -18719,6 +18728,7 @@ fn elemPtr(
indexable_ptr: Air.Inst.Ref,
elem_index: Air.Inst.Ref,
elem_index_src: LazySrcLoc,
init: bool,
) CompileError!Air.Inst.Ref {
const indexable_ptr_src = src; // TODO better source location
const indexable_ptr_ty = sema.typeOf(indexable_ptr);
@@ -18755,11 +18765,11 @@ fn elemPtr(
},
.One => {
assert(indexable_ty.childType().zigTypeTag() == .Array); // Guaranteed by isIndexable
return sema.elemPtrArray(block, indexable_ptr_src, indexable, elem_index_src, elem_index);
return sema.elemPtrArray(block, indexable_ptr_src, indexable, elem_index_src, elem_index, init);
},
}
},
.Array, .Vector => return sema.elemPtrArray(block, indexable_ptr_src, indexable_ptr, elem_index_src, elem_index),
.Array, .Vector => return sema.elemPtrArray(block, indexable_ptr_src, indexable_ptr, elem_index_src, elem_index, init),
.Struct => {
// Tuple field access.
const index_val = try sema.resolveConstValue(block, elem_index_src, elem_index);
@@ -18813,7 +18823,7 @@ fn elemVal(
},
.One => {
assert(indexable_ty.childType().zigTypeTag() == .Array); // Guaranteed by isIndexable
const elem_ptr = try sema.elemPtr(block, indexable_src, indexable, elem_index, elem_index_src);
const elem_ptr = try sema.elemPtr(block, indexable_src, indexable, elem_index, elem_index_src, false);
return sema.analyzeLoad(block, indexable_src, elem_ptr, elem_index_src);
},
},
@@ -18994,6 +19004,7 @@ fn elemPtrArray(
array_ptr: Air.Inst.Ref,
elem_index_src: LazySrcLoc,
elem_index: Air.Inst.Ref,
init: bool,
) CompileError!Air.Inst.Ref {
const target = sema.mod.getTarget();
const array_ptr_ty = sema.typeOf(array_ptr);
@@ -19030,7 +19041,7 @@ fn elemPtrArray(
}

const valid_rt = try sema.validateRunTimeType(block, elem_index_src, array_ty.elemType2(), false);
if (!valid_rt) {
if (!valid_rt and !init) {
const msg = msg: {
const msg = try sema.errMsg(
block,
@@ -20133,7 +20144,7 @@ fn storePtr2(
const elem_src = operand_src; // TODO better source location
const elem = try tupleField(sema, block, operand_src, uncasted_operand, elem_src, i);
const elem_index = try sema.addIntUnsigned(Type.usize, i);
const elem_ptr = try sema.elemPtr(block, ptr_src, ptr, elem_index, elem_src);
const elem_ptr = try sema.elemPtr(block, ptr_src, ptr, elem_index, elem_src, false);
try sema.storePtr2(block, src, elem_ptr, elem_src, elem, elem_src, .store);
}
return;
@@ -20400,6 +20411,16 @@ fn beginComptimePtrMutation(
.ty = elem_ty,
},

.the_only_possible_value => {
const duped = try sema.arena.create(Value);
duped.* = Value.initTag(.the_only_possible_value);
return ComptimePtrMutationKit{
.decl_ref_mut = parent.decl_ref_mut,
.val = duped,
.ty = elem_ty,
};
},

else => unreachable,
}
},

@@ -5389,7 +5389,9 @@ pub const FuncGen = struct {
}
llvm_constraints.appendSliceAssumeCapacity(constraint);

name_map.putAssumeCapacityNoClobber(name, {});
if (!std.mem.eql(u8, name, "_")) {
name_map.putAssumeCapacityNoClobber(name, {});
}
llvm_param_i += 1;
total_i += 1;
}
src/type.zig (34 changed lines)
@@ -784,7 +784,7 @@ pub const Type = extern union {

.anyframe_T => {
if (b.zigTypeTag() != .AnyFrame) return false;
return a.childType().eql(b.childType(), mod);
return a.elemType2().eql(b.elemType2(), mod);
},

.empty_struct => {
@@ -2035,7 +2035,11 @@ pub const Type = extern union {
try writer.writeAll("fn(");
for (fn_info.param_types) |param_ty, i| {
if (i != 0) try writer.writeAll(", ");
try print(param_ty, writer, mod);
if (param_ty.tag() == .generic_poison) {
try writer.writeAll("anytype");
} else {
try print(param_ty, writer, mod);
}
}
if (fn_info.is_var_args) {
if (fn_info.param_types.len != 0) {
@@ -2052,7 +2056,11 @@ pub const Type = extern union {
if (fn_info.alignment != 0) {
try writer.print("align({d}) ", .{fn_info.alignment});
}
try print(fn_info.return_type, writer, mod);
if (fn_info.return_type.tag() == .generic_poison) {
try writer.writeAll("anytype");
} else {
try print(fn_info.return_type, writer, mod);
}
},

.error_union => {
@@ -4125,14 +4133,15 @@ pub const Type = extern union {
/// TODO this is deprecated in favor of `childType`.
pub const elemType = childType;

/// For *[N]T, returns T.
/// For ?*T, returns T.
/// For ?*[N]T, returns T.
/// For ?[*]T, returns T.
/// For *T, returns T.
/// For [*]T, returns T.
/// For [N]T, returns T.
/// For []T, returns T.
/// For *[N]T, returns T.
/// For ?*T, returns T.
/// For ?*[N]T, returns T.
/// For ?[*]T, returns T.
/// For *T, returns T.
/// For [*]T, returns T.
/// For [N]T, returns T.
/// For []T, returns T.
/// For anyframe->T, returns T.
pub fn elemType2(ty: Type) Type {
return switch (ty.tag()) {
.vector => ty.castTag(.vector).?.data.elem_type,
@@ -4173,6 +4182,9 @@ pub const Type = extern union {
.optional_single_mut_pointer => ty.castPointer().?.data,
.optional_single_const_pointer => ty.castPointer().?.data,

.anyframe_T => ty.castTag(.anyframe_T).?.data,
.@"anyframe" => Type.@"void",

else => unreachable,
};
}
@@ -1174,6 +1174,10 @@ pub const Value = extern union {
return;
}
switch (ty.zigTypeTag()) {
.Void => {},
.Bool => {
buffer[0] = @boolToInt(val.toBool());
},
.Int => {
var bigint_buffer: BigIntSpace = undefined;
const bigint = val.toBigInt(&bigint_buffer, target);
@@ -1291,6 +1295,14 @@ pub const Value = extern union {
) Allocator.Error!Value {
const target = mod.getTarget();
switch (ty.zigTypeTag()) {
.Void => return Value.@"void",
.Bool => {
if (buffer[0] == 0) {
return Value.@"false";
} else {
return Value.@"true";
}
},
.Int => {
if (buffer.len == 0) return Value.zero;
const int_info = ty.intInfo(target);
@@ -1311,7 +1323,7 @@ pub const Value = extern union {
128 => return Value.Tag.float_128.create(arena, floatReadFromMemory(f128, target, buffer)),
else => unreachable,
},
.Array => {
.Array, .Vector => {
const elem_ty = ty.childType();
const elem_size = elem_ty.abiSize(target);
const elems = try arena.alloc(Value, @intCast(usize, ty.arrayLen()));

@@ -573,3 +573,12 @@ test "type coercion of pointer to anon struct literal to pointer to array" {
try S.doTheTest();
comptime try S.doTheTest();
}

test "array with comptime only element type" {
const a = [_]type{
u32,
i32,
};
try testing.expect(a[0] == u32);
try testing.expect(a[1] == i32);
}
@@ -23,9 +23,9 @@ pub export fn entry3() void {
// error
// backend=stage2,llvm
//
// :6:33: error: values of type '[2]fn() callconv(.C) void' must be comptime known, but index value is runtime known
// :6:33: note: use '*const fn() callconv(.C) void' for a function pointer type
// :13:33: error: values of type '[2]fn() callconv(.C) void' must be comptime known, but index value is runtime known
// :13:33: note: use '*const fn() callconv(.C) void' for a function pointer type
// :19:33: error: values of type '[2]fn() callconv(.C) void' must be comptime known, but index value is runtime known
// :19:33: note: use '*const fn() callconv(.C) void' for a function pointer type
// :6:5: error: values of type '[2]fn() void' must be comptime known, but index value is runtime known
// :6:5: note: use '*const fn() void' for a function pointer type
// :13:5: error: values of type '[2]fn() void' must be comptime known, but index value is runtime known
// :13:5: note: use '*const fn() void' for a function pointer type
// :19:5: error: values of type '[2]fn() void' must be comptime known, but index value is runtime known
// :19:5: note: use '*const fn() void' for a function pointer type