From 9aae4d57cb6baf409b54d67a8a312624f8ad84b6 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 14:27:45 +0300 Subject: [PATCH 01/11] compiler_rt: fix infinite loop --- lib/compiler_rt/log2.zig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/compiler_rt/log2.zig b/lib/compiler_rt/log2.zig index 53f35c9a80..aa294b33fd 100644 --- a/lib/compiler_rt/log2.zig +++ b/lib/compiler_rt/log2.zig @@ -147,7 +147,8 @@ pub fn __log2x(a: f80) callconv(.C) f80 { } pub fn log2q(a: f128) callconv(.C) f128 { - return math.log2(a); + // TODO: more correct implementation + return log2(@floatCast(f64, a)); } pub fn log2l(x: c_longdouble) callconv(.C) c_longdouble { From 1258b5f7d6fc9b56379c2c671ed55286f7e5b663 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 14:28:10 +0300 Subject: [PATCH 02/11] Type: implement elemType2 for anyframe --- src/type.zig | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/src/type.zig b/src/type.zig index fc9841e28b..a1adec491d 100644 --- a/src/type.zig +++ b/src/type.zig @@ -784,7 +784,7 @@ pub const Type = extern union { .anyframe_T => { if (b.zigTypeTag() != .AnyFrame) return false; - return a.childType().eql(b.childType(), mod); + return a.elemType2().eql(b.elemType2(), mod); }, .empty_struct => { @@ -4125,14 +4125,15 @@ pub const Type = extern union { /// TODO this is deprecated in favor of `childType`. pub const elemType = childType; - /// For *[N]T, returns T. - /// For ?*T, returns T. - /// For ?*[N]T, returns T. - /// For ?[*]T, returns T. - /// For *T, returns T. - /// For [*]T, returns T. - /// For [N]T, returns T. - /// For []T, returns T. + /// For *[N]T, returns T. + /// For ?*T, returns T. + /// For ?*[N]T, returns T. + /// For ?[*]T, returns T. + /// For *T, returns T. + /// For [*]T, returns T. + /// For [N]T, returns T. + /// For []T, returns T. + /// For anyframe->T, returns T. 
pub fn elemType2(ty: Type) Type { return switch (ty.tag()) { .vector => ty.castTag(.vector).?.data.elem_type, @@ -4173,6 +4174,9 @@ pub const Type = extern union { .optional_single_mut_pointer => ty.castPointer().?.data, .optional_single_const_pointer => ty.castPointer().?.data, + .anyframe_T => ty.castTag(.anyframe_T).?.data, + .@"anyframe" => Type.@"void", + else => unreachable, }; } From 3db4513b33425d34ad9def8af8bd52536a85fecd Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 14:28:56 +0300 Subject: [PATCH 03/11] Sema: fix type of alloc --- src/Sema.zig | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/Sema.zig b/src/Sema.zig index fd3dab4866..e56f5f1e03 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -13465,7 +13465,12 @@ fn zirStructInit( } if (is_ref) { - const alloc = try block.addTy(.alloc, resolved_ty); + const target = sema.mod.getTarget(); + const alloc_ty = try Type.ptr(sema.arena, sema.mod, .{ + .pointee_type = resolved_ty, + .@"addrspace" = target_util.defaultAddressSpace(target, .local), + }); + const alloc = try block.addTy(.alloc, alloc_ty); const field_ptr = try sema.unionFieldPtr(block, field_src, alloc, field_name, field_src, resolved_ty); try sema.storePtr(block, src, field_ptr, init_inst); return alloc; From 5c65b086d68280d49037ae316db0aba76fe5cd72 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 14:29:24 +0300 Subject: [PATCH 04/11] Value: implement {read,write}Value for more types --- src/value.zig | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/value.zig b/src/value.zig index da8f7a2c62..a80d788894 100644 --- a/src/value.zig +++ b/src/value.zig @@ -1174,6 +1174,10 @@ pub const Value = extern union { return; } switch (ty.zigTypeTag()) { + .Void => {}, + .Bool => { + buffer[0] = @boolToInt(val.toBool()); + }, .Int => { var bigint_buffer: BigIntSpace = undefined; const bigint = val.toBigInt(&bigint_buffer, target); @@ -1291,6 +1295,14 @@ pub const Value = extern union { ) Allocator.Error!Value { const target = mod.getTarget(); switch (ty.zigTypeTag()) { + .Void => return Value.@"void", + .Bool => { + if (buffer[0] == 0) { + return Value.@"false"; + } else { + return Value.@"true"; + } + }, .Int => { if (buffer.len == 0) return Value.zero; const int_info = ty.intInfo(target); @@ -1311,7 +1323,7 @@ pub const Value = extern union { 128 => return Value.Tag.float_128.create(arena, floatReadFromMemory(f128, target, buffer)), else => unreachable, }, - .Array => { + .Array, .Vector => { const elem_ty = ty.childType(); const elem_size = elem_ty.abiSize(target); const elems = try arena.alloc(Value, @intCast(usize, ty.arrayLen())); From 8f45e81c840c79097850bb87bbee1303e6d87dd4 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 15:10:37 +0300 Subject: [PATCH 05/11] stage2: ignore asm inputs named `_` This is a hacky solution but the entire asm syntax is supposed to be reworked anyways. 
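For illustration only (a hypothetical snippet, not taken from this patch): the pattern this
change accepts is an asm input operand bound to `_`, which carries no usable symbolic name.
Two or more such inputs would otherwise collide in the backend's name map (which asserts
no clobbering), so the LLVM backend now simply skips registering them. The syscall and
register choices below are placeholders assuming an x86_64 Linux target.

    fn write3(fd: usize, buf: [*]const u8, len: usize) usize {
        return asm volatile ("syscall"
            : [ret] "={rax}" (-> usize),
            : [_] "{rax}" (@as(usize, 1)), // write(2); the `_` name is ignored
              [_] "{rdi}" (fd), // a second `_` no longer hits the no-clobber assert
              [_] "{rsi}" (@ptrToInt(buf)),
              [_] "{rdx}" (len),
            : "rcx", "r11", "memory"
        );
    }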
--- src/codegen/llvm.zig | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig index c7282fa22b..aab2dfe51a 100644 --- a/src/codegen/llvm.zig +++ b/src/codegen/llvm.zig @@ -5389,7 +5389,9 @@ pub const FuncGen = struct { } llvm_constraints.appendSliceAssumeCapacity(constraint); - name_map.putAssumeCapacityNoClobber(name, {}); + if (!std.mem.eql(u8, name, "_")) { + name_map.putAssumeCapacityNoClobber(name, {}); + } llvm_param_i += 1; total_i += 1; } From 019537cb2a447fc28365685e71d323092e1830b0 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 15:24:58 +0300 Subject: [PATCH 06/11] Sema: `@sizeOf` function should give an error --- src/Sema.zig | 2 +- src/type.zig | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/Sema.zig b/src/Sema.zig index e56f5f1e03..074a102c95 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -11547,7 +11547,7 @@ fn zirSizeOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node }; const ty = try sema.resolveType(block, operand_src, inst_data.operand); switch (ty.zigTypeTag()) { - .Fn => unreachable, + .Fn, .NoReturn, .Undefined, .Null, diff --git a/src/type.zig b/src/type.zig index a1adec491d..ee669df620 100644 --- a/src/type.zig +++ b/src/type.zig @@ -2035,7 +2035,11 @@ pub const Type = extern union { try writer.writeAll("fn("); for (fn_info.param_types) |param_ty, i| { if (i != 0) try writer.writeAll(", "); - try print(param_ty, writer, mod); + if (param_ty.tag() == .generic_poison) { + try writer.writeAll("anytype"); + } else { + try print(param_ty, writer, mod); + } } if (fn_info.is_var_args) { if (fn_info.param_types.len != 0) { @@ -2052,7 +2056,11 @@ pub const Type = extern union { if (fn_info.alignment != 0) { try writer.print("align({d}) ", .{fn_info.alignment}); } - try print(fn_info.return_type, writer, mod); + if (fn_info.return_type.tag() == .generic_poison) { + try writer.writeAll("anytype"); + } else { + try print(fn_info.return_type, writer, mod); + } }, .error_union => { From 4e1aa5d54377aa2b8d1395d666efb6eb935b7917 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 15:25:53 +0300 Subject: [PATCH 07/11] Sema: handle the_only_possible_value in beginComptimePtrMutation --- src/Sema.zig | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/Sema.zig b/src/Sema.zig index 074a102c95..05ce2adb77 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -20405,6 +20405,16 @@ fn beginComptimePtrMutation( .ty = elem_ty, }, + .the_only_possible_value => { + const duped = try sema.arena.create(Value); + duped.* = Value.initTag(.the_only_possible_value); + return ComptimePtrMutationKit{ + .decl_ref_mut = parent.decl_ref_mut, + .val = duped, + .ty = elem_ty, + }; + }, + else => unreachable, } }, From 2b93546b39a85e9316c2bf12f336c357577114fa Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 16:10:18 +0300 Subject: [PATCH 08/11] Sema: fix initialization of array with comptime only elem type --- src/Sema.zig | 20 +++++++++++--------- test/behavior/array.zig | 9 +++++++++ 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/src/Sema.zig b/src/Sema.zig index 05ce2adb77..d3ca6a8cc5 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -3598,7 +3598,7 @@ fn zirValidateArrayInit( // any ZIR instructions at comptime; we need to do that here. 
if (array_ty.sentinel()) |sentinel_val| { const array_len_ref = try sema.addIntUnsigned(Type.usize, array_len); - const sentinel_ptr = try sema.elemPtrArray(block, init_src, array_ptr, init_src, array_len_ref); + const sentinel_ptr = try sema.elemPtrArray(block, init_src, array_ptr, init_src, array_len_ref, true); const sentinel = try sema.addConstant(array_ty.childType(), sentinel_val); try sema.storePtr2(block, init_src, sentinel_ptr, init_src, sentinel, init_src, .store); } @@ -7540,7 +7540,7 @@ fn zirElemPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air const bin_inst = sema.code.instructions.items(.data)[inst].bin; const array_ptr = try sema.resolveInst(bin_inst.lhs); const elem_index = try sema.resolveInst(bin_inst.rhs); - return sema.elemPtr(block, sema.src, array_ptr, elem_index, sema.src); + return sema.elemPtr(block, sema.src, array_ptr, elem_index, sema.src, false); } fn zirElemPtrNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { @@ -7553,7 +7553,7 @@ fn zirElemPtrNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data; const array_ptr = try sema.resolveInst(extra.lhs); const elem_index = try sema.resolveInst(extra.rhs); - return sema.elemPtr(block, src, array_ptr, elem_index, elem_index_src); + return sema.elemPtr(block, src, array_ptr, elem_index, elem_index_src, false); } fn zirElemPtrImm(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { @@ -7565,7 +7565,7 @@ fn zirElemPtrImm(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError! const extra = sema.code.extraData(Zir.Inst.ElemPtrImm, inst_data.payload_index).data; const array_ptr = try sema.resolveInst(extra.ptr); const elem_index = try sema.addIntUnsigned(Type.usize, extra.index); - return sema.elemPtr(block, src, array_ptr, elem_index, src); + return sema.elemPtr(block, src, array_ptr, elem_index, src, true); } fn zirSliceStart(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { @@ -18724,6 +18724,7 @@ fn elemPtr( indexable_ptr: Air.Inst.Ref, elem_index: Air.Inst.Ref, elem_index_src: LazySrcLoc, + init: bool, ) CompileError!Air.Inst.Ref { const indexable_ptr_src = src; // TODO better source location const indexable_ptr_ty = sema.typeOf(indexable_ptr); @@ -18760,11 +18761,11 @@ fn elemPtr( }, .One => { assert(indexable_ty.childType().zigTypeTag() == .Array); // Guaranteed by isIndexable - return sema.elemPtrArray(block, indexable_ptr_src, indexable, elem_index_src, elem_index); + return sema.elemPtrArray(block, indexable_ptr_src, indexable, elem_index_src, elem_index, init); }, } }, - .Array, .Vector => return sema.elemPtrArray(block, indexable_ptr_src, indexable_ptr, elem_index_src, elem_index), + .Array, .Vector => return sema.elemPtrArray(block, indexable_ptr_src, indexable_ptr, elem_index_src, elem_index, init), .Struct => { // Tuple field access. 
const index_val = try sema.resolveConstValue(block, elem_index_src, elem_index); @@ -18818,7 +18819,7 @@ fn elemVal( }, .One => { assert(indexable_ty.childType().zigTypeTag() == .Array); // Guaranteed by isIndexable - const elem_ptr = try sema.elemPtr(block, indexable_src, indexable, elem_index, elem_index_src); + const elem_ptr = try sema.elemPtr(block, indexable_src, indexable, elem_index, elem_index_src, false); return sema.analyzeLoad(block, indexable_src, elem_ptr, elem_index_src); }, }, @@ -18999,6 +19000,7 @@ fn elemPtrArray( array_ptr: Air.Inst.Ref, elem_index_src: LazySrcLoc, elem_index: Air.Inst.Ref, + init: bool, ) CompileError!Air.Inst.Ref { const target = sema.mod.getTarget(); const array_ptr_ty = sema.typeOf(array_ptr); @@ -19035,7 +19037,7 @@ fn elemPtrArray( } const valid_rt = try sema.validateRunTimeType(block, elem_index_src, array_ty.elemType2(), false); - if (!valid_rt) { + if (!valid_rt and !init) { const msg = msg: { const msg = try sema.errMsg( block, @@ -20138,7 +20140,7 @@ fn storePtr2( const elem_src = operand_src; // TODO better source location const elem = try tupleField(sema, block, operand_src, uncasted_operand, elem_src, i); const elem_index = try sema.addIntUnsigned(Type.usize, i); - const elem_ptr = try sema.elemPtr(block, ptr_src, ptr, elem_index, elem_src); + const elem_ptr = try sema.elemPtr(block, ptr_src, ptr, elem_index, elem_src, false); try sema.storePtr2(block, src, elem_ptr, elem_src, elem, elem_src, .store); } return; diff --git a/test/behavior/array.zig b/test/behavior/array.zig index 93de42df67..bccff1edf0 100644 --- a/test/behavior/array.zig +++ b/test/behavior/array.zig @@ -573,3 +573,12 @@ test "type coercion of pointer to anon struct literal to pointer to array" { try S.doTheTest(); comptime try S.doTheTest(); } + +test "array with comptime only element type" { + const a = [_]type{ + u32, + i32, + }; + try testing.expect(a[0] == u32); + try testing.expect(a[1] == i32); +} From 1a7b4ddeaedb81255cfa8907958c3cf09dd340ee Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 15:30:25 +0300 Subject: [PATCH 09/11] std: disable tests that crash stage2 --- lib/std/compress.zig | 1 + lib/std/event/batch.zig | 1 + lib/std/fmt.zig | 4 ++++ lib/std/segmented_list.zig | 5 ++++- lib/std/unicode.zig | 1 + lib/std/x.zig | 1 + 6 files changed, 12 insertions(+), 1 deletion(-) diff --git a/lib/std/compress.zig b/lib/std/compress.zig index 7fa25175d5..1d671f1aa6 100644 --- a/lib/std/compress.zig +++ b/lib/std/compress.zig @@ -5,6 +5,7 @@ pub const gzip = @import("compress/gzip.zig"); pub const zlib = @import("compress/zlib.zig"); test { + if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest; _ = deflate; _ = gzip; _ = zlib; diff --git a/lib/std/event/batch.zig b/lib/std/event/batch.zig index 4165f88f48..ba50d4bee5 100644 --- a/lib/std/event/batch.zig +++ b/lib/std/event/batch.zig @@ -109,6 +109,7 @@ pub fn Batch( } test "std.event.Batch" { + if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest; var count: usize = 0; var batch = Batch(void, 2, .auto_async).init(); batch.add(&async sleepALittle(&count)); diff --git a/lib/std/fmt.zig b/lib/std/fmt.zig index 939921535d..20f1319d50 100644 --- a/lib/std/fmt.zig +++ b/lib/std/fmt.zig @@ -2111,6 +2111,7 @@ test "slice" { } test "escape non-printable" { + if (builtin.zig_backend != .stage1) return error.SkipZigTest; try expectFmt("abc", "{s}", .{fmtSliceEscapeLower("abc")}); try expectFmt("ab\\xffc", "{s}", .{fmtSliceEscapeLower("ab\xffc")}); try expectFmt("ab\\xFFc", 
"{s}", .{fmtSliceEscapeUpper("ab\xffc")}); @@ -2146,6 +2147,7 @@ test "cstr" { } test "filesize" { + if (builtin.zig_backend != .stage1) return error.SkipZigTest; try expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeDec(42)}); try expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeBin(42)}); try expectFmt("file size: 63MB\n", "file size: {}\n", .{fmtIntSizeDec(63 * 1000 * 1000)}); @@ -2445,6 +2447,7 @@ test "struct.zero-size" { } test "bytes.hex" { + if (builtin.zig_backend != .stage1) return error.SkipZigTest; const some_bytes = "\xCA\xFE\xBA\xBE"; try expectFmt("lowercase: cafebabe\n", "lowercase: {x}\n", .{fmtSliceHexLower(some_bytes)}); try expectFmt("uppercase: CAFEBABE\n", "uppercase: {X}\n", .{fmtSliceHexUpper(some_bytes)}); @@ -2476,6 +2479,7 @@ pub fn hexToBytes(out: []u8, input: []const u8) ![]u8 { } test "hexToBytes" { + if (builtin.zig_backend != .stage1) return error.SkipZigTest; var buf: [32]u8 = undefined; try expectFmt("90" ** 32, "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "90" ** 32))}); try expectFmt("ABCD", "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "ABCD"))}); diff --git a/lib/std/segmented_list.zig b/lib/std/segmented_list.zig index 72e956a637..81ad6f6211 100644 --- a/lib/std/segmented_list.zig +++ b/lib/std/segmented_list.zig @@ -391,7 +391,10 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type } test "SegmentedList basic usage" { - try testSegmentedList(0); + if (@import("builtin").zig_backend == .stage1) { + // https://github.com/ziglang/zig/issues/11787 + try testSegmentedList(0); + } try testSegmentedList(1); try testSegmentedList(2); try testSegmentedList(4); diff --git a/lib/std/unicode.zig b/lib/std/unicode.zig index 81a7ed838f..a0cf7f6624 100644 --- a/lib/std/unicode.zig +++ b/lib/std/unicode.zig @@ -804,6 +804,7 @@ pub fn fmtUtf16le(utf16le: []const u16) std.fmt.Formatter(formatUtf16le) { } test "fmtUtf16le" { + if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest; const expectFmt = std.testing.expectFmt; try expectFmt("", "{}", .{fmtUtf16le(utf8ToUtf16LeStringLiteral(""))}); try expectFmt("foo", "{}", .{fmtUtf16le(utf8ToUtf16LeStringLiteral("foo"))}); diff --git a/lib/std/x.zig b/lib/std/x.zig index 64caf324ed..bafcdd5426 100644 --- a/lib/std/x.zig +++ b/lib/std/x.zig @@ -13,6 +13,7 @@ pub const net = struct { }; test { + if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest; inline for (.{ os, net }) |module| { std.testing.refAllDecls(module); } From 6d44c0a16c90a13cb3507751e2015edf51c642cf Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 16:02:23 +0300 Subject: [PATCH 10/11] std: update tests to stage2 semantics --- lib/std/crypto/25519/scalar.zig | 12 +- lib/std/crypto/blake3.zig | 9 +- lib/std/crypto/sha3.zig | 8 +- lib/std/fmt.zig | 1 + lib/std/heap.zig | 3 +- lib/std/heap/log_to_writer_allocator.zig | 3 +- lib/std/io/reader.zig | 96 ++++++---- lib/std/json.zig | 219 ++++++++++++++--------- lib/std/meta.zig | 5 +- lib/std/net.zig | 12 +- lib/std/zig/c_translation.zig | 36 +++- 11 files changed, 261 insertions(+), 143 deletions(-) diff --git a/lib/std/crypto/25519/scalar.zig b/lib/std/crypto/25519/scalar.zig index c3170673d1..c3e7fb4fa0 100644 --- a/lib/std/crypto/25519/scalar.zig +++ b/lib/std/crypto/25519/scalar.zig @@ -34,12 +34,14 @@ pub fn rejectNonCanonical(s: CompressedScalar) NonCanonicalError!void { /// Reduce a scalar to the field size. 
pub fn reduce(s: CompressedScalar) CompressedScalar { - return Scalar.fromBytes(s).toBytes(); + var scalar = Scalar.fromBytes(s); + return scalar.toBytes(); } /// Reduce a 64-bytes scalar to the field size. pub fn reduce64(s: [64]u8) CompressedScalar { - return ScalarDouble.fromBytes64(s).toBytes(); + var scalar = ScalarDouble.fromBytes64(s); + return scalar.toBytes(); } /// Perform the X25519 "clamping" operation. @@ -106,12 +108,14 @@ pub const Scalar = struct { /// Unpack a 32-byte representation of a scalar pub fn fromBytes(bytes: CompressedScalar) Scalar { - return ScalarDouble.fromBytes32(bytes).reduce(5); + var scalar = ScalarDouble.fromBytes32(bytes); + return scalar.reduce(5); } /// Unpack a 64-byte representation of a scalar pub fn fromBytes64(bytes: [64]u8) Scalar { - return ScalarDouble.fromBytes64(bytes).reduce(5); + var scalar = ScalarDouble.fromBytes64(bytes); + return scalar.reduce(5); } /// Pack a scalar into bytes diff --git a/lib/std/crypto/blake3.zig b/lib/std/crypto/blake3.zig index a31dcc814a..762ec67f31 100644 --- a/lib/std/crypto/blake3.zig +++ b/lib/std/crypto/blake3.zig @@ -679,9 +679,12 @@ fn testBlake3(hasher: *Blake3, input_len: usize, expected_hex: [262]u8) !void { } test "BLAKE3 reference test cases" { - var hash = &Blake3.init(.{}); - var keyed_hash = &Blake3.init(.{ .key = reference_test.key.* }); - var derive_key = &Blake3.initKdf(reference_test.context_string, .{}); + var hash_state = Blake3.init(.{}); + const hash = &hash_state; + var keyed_hash_state = Blake3.init(.{ .key = reference_test.key.* }); + const keyed_hash = &keyed_hash_state; + var derive_key_state = Blake3.initKdf(reference_test.context_string, .{}); + const derive_key = &derive_key_state; for (reference_test.cases) |t| { try testBlake3(hash, t.input_len, t.hash.*); diff --git a/lib/std/crypto/sha3.zig b/lib/std/crypto/sha3.zig index ccc0775df1..567b39bfc3 100644 --- a/lib/std/crypto/sha3.zig +++ b/lib/std/crypto/sha3.zig @@ -128,18 +128,16 @@ fn keccakF(comptime F: usize, d: *[F / 8]u8) void { r.* = mem.readIntLittle(u64, d[8 * i ..][0..8]); } - comptime var x: usize = 0; - comptime var y: usize = 0; for (RC[0..no_rounds]) |round| { // theta - x = 0; + comptime var x: usize = 0; inline while (x < 5) : (x += 1) { c[x] = s[x] ^ s[x + 5] ^ s[x + 10] ^ s[x + 15] ^ s[x + 20]; } x = 0; inline while (x < 5) : (x += 1) { t[0] = c[M5[x + 4]] ^ math.rotl(u64, c[M5[x + 1]], @as(usize, 1)); - y = 0; + comptime var y: usize = 0; inline while (y < 5) : (y += 1) { s[x + y * 5] ^= t[0]; } @@ -155,7 +153,7 @@ fn keccakF(comptime F: usize, d: *[F / 8]u8) void { } // chi - y = 0; + comptime var y: usize = 0; inline while (y < 5) : (y += 1) { x = 0; inline while (x < 5) : (x += 1) { diff --git a/lib/std/fmt.zig b/lib/std/fmt.zig index 20f1319d50..9afb556b5a 100644 --- a/lib/std/fmt.zig +++ b/lib/std/fmt.zig @@ -2123,6 +2123,7 @@ test "pointer" { try expectFmt("pointer: i32@deadbeef\n", "pointer: {}\n", .{value}); try expectFmt("pointer: i32@deadbeef\n", "pointer: {*}\n", .{value}); } + if (builtin.zig_backend != .stage1) return error.SkipZigTest; { const value = @intToPtr(fn () void, 0xdeadbeef); try expectFmt("pointer: fn() void@deadbeef\n", "pointer: {}\n", .{value}); diff --git a/lib/std/heap.zig b/lib/std/heap.zig index cbbe111a26..b71cdb7932 100644 --- a/lib/std/heap.zig +++ b/lib/std/heap.zig @@ -1210,7 +1210,8 @@ pub fn testAllocatorAlignedShrink(base_allocator: mem.Allocator) !void { const allocator = validationAllocator.allocator(); var debug_buffer: [1000]u8 = undefined; - const debug_allocator = 
FixedBufferAllocator.init(&debug_buffer).allocator(); + var fib = FixedBufferAllocator.init(&debug_buffer); + const debug_allocator = fib.allocator(); const alloc_size = mem.page_size * 2 + 50; var slice = try allocator.alignedAlloc(u8, 16, alloc_size); diff --git a/lib/std/heap/log_to_writer_allocator.zig b/lib/std/heap/log_to_writer_allocator.zig index c63c1a826f..15f1f30b40 100644 --- a/lib/std/heap/log_to_writer_allocator.zig +++ b/lib/std/heap/log_to_writer_allocator.zig @@ -91,7 +91,8 @@ test "LogToWriterAllocator" { var allocator_buf: [10]u8 = undefined; var fixedBufferAllocator = std.mem.validationWrap(std.heap.FixedBufferAllocator.init(&allocator_buf)); - const allocator = logToWriterAllocator(fixedBufferAllocator.allocator(), fbs.writer()).allocator(); + var allocator_state = logToWriterAllocator(fixedBufferAllocator.allocator(), fbs.writer()); + const allocator = allocator_state.allocator(); var a = try allocator.alloc(u8, 10); a = allocator.shrink(a, 5); diff --git a/lib/std/io/reader.zig b/lib/std/io/reader.zig index 6d216c71a5..16acef8e48 100644 --- a/lib/std/io/reader.zig +++ b/lib/std/io/reader.zig @@ -344,7 +344,8 @@ pub fn Reader( test "Reader" { var buf = "a\x02".*; - const reader = std.io.fixedBufferStream(&buf).reader(); + var fis = std.io.fixedBufferStream(&buf); + const reader = fis.reader(); try testing.expect((try reader.readByte()) == 'a'); try testing.expect((try reader.readEnum(enum(u8) { a = 0, @@ -356,13 +357,15 @@ test "Reader" { } test "Reader.isBytes" { - const reader = std.io.fixedBufferStream("foobar").reader(); + var fis = std.io.fixedBufferStream("foobar"); + const reader = fis.reader(); try testing.expectEqual(true, try reader.isBytes("foo")); try testing.expectEqual(false, try reader.isBytes("qux")); } test "Reader.skipBytes" { - const reader = std.io.fixedBufferStream("foobar").reader(); + var fis = std.io.fixedBufferStream("foobar"); + const reader = fis.reader(); try reader.skipBytes(3, .{}); try testing.expect(try reader.isBytes("bar")); try reader.skipBytes(0, .{}); @@ -374,7 +377,8 @@ test "Reader.readUntilDelimiterArrayList returns ArrayLists with bytes read unti var list = std.ArrayList(u8).init(a); defer list.deinit(); - const reader = std.io.fixedBufferStream("0000\n1234\n").reader(); + var fis = std.io.fixedBufferStream("0000\n1234\n"); + const reader = fis.reader(); try reader.readUntilDelimiterArrayList(&list, '\n', 5); try std.testing.expectEqualStrings("0000", list.items); @@ -388,7 +392,8 @@ test "Reader.readUntilDelimiterArrayList returns an empty ArrayList" { var list = std.ArrayList(u8).init(a); defer list.deinit(); - const reader = std.io.fixedBufferStream("\n").reader(); + var fis = std.io.fixedBufferStream("\n"); + const reader = fis.reader(); try reader.readUntilDelimiterArrayList(&list, '\n', 5); try std.testing.expectEqualStrings("", list.items); @@ -399,7 +404,8 @@ test "Reader.readUntilDelimiterArrayList returns StreamTooLong, then an ArrayLis var list = std.ArrayList(u8).init(a); defer list.deinit(); - const reader = std.io.fixedBufferStream("1234567\n").reader(); + var fis = std.io.fixedBufferStream("1234567\n"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterArrayList(&list, '\n', 5)); try std.testing.expectEqualStrings("12345", list.items); @@ -412,7 +418,8 @@ test "Reader.readUntilDelimiterArrayList returns EndOfStream" { var list = std.ArrayList(u8).init(a); defer list.deinit(); - const reader = std.io.fixedBufferStream("1234").reader(); + var fis = 
std.io.fixedBufferStream("1234"); + const reader = fis.reader(); try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiterArrayList(&list, '\n', 5)); try std.testing.expectEqualStrings("1234", list.items); @@ -421,7 +428,8 @@ test "Reader.readUntilDelimiterArrayList returns EndOfStream" { test "Reader.readUntilDelimiterAlloc returns ArrayLists with bytes read until the delimiter, then EndOfStream" { const a = std.testing.allocator; - const reader = std.io.fixedBufferStream("0000\n1234\n").reader(); + var fis = std.io.fixedBufferStream("0000\n1234\n"); + const reader = fis.reader(); { var result = try reader.readUntilDelimiterAlloc(a, '\n', 5); @@ -441,7 +449,8 @@ test "Reader.readUntilDelimiterAlloc returns ArrayLists with bytes read until th test "Reader.readUntilDelimiterAlloc returns an empty ArrayList" { const a = std.testing.allocator; - const reader = std.io.fixedBufferStream("\n").reader(); + var fis = std.io.fixedBufferStream("\n"); + const reader = fis.reader(); { var result = try reader.readUntilDelimiterAlloc(a, '\n', 5); @@ -453,7 +462,8 @@ test "Reader.readUntilDelimiterAlloc returns an empty ArrayList" { test "Reader.readUntilDelimiterAlloc returns StreamTooLong, then an ArrayList with bytes read until the delimiter" { const a = std.testing.allocator; - const reader = std.io.fixedBufferStream("1234567\n").reader(); + var fis = std.io.fixedBufferStream("1234567\n"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterAlloc(a, '\n', 5)); @@ -465,67 +475,77 @@ test "Reader.readUntilDelimiterAlloc returns StreamTooLong, then an ArrayList wi test "Reader.readUntilDelimiterAlloc returns EndOfStream" { const a = std.testing.allocator; - const reader = std.io.fixedBufferStream("1234").reader(); + var fis = std.io.fixedBufferStream("1234"); + const reader = fis.reader(); try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiterAlloc(a, '\n', 5)); } test "Reader.readUntilDelimiter returns bytes read until the delimiter" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("0000\n1234\n").reader(); + var fis = std.io.fixedBufferStream("0000\n1234\n"); + const reader = fis.reader(); try std.testing.expectEqualStrings("0000", try reader.readUntilDelimiter(&buf, '\n')); try std.testing.expectEqualStrings("1234", try reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter returns an empty string" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("\n").reader(); + var fis = std.io.fixedBufferStream("\n"); + const reader = fis.reader(); try std.testing.expectEqualStrings("", try reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter returns StreamTooLong, then an empty string" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("12345\n").reader(); + var fis = std.io.fixedBufferStream("12345\n"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n')); try std.testing.expectEqualStrings("", try reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter returns StreamTooLong, then bytes read until the delimiter" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("1234567\n").reader(); + var fis = std.io.fixedBufferStream("1234567\n"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n')); try std.testing.expectEqualStrings("67", try 
reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter returns EndOfStream" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("").reader(); + var fis = std.io.fixedBufferStream(""); + const reader = fis.reader(); try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter returns bytes read until delimiter, then EndOfStream" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("1234\n").reader(); + var fis = std.io.fixedBufferStream("1234\n"); + const reader = fis.reader(); try std.testing.expectEqualStrings("1234", try reader.readUntilDelimiter(&buf, '\n')); try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter returns EndOfStream" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("1234").reader(); + var fis = std.io.fixedBufferStream("1234"); + const reader = fis.reader(); try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter returns StreamTooLong, then EndOfStream" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("12345").reader(); + var fis = std.io.fixedBufferStream("12345"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n')); try std.testing.expectError(error.EndOfStream, reader.readUntilDelimiter(&buf, '\n')); } test "Reader.readUntilDelimiter writes all bytes read to the output buffer" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("0000\n12345").reader(); + var fis = std.io.fixedBufferStream("0000\n12345"); + const reader = fis.reader(); _ = try reader.readUntilDelimiter(&buf, '\n'); try std.testing.expectEqualStrings("0000\n", &buf); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiter(&buf, '\n')); @@ -535,7 +555,8 @@ test "Reader.readUntilDelimiter writes all bytes read to the output buffer" { test "Reader.readUntilDelimiterOrEofAlloc returns ArrayLists with bytes read until the delimiter, then EndOfStream" { const a = std.testing.allocator; - const reader = std.io.fixedBufferStream("0000\n1234\n").reader(); + var fis = std.io.fixedBufferStream("0000\n1234\n"); + const reader = fis.reader(); { var result = (try reader.readUntilDelimiterOrEofAlloc(a, '\n', 5)).?; @@ -555,7 +576,8 @@ test "Reader.readUntilDelimiterOrEofAlloc returns ArrayLists with bytes read unt test "Reader.readUntilDelimiterOrEofAlloc returns an empty ArrayList" { const a = std.testing.allocator; - const reader = std.io.fixedBufferStream("\n").reader(); + var fis = std.io.fixedBufferStream("\n"); + const reader = fis.reader(); { var result = (try reader.readUntilDelimiterOrEofAlloc(a, '\n', 5)).?; @@ -567,7 +589,8 @@ test "Reader.readUntilDelimiterOrEofAlloc returns an empty ArrayList" { test "Reader.readUntilDelimiterOrEofAlloc returns StreamTooLong, then an ArrayList with bytes read until the delimiter" { const a = std.testing.allocator; - const reader = std.io.fixedBufferStream("1234567\n").reader(); + var fis = std.io.fixedBufferStream("1234567\n"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEofAlloc(a, '\n', 5)); @@ -578,60 +601,69 @@ test "Reader.readUntilDelimiterOrEofAlloc returns StreamTooLong, then an ArrayLi test "Reader.readUntilDelimiterOrEof returns bytes read until the delimiter" { var buf: [5]u8 = undefined; - const reader = 
std.io.fixedBufferStream("0000\n1234\n").reader(); + var fis = std.io.fixedBufferStream("0000\n1234\n"); + const reader = fis.reader(); try std.testing.expectEqualStrings("0000", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); try std.testing.expectEqualStrings("1234", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); } test "Reader.readUntilDelimiterOrEof returns an empty string" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("\n").reader(); + var fis = std.io.fixedBufferStream("\n"); + const reader = fis.reader(); try std.testing.expectEqualStrings("", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); } test "Reader.readUntilDelimiterOrEof returns StreamTooLong, then an empty string" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("12345\n").reader(); + var fis = std.io.fixedBufferStream("12345\n"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n')); try std.testing.expectEqualStrings("", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); } test "Reader.readUntilDelimiterOrEof returns StreamTooLong, then bytes read until the delimiter" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("1234567\n").reader(); + var fis = std.io.fixedBufferStream("1234567\n"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n')); try std.testing.expectEqualStrings("67", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); } test "Reader.readUntilDelimiterOrEof returns null" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("").reader(); + var fis = std.io.fixedBufferStream(""); + const reader = fis.reader(); try std.testing.expect((try reader.readUntilDelimiterOrEof(&buf, '\n')) == null); } test "Reader.readUntilDelimiterOrEof returns bytes read until delimiter, then null" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("1234\n").reader(); + var fis = std.io.fixedBufferStream("1234\n"); + const reader = fis.reader(); try std.testing.expectEqualStrings("1234", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); try std.testing.expect((try reader.readUntilDelimiterOrEof(&buf, '\n')) == null); } test "Reader.readUntilDelimiterOrEof returns bytes read until end-of-stream" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("1234").reader(); + var fis = std.io.fixedBufferStream("1234"); + const reader = fis.reader(); try std.testing.expectEqualStrings("1234", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); } test "Reader.readUntilDelimiterOrEof returns StreamTooLong, then bytes read until end-of-stream" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("1234567").reader(); + var fis = std.io.fixedBufferStream("1234567"); + const reader = fis.reader(); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n')); try std.testing.expectEqualStrings("67", (try reader.readUntilDelimiterOrEof(&buf, '\n')).?); } test "Reader.readUntilDelimiterOrEof writes all bytes read to the output buffer" { var buf: [5]u8 = undefined; - const reader = std.io.fixedBufferStream("0000\n12345").reader(); + var fis = std.io.fixedBufferStream("0000\n12345"); + const reader = fis.reader(); _ = try reader.readUntilDelimiterOrEof(&buf, '\n'); try std.testing.expectEqualStrings("0000\n", &buf); try std.testing.expectError(error.StreamTooLong, reader.readUntilDelimiterOrEof(&buf, '\n')); diff 
--git a/lib/std/json.zig b/lib/std/json.zig index 2650b98822..cc82c1d0a5 100644 --- a/lib/std/json.zig +++ b/lib/std/json.zig @@ -1506,42 +1506,46 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void { } test "skipValue" { - try skipValue(&TokenStream.init("false")); - try skipValue(&TokenStream.init("true")); - try skipValue(&TokenStream.init("null")); - try skipValue(&TokenStream.init("42")); - try skipValue(&TokenStream.init("42.0")); - try skipValue(&TokenStream.init("\"foo\"")); - try skipValue(&TokenStream.init("[101, 111, 121]")); - try skipValue(&TokenStream.init("{}")); - try skipValue(&TokenStream.init("{\"foo\": \"bar\"}")); + var ts = TokenStream.init("false"); + try skipValue(&ts); + ts = TokenStream.init("true"); + try skipValue(&ts); + ts = TokenStream.init("null"); + try skipValue(&ts); + ts = TokenStream.init("42"); + try skipValue(&ts); + ts = TokenStream.init("42.0"); + try skipValue(&ts); + ts = TokenStream.init("\"foo\""); + try skipValue(&ts); + ts = TokenStream.init("[101, 111, 121]"); + try skipValue(&ts); + ts = TokenStream.init("{}"); + try skipValue(&ts); + ts = TokenStream.init("{\"foo\": \"bar\"}"); + try skipValue(&ts); { // An absurd number of nestings const nestings = StreamingParser.default_max_nestings + 1; - try testing.expectError( - error.TooManyNestedItems, - skipValue(&TokenStream.init("[" ** nestings ++ "]" ** nestings)), - ); + ts = TokenStream.init("[" ** nestings ++ "]" ** nestings); + try testing.expectError(error.TooManyNestedItems, skipValue(&ts)); } { // Would a number token cause problems in a deeply-nested array? const nestings = StreamingParser.default_max_nestings; const deeply_nested_array = "[" ** nestings ++ "0.118, 999, 881.99, 911.9, 725, 3" ++ "]" ** nestings; - try skipValue(&TokenStream.init(deeply_nested_array)); + ts = TokenStream.init(deeply_nested_array); + try skipValue(&ts); - try testing.expectError( - error.TooManyNestedItems, - skipValue(&TokenStream.init("[" ++ deeply_nested_array ++ "]")), - ); + ts = TokenStream.init("[" ++ deeply_nested_array ++ "]"); + try testing.expectError(error.TooManyNestedItems, skipValue(&ts)); } // Mismatched brace/square bracket - try testing.expectError( - error.UnexpectedClosingBrace, - skipValue(&TokenStream.init("[102, 111, 111}")), - ); + ts = TokenStream.init("[102, 111, 111}"); + try testing.expectError(error.UnexpectedClosingBrace, skipValue(&ts)); { // should fail if no value found (e.g. 
immediate close of object) var empty_object = TokenStream.init("{}"); @@ -1980,18 +1984,29 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void { } test "parse" { - try testing.expectEqual(false, try parse(bool, &TokenStream.init("false"), ParseOptions{})); - try testing.expectEqual(true, try parse(bool, &TokenStream.init("true"), ParseOptions{})); - try testing.expectEqual(@as(u1, 1), try parse(u1, &TokenStream.init("1"), ParseOptions{})); - try testing.expectError(error.Overflow, parse(u1, &TokenStream.init("50"), ParseOptions{})); - try testing.expectEqual(@as(u64, 42), try parse(u64, &TokenStream.init("42"), ParseOptions{})); - try testing.expectEqual(@as(f64, 42), try parse(f64, &TokenStream.init("42.0"), ParseOptions{})); - try testing.expectEqual(@as(?bool, null), try parse(?bool, &TokenStream.init("null"), ParseOptions{})); - try testing.expectEqual(@as(?bool, true), try parse(?bool, &TokenStream.init("true"), ParseOptions{})); + var ts = TokenStream.init("false"); + try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{})); + ts = TokenStream.init("true"); + try testing.expectEqual(true, try parse(bool, &ts, ParseOptions{})); + ts = TokenStream.init("1"); + try testing.expectEqual(@as(u1, 1), try parse(u1, &ts, ParseOptions{})); + ts = TokenStream.init("50"); + try testing.expectError(error.Overflow, parse(u1, &ts, ParseOptions{})); + ts = TokenStream.init("42"); + try testing.expectEqual(@as(u64, 42), try parse(u64, &ts, ParseOptions{})); + ts = TokenStream.init("42.0"); + try testing.expectEqual(@as(f64, 42), try parse(f64, &ts, ParseOptions{})); + ts = TokenStream.init("null"); + try testing.expectEqual(@as(?bool, null), try parse(?bool, &ts, ParseOptions{})); + ts = TokenStream.init("true"); + try testing.expectEqual(@as(?bool, true), try parse(?bool, &ts, ParseOptions{})); - try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &TokenStream.init("\"foo\""), ParseOptions{})); - try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &TokenStream.init("[102, 111, 111]"), ParseOptions{})); - try testing.expectEqual(@as([0]u8, undefined), try parse([0]u8, &TokenStream.init("[]"), ParseOptions{})); + ts = TokenStream.init("\"foo\""); + try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{})); + ts = TokenStream.init("[102, 111, 111]"); + try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{})); + ts = TokenStream.init("[]"); + try testing.expectEqual(@as([0]u8, undefined), try parse([0]u8, &ts, ParseOptions{})); } test "parse into enum" { @@ -2000,36 +2015,48 @@ test "parse into enum" { Bar, @"with\\escape", }; - try testing.expectEqual(@as(T, .Foo), try parse(T, &TokenStream.init("\"Foo\""), ParseOptions{})); - try testing.expectEqual(@as(T, .Foo), try parse(T, &TokenStream.init("42"), ParseOptions{})); - try testing.expectEqual(@as(T, .@"with\\escape"), try parse(T, &TokenStream.init("\"with\\\\escape\""), ParseOptions{})); - try testing.expectError(error.InvalidEnumTag, parse(T, &TokenStream.init("5"), ParseOptions{})); - try testing.expectError(error.InvalidEnumTag, parse(T, &TokenStream.init("\"Qux\""), ParseOptions{})); + var ts = TokenStream.init("\"Foo\""); + try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{})); + ts = TokenStream.init("42"); + try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{})); + ts = TokenStream.init("\"with\\\\escape\""); + try testing.expectEqual(@as(T, .@"with\\escape"), try parse(T, &ts, ParseOptions{})); + 
ts = TokenStream.init("5"); + try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{})); + ts = TokenStream.init("\"Qux\""); + try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{})); } test "parse with trailing data" { - try testing.expectEqual(false, try parse(bool, &TokenStream.init("falsed"), ParseOptions{ .allow_trailing_data = true })); - try testing.expectError(error.InvalidTopLevelTrailing, parse(bool, &TokenStream.init("falsed"), ParseOptions{ .allow_trailing_data = false })); + var ts = TokenStream.init("falsed"); + try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = true })); + ts = TokenStream.init("falsed"); + try testing.expectError(error.InvalidTopLevelTrailing, parse(bool, &ts, ParseOptions{ .allow_trailing_data = false })); // trailing whitespace is okay - try testing.expectEqual(false, try parse(bool, &TokenStream.init("false \n"), ParseOptions{ .allow_trailing_data = false })); + ts = TokenStream.init("false \n"); + try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = false })); } test "parse into that allocates a slice" { - try testing.expectError(error.AllocatorRequired, parse([]u8, &TokenStream.init("\"foo\""), ParseOptions{})); + var ts = TokenStream.init("\"foo\""); + try testing.expectError(error.AllocatorRequired, parse([]u8, &ts, ParseOptions{})); const options = ParseOptions{ .allocator = testing.allocator }; { - const r = try parse([]u8, &TokenStream.init("\"foo\""), options); + ts = TokenStream.init("\"foo\""); + const r = try parse([]u8, &ts, options); defer parseFree([]u8, r, options); try testing.expectEqualSlices(u8, "foo", r); } { - const r = try parse([]u8, &TokenStream.init("[102, 111, 111]"), options); + ts = TokenStream.init("[102, 111, 111]"); + const r = try parse([]u8, &ts, options); defer parseFree([]u8, r, options); try testing.expectEqualSlices(u8, "foo", r); } { - const r = try parse([]u8, &TokenStream.init("\"with\\\\escape\""), options); + ts = TokenStream.init("\"with\\\\escape\""); + const r = try parse([]u8, &ts, options); defer parseFree([]u8, r, options); try testing.expectEqualSlices(u8, "with\\escape", r); } @@ -2042,7 +2069,8 @@ test "parse into tagged union" { float: f64, string: []const u8, }; - try testing.expectEqual(T{ .float = 1.5 }, try parse(T, &TokenStream.init("1.5"), ParseOptions{})); + var ts = TokenStream.init("1.5"); + try testing.expectEqual(T{ .float = 1.5 }, try parse(T, &ts, ParseOptions{})); } { // failing allocations should be bubbled up instantly without trying next member @@ -2053,7 +2081,8 @@ test "parse into tagged union" { string: []const u8, array: [3]u8, }; - try testing.expectError(error.OutOfMemory, parse(T, &TokenStream.init("[1,2,3]"), options)); + var ts = TokenStream.init("[1,2,3]"); + try testing.expectError(error.OutOfMemory, parse(T, &ts, options)); } { @@ -2062,7 +2091,8 @@ test "parse into tagged union" { x: u8, y: u8, }; - try testing.expectEqual(T{ .x = 42 }, try parse(T, &TokenStream.init("42"), ParseOptions{})); + var ts = TokenStream.init("42"); + try testing.expectEqual(T{ .x = 42 }, try parse(T, &ts, ParseOptions{})); } { // needs to back out when first union member doesn't match @@ -2070,7 +2100,8 @@ test "parse into tagged union" { A: struct { x: u32 }, B: struct { y: u32 }, }; - try testing.expectEqual(T{ .B = .{ .y = 42 } }, try parse(T, &TokenStream.init("{\"y\":42}"), ParseOptions{})); + var ts = TokenStream.init("{\"y\":42}"); + try testing.expectEqual(T{ .B = .{ .y = 42 } 
}, try parse(T, &ts, ParseOptions{})); } } @@ -2080,7 +2111,8 @@ test "parse union bubbles up AllocatorRequired" { string: []const u8, int: i32, }; - try testing.expectError(error.AllocatorRequired, parse(T, &TokenStream.init("42"), ParseOptions{})); + var ts = TokenStream.init("42"); + try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{})); } { // string member not first in union (and matching) @@ -2089,7 +2121,8 @@ test "parse union bubbles up AllocatorRequired" { float: f64, string: []const u8, }; - try testing.expectError(error.AllocatorRequired, parse(T, &TokenStream.init("\"foo\""), ParseOptions{})); + var ts = TokenStream.init("\"foo\""); + try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{})); } } @@ -2102,7 +2135,8 @@ test "parseFree descends into tagged union" { string: []const u8, }; // use a string with unicode escape so we know result can't be a reference to global constant - const r = try parse(T, &TokenStream.init("\"with\\u0105unicode\""), options); + var ts = TokenStream.init("\"with\\u0105unicode\""); + const r = try parse(T, &ts, options); try testing.expectEqual(std.meta.Tag(T).string, @as(std.meta.Tag(T), r)); try testing.expectEqualSlices(u8, "withÄ…unicode", r.string); try testing.expectEqual(@as(usize, 0), fail_alloc.deallocations); @@ -2116,12 +2150,13 @@ test "parse with comptime field" { comptime a: i32 = 0, b: bool, }; - try testing.expectEqual(T{ .a = 0, .b = true }, try parse(T, &TokenStream.init( + var ts = TokenStream.init( \\{ \\ "a": 0, \\ "b": true \\} - ), ParseOptions{})); + ); + try testing.expectEqual(T{ .a = 0, .b = true }, try parse(T, &ts, ParseOptions{})); } { // string comptime values currently require an allocator @@ -2140,12 +2175,13 @@ test "parse with comptime field" { .allocator = std.testing.allocator, }; - const r = try parse(T, &TokenStream.init( + var ts = TokenStream.init( \\{ \\ "kind": "float", \\ "b": 1.0 \\} - ), options); + ); + const r = try parse(T, &ts, options); // check that parseFree doesn't try to free comptime fields parseFree(T, r, options); @@ -2154,7 +2190,8 @@ test "parse with comptime field" { test "parse into struct with no fields" { const T = struct {}; - try testing.expectEqual(T{}, try parse(T, &TokenStream.init("{}"), ParseOptions{})); + var ts = TokenStream.init("{}"); + try testing.expectEqual(T{}, try parse(T, &ts, ParseOptions{})); } test "parse into struct with misc fields" { @@ -2186,7 +2223,7 @@ test "parse into struct with misc fields" { string: []const u8, }; }; - const r = try parse(T, &TokenStream.init( + var ts = TokenStream.init( \\{ \\ "int": 420, \\ "float": 3.14, @@ -2208,7 +2245,8 @@ test "parse into struct with misc fields" { \\ ], \\ "a_union": 100000 \\} - ), options); + ); + const r = try parse(T, &ts, options); defer parseFree(T, r, options); try testing.expectEqual(@as(i64, 420), r.int); try testing.expectEqual(@as(f64, 3.14), r.float); @@ -2239,14 +2277,15 @@ test "parse into struct with strings and arrays with sentinels" { data: [:99]const i32, simple_data: []const i32, }; - const r = try parse(T, &TokenStream.init( + var ts = TokenStream.init( \\{ \\ "language": "zig", \\ "language_without_sentinel": "zig again!", \\ "data": [1, 2, 3], \\ "simple_data": [4, 5, 6] \\} - ), options); + ); + const r = try parse(T, &ts, options); defer parseFree(T, r, options); try testing.expectEqualSentinel(u8, 0, "zig", r.language); @@ -2275,19 +2314,25 @@ test "parse into struct with duplicate field" { const T1 = struct { a: *u64 }; // both .UseFirst and 
.UseLast should fail because second "a" value isn't a u64 - try testing.expectError(error.InvalidNumber, parse(T1, &TokenStream.init(str), options_first)); - try testing.expectError(error.InvalidNumber, parse(T1, &TokenStream.init(str), options_last)); + var ts = TokenStream.init(str); + try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_first)); + ts = TokenStream.init(str); + try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_last)); const T2 = struct { a: f64 }; - try testing.expectEqual(T2{ .a = 1.0 }, try parse(T2, &TokenStream.init(str), options_first)); - try testing.expectEqual(T2{ .a = 0.25 }, try parse(T2, &TokenStream.init(str), options_last)); + ts = TokenStream.init(str); + try testing.expectEqual(T2{ .a = 1.0 }, try parse(T2, &ts, options_first)); + ts = TokenStream.init(str); + try testing.expectEqual(T2{ .a = 0.25 }, try parse(T2, &ts, options_last)); const T3 = struct { comptime a: f64 = 1.0 }; // .UseFirst should succeed because second "a" value is unconditionally ignored (even though != 1.0) const t3 = T3{ .a = 1.0 }; - try testing.expectEqual(t3, try parse(T3, &TokenStream.init(str), options_first)); + ts = TokenStream.init(str); + try testing.expectEqual(t3, try parse(T3, &ts, options_first)); // .UseLast should fail because second "a" value is 0.25 which is not equal to default value of 1.0 - try testing.expectError(error.UnexpectedValue, parse(T3, &TokenStream.init(str), options_last)); + ts = TokenStream.init(str); + try testing.expectError(error.UnexpectedValue, parse(T3, &ts, options_last)); } test "parse into struct ignoring unknown fields" { @@ -2301,7 +2346,7 @@ test "parse into struct ignoring unknown fields" { .ignore_unknown_fields = true, }; - const r = try parse(T, &std.json.TokenStream.init( + var ts = TokenStream.init( \\{ \\ "int": 420, \\ "float": 3.14, @@ -2323,7 +2368,8 @@ test "parse into struct ignoring unknown fields" { \\ "a_union": 100000, \\ "language": "zig" \\} - ), ops); + ); + const r = try parse(T, &ts, ops); defer parseFree(T, r, ops); try testing.expectEqual(@as(i64, 420), r.int); @@ -2341,7 +2387,8 @@ test "parse into recursive union definition" { }; const ops = ParseOptions{ .allocator = testing.allocator }; - const r = try parse(T, &std.json.TokenStream.init("{\"values\":[58]}"), ops); + var ts = TokenStream.init("{\"values\":[58]}"); + const r = try parse(T, &ts, ops); defer parseFree(T, r, ops); try testing.expectEqual(@as(i64, 58), r.values.array[0].integer); @@ -2363,7 +2410,8 @@ test "parse into double recursive union definition" { }; const ops = ParseOptions{ .allocator = testing.allocator }; - const r = try parse(T, &std.json.TokenStream.init("{\"values\":[[58]]}"), ops); + var ts = TokenStream.init("{\"values\":[[58]]}"); + const r = try parse(T, &ts, ops); defer parseFree(T, r, ops); try testing.expectEqual(@as(i64, 58), r.values.array[0].array[0].integer); @@ -2806,10 +2854,13 @@ test "integer after float has proper type" { test "parse exponential into int" { const T = struct { int: i64 }; - const r = try parse(T, &TokenStream.init("{ \"int\": 4.2e2 }"), ParseOptions{}); + var ts = TokenStream.init("{ \"int\": 4.2e2 }"); + const r = try parse(T, &ts, ParseOptions{}); try testing.expectEqual(@as(i64, 420), r.int); - try testing.expectError(error.InvalidNumber, parse(T, &TokenStream.init("{ \"int\": 0.042e2 }"), ParseOptions{})); - try testing.expectError(error.Overflow, parse(T, &TokenStream.init("{ \"int\": 18446744073709551616.0 }"), ParseOptions{})); + ts = TokenStream.init("{ \"int\": 
0.042e2 }"); + try testing.expectError(error.InvalidNumber, parse(T, &ts, ParseOptions{})); + ts = TokenStream.init("{ \"int\": 18446744073709551616.0 }"); + try testing.expectError(error.Overflow, parse(T, &ts, ParseOptions{})); } test "escaped characters" { @@ -2858,10 +2909,12 @@ test "string copy option" { defer arena_allocator.deinit(); const allocator = arena_allocator.allocator(); - const tree_nocopy = try Parser.init(allocator, false).parse(input); + var parser = Parser.init(allocator, false); + const tree_nocopy = try parser.parse(input); const obj_nocopy = tree_nocopy.root.Object; - const tree_copy = try Parser.init(allocator, true).parse(input); + parser = Parser.init(allocator, true); + const tree_copy = try parser.parse(input); const obj_copy = tree_copy.root.Object; for ([_][]const u8{ "noescape", "simple", "unicode", "surrogatepair" }) |field_name| { @@ -3376,14 +3429,12 @@ test "stringify null optional fields" { StringifyOptions{ .emit_null_optional_fields = false }, ); - try std.testing.expect(try parsesTo( - MyStruct, - MyStruct{}, - &TokenStream.init( - \\{"required":"something","another_required":"something else"} - ), - .{ .allocator = std.testing.allocator }, - )); + var ts = TokenStream.init( + \\{"required":"something","another_required":"something else"} + ); + try std.testing.expect(try parsesTo(MyStruct, MyStruct{}, &ts, .{ + .allocator = std.testing.allocator, + })); } // Same as `stringify` but accepts an Allocator and stores result in dynamically allocated memory instead of using a Writer. diff --git a/lib/std/meta.zig b/lib/std/meta.zig index 0de51bfa68..c6717ad1c0 100644 --- a/lib/std/meta.zig +++ b/lib/std/meta.zig @@ -311,7 +311,10 @@ pub fn assumeSentinel(p: anytype, comptime sentinel_val: Elem(@TypeOf(p))) Senti const ReturnType = Sentinel(T, sentinel_val); switch (@typeInfo(T)) { .Pointer => |info| switch (info.size) { - .Slice => return @bitCast(ReturnType, p), + .Slice => if (@import("builtin").zig_backend == .stage1) + return @bitCast(ReturnType, p) + else + return @ptrCast(ReturnType, p), .Many, .One => return @ptrCast(ReturnType, p), .C => {}, }, diff --git a/lib/std/net.zig b/lib/std/net.zig index 0853a08c53..2bd3e6cfb1 100644 --- a/lib/std/net.zig +++ b/lib/std/net.zig @@ -1141,18 +1141,20 @@ fn linuxLookupNameFromHosts( }; defer file.close(); - const stream = std.io.bufferedReader(file.reader()).reader(); + var buffered_reader = std.io.bufferedReader(file.reader()); + const reader = buffered_reader.reader(); var line_buf: [512]u8 = undefined; - while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) { + while (reader.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) { error.StreamTooLong => blk: { - // Skip to the delimiter in the stream, to fix parsing - try stream.skipUntilDelimiterOrEof('\n'); + // Skip to the delimiter in the reader, to fix parsing + try reader.skipUntilDelimiterOrEof('\n'); // Use the truncated line. A truncated comment or hostname will be handled correctly. 
break :blk &line_buf; }, else => |e| return e, }) |line| { - const no_comment_line = mem.split(u8, line, "#").next().?; + var split_it = mem.split(u8, line, "#"); + const no_comment_line = split_it.next().?; var line_it = mem.tokenize(u8, no_comment_line, " \t"); const ip_text = line_it.next() orelse continue; diff --git a/lib/std/zig/c_translation.zig b/lib/std/zig/c_translation.zig index 494fa5ecae..b76b7078dc 100644 --- a/lib/std/zig/c_translation.zig +++ b/lib/std/zig/c_translation.zig @@ -8,10 +8,19 @@ pub fn cast(comptime DestType: type, target: anytype) DestType { // this function should behave like transCCast in translate-c, except it's for macros const SourceType = @TypeOf(target); switch (@typeInfo(DestType)) { - .Fn, .Pointer => return castToPtr(DestType, SourceType, target), + .Fn => if (@import("builtin").zig_backend == .stage1) + return castToPtr(DestType, SourceType, target) + else + return castToPtr(*const DestType, SourceType, target), + .Pointer => return castToPtr(DestType, SourceType, target), .Optional => |dest_opt| { - if (@typeInfo(dest_opt.child) == .Pointer or @typeInfo(dest_opt.child) == .Fn) { + if (@typeInfo(dest_opt.child) == .Pointer) { return castToPtr(DestType, SourceType, target); + } else if (@typeInfo(dest_opt.child) == .Fn) { + if (@import("builtin").zig_backend == .stage1) + return castToPtr(DestType, SourceType, target) + else + return castToPtr(?*const dest_opt.child, SourceType, target); } }, .Int => { @@ -124,7 +133,10 @@ test "cast" { try testing.expect(cast(?*anyopaque, -1) == @intToPtr(?*anyopaque, @bitCast(usize, @as(isize, -1)))); try testing.expect(cast(?*anyopaque, foo) == @intToPtr(?*anyopaque, @bitCast(usize, @as(isize, -1)))); - const FnPtr = ?fn (*anyopaque) void; + const FnPtr = if (@import("builtin").zig_backend == .stage1) + ?fn (*anyopaque) void + else + ?*const fn (*anyopaque) void; try testing.expect(cast(FnPtr, 0) == @intToPtr(FnPtr, @as(usize, 0))); try testing.expect(cast(FnPtr, foo) == @intToPtr(FnPtr, @bitCast(usize, @as(isize, -1)))); } @@ -135,9 +147,14 @@ pub fn sizeof(target: anytype) usize { switch (@typeInfo(T)) { .Float, .Int, .Struct, .Union, .Array, .Bool, .Vector => return @sizeOf(T), .Fn => { - // sizeof(main) returns 1, sizeof(&main) returns pointer size. - // We cannot distinguish those types in Zig, so use pointer size. - return @sizeOf(T); + if (@import("builtin").zig_backend == .stage1) { + // sizeof(main) returns 1, sizeof(&main) returns pointer size. + // We cannot distinguish those types in Zig, so use pointer size. 
+ return @sizeOf(T); + } + + // sizeof(main) in C returns 1 + return 1; }, .Null => return @sizeOf(*anyopaque), .Void => { @@ -233,7 +250,12 @@ test "sizeof" { try testing.expect(sizeof(*const *const [4:0]u8) == ptr_size); try testing.expect(sizeof(*const [4]u8) == ptr_size); - try testing.expect(sizeof(sizeof) == @sizeOf(@TypeOf(sizeof))); + if (@import("builtin").zig_backend == .stage1) { + try testing.expect(sizeof(sizeof) == @sizeOf(@TypeOf(sizeof))); + } else if (false) { // TODO + try testing.expect(sizeof(&sizeof) == @sizeOf(@TypeOf(&sizeof))); + try testing.expect(sizeof(sizeof) == 1); + } try testing.expect(sizeof(void) == 1); try testing.expect(sizeof(anyopaque) == 1); From 50a6b0f3acb2a17f74d57301dbf3d4b13e30953b Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 3 Jun 2022 23:49:16 +0300 Subject: [PATCH 11/11] Sema: fix function type callconv inference --- src/Sema.zig | 6 +++++- .../runtime_indexing_comptime_array.zig | 12 ++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/Sema.zig b/src/Sema.zig index d3ca6a8cc5..593b299833 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -6654,7 +6654,11 @@ fn zirFunc( src_locs = sema.code.extraData(Zir.Inst.Func.SrcLocs, extra_index).data; } - const cc: std.builtin.CallingConvention = if (sema.owner_decl.is_exported) + // If this instruction has a body it means it's the type of the `owner_decl` + // otherwise it's a function type without a `callconv` attribute and should + // never be `.C`. + // NOTE: revisit when doing #1717 + const cc: std.builtin.CallingConvention = if (sema.owner_decl.is_exported and has_body) .C else .Unspecified; diff --git a/test/cases/compile_errors/runtime_indexing_comptime_array.zig b/test/cases/compile_errors/runtime_indexing_comptime_array.zig index da603d3630..3acd0feb61 100644 --- a/test/cases/compile_errors/runtime_indexing_comptime_array.zig +++ b/test/cases/compile_errors/runtime_indexing_comptime_array.zig @@ -23,9 +23,9 @@ pub export fn entry3() void { // error // backend=stage2,llvm // -// :6:33: error: values of type '[2]fn() callconv(.C) void' must be comptime known, but index value is runtime known -// :6:33: note: use '*const fn() callconv(.C) void' for a function pointer type -// :13:33: error: values of type '[2]fn() callconv(.C) void' must be comptime known, but index value is runtime known -// :13:33: note: use '*const fn() callconv(.C) void' for a function pointer type -// :19:33: error: values of type '[2]fn() callconv(.C) void' must be comptime known, but index value is runtime known -// :19:33: note: use '*const fn() callconv(.C) void' for a function pointer type +// :6:5: error: values of type '[2]fn() void' must be comptime known, but index value is runtime known +// :6:5: note: use '*const fn() void' for a function pointer type +// :13:5: error: values of type '[2]fn() void' must be comptime known, but index value is runtime known +// :13:5: note: use '*const fn() void' for a function pointer type +// :19:5: error: values of type '[2]fn() void' must be comptime known, but index value is runtime known +// :19:5: note: use '*const fn() void' for a function pointer type
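
As a closing illustration (a sketch under the inference rule described in the last commit,
not code from the patch series): the expected messages above now print `fn() void` because
only a `fn` that has a body and belongs to an exported declaration infers callconv(.C); a
bodiless function type written inside such a function keeps .Unspecified.

    const std = @import("std");

    pub export fn entry() void {
        // `entry` has a body and is exported, so its own type infers callconv(.C).
        const FnTy = fn () void; // bodiless function *type*: callconv stays .Unspecified
        comptime {
            // Holds after this patch; before it, both types inferred callconv(.C).
            std.debug.assert(FnTy != fn () callconv(.C) void);
        }
    }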