diff --git a/CMakeLists.txt b/CMakeLists.txt
index 721690e9dc..d435092723 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -416,8 +416,8 @@ set(ZIG_CPP_SOURCES
set(ZIG_STD_FILES
"array_list.zig"
"atomic/index.zig"
- "atomic/stack.zig"
"atomic/queue.zig"
+ "atomic/stack.zig"
"base64.zig"
"buf_map.zig"
"buf_set.zig"
@@ -427,13 +427,13 @@ set(ZIG_STD_FILES
"c/index.zig"
"c/linux.zig"
"c/windows.zig"
+ "crypto/blake2.zig"
+ "crypto/hmac.zig"
"crypto/index.zig"
"crypto/md5.zig"
"crypto/sha1.zig"
"crypto/sha2.zig"
"crypto/sha3.zig"
- "crypto/blake2.zig"
- "crypto/hmac.zig"
"cstr.zig"
"debug/failing_allocator.zig"
"debug/index.zig"
@@ -445,15 +445,16 @@ set(ZIG_STD_FILES
"fmt/errol/index.zig"
"fmt/errol/lookup.zig"
"fmt/index.zig"
- "hash_map.zig"
- "hash/index.zig"
"hash/adler.zig"
"hash/crc.zig"
"hash/fnv.zig"
+ "hash/index.zig"
"hash/siphash.zig"
+ "hash_map.zig"
"heap.zig"
"index.zig"
"io.zig"
+ "json.zig"
"linked_list.zig"
"macho.zig"
"math/acos.zig"
@@ -465,6 +466,28 @@ set(ZIG_STD_FILES
"math/atanh.zig"
"math/cbrt.zig"
"math/ceil.zig"
+ "math/complex/abs.zig"
+ "math/complex/acos.zig"
+ "math/complex/acosh.zig"
+ "math/complex/arg.zig"
+ "math/complex/asin.zig"
+ "math/complex/asinh.zig"
+ "math/complex/atan.zig"
+ "math/complex/atanh.zig"
+ "math/complex/conj.zig"
+ "math/complex/cos.zig"
+ "math/complex/cosh.zig"
+ "math/complex/exp.zig"
+ "math/complex/index.zig"
+ "math/complex/ldexp.zig"
+ "math/complex/log.zig"
+ "math/complex/pow.zig"
+ "math/complex/proj.zig"
+ "math/complex/sin.zig"
+ "math/complex/sinh.zig"
+ "math/complex/sqrt.zig"
+ "math/complex/tan.zig"
+ "math/complex/tanh.zig"
"math/copysign.zig"
"math/cos.zig"
"math/cosh.zig"
@@ -501,33 +524,12 @@ set(ZIG_STD_FILES
"math/tan.zig"
"math/tanh.zig"
"math/trunc.zig"
- "math/complex/abs.zig"
- "math/complex/acosh.zig"
- "math/complex/acos.zig"
- "math/complex/arg.zig"
- "math/complex/asinh.zig"
- "math/complex/asin.zig"
- "math/complex/atanh.zig"
- "math/complex/atan.zig"
- "math/complex/conj.zig"
- "math/complex/cosh.zig"
- "math/complex/cos.zig"
- "math/complex/exp.zig"
- "math/complex/index.zig"
- "math/complex/ldexp.zig"
- "math/complex/log.zig"
- "math/complex/pow.zig"
- "math/complex/proj.zig"
- "math/complex/sinh.zig"
- "math/complex/sin.zig"
- "math/complex/sqrt.zig"
- "math/complex/tanh.zig"
- "math/complex/tan.zig"
"mem.zig"
"net.zig"
"os/child_process.zig"
"os/darwin.zig"
"os/darwin_errno.zig"
+ "os/epoch.zig"
"os/file.zig"
"os/get_user_id.zig"
"os/index.zig"
@@ -537,13 +539,13 @@ set(ZIG_STD_FILES
"os/linux/x86_64.zig"
"os/path.zig"
"os/time.zig"
- "os/epoch.zig"
"os/windows/error.zig"
"os/windows/index.zig"
"os/windows/util.zig"
"os/zen.zig"
"rand/index.zig"
"rand/ziggurat.zig"
+ "segmented_list.zig"
"sort.zig"
"special/bootstrap.zig"
"special/bootstrap_lib.zig"
diff --git a/doc/langref.html.in b/doc/langref.html.in
index 9fb2ebf9f5..644cc5b9e0 100644
--- a/doc/langref.html.in
+++ b/doc/langref.html.in
@@ -4809,6 +4809,182 @@ pub const TypeId = enum {
BoundFn,
ArgTuple,
Opaque,
+};
+ {#code_end#}
+ {#header_close#}
+ {#header_open|@typeInfo#}
+      @typeInfo(comptime T: type) -> @import("builtin").TypeInfo
+
+ Returns information on the type. Returns a value of the following union:
+
+ {#code_begin|syntax#}
+pub const TypeInfo = union(TypeId) {
+ Type: void,
+ Void: void,
+ Bool: void,
+ NoReturn: void,
+ Int: Int,
+ Float: Float,
+ Pointer: Pointer,
+ Array: Array,
+ Struct: Struct,
+ FloatLiteral: void,
+ IntLiteral: void,
+ UndefinedLiteral: void,
+ NullLiteral: void,
+ Nullable: Nullable,
+ ErrorUnion: ErrorUnion,
+ ErrorSet: ErrorSet,
+ Enum: Enum,
+ Union: Union,
+ Fn: Fn,
+ Namespace: void,
+ Block: void,
+ BoundFn: Fn,
+ ArgTuple: void,
+ Opaque: void,
+ Promise: Promise,
+
+
+ pub const Int = struct {
+ is_signed: bool,
+ bits: u8,
+ };
+
+ pub const Float = struct {
+ bits: u8,
+ };
+
+ pub const Pointer = struct {
+ is_const: bool,
+ is_volatile: bool,
+ alignment: u32,
+ child: type,
+ };
+
+ pub const Array = struct {
+ len: usize,
+ child: type,
+ };
+
+ pub const ContainerLayout = enum {
+ Auto,
+ Extern,
+ Packed,
+ };
+
+ pub const StructField = struct {
+ name: []const u8,
+ offset: ?usize,
+ field_type: type,
+ };
+
+ pub const Struct = struct {
+ layout: ContainerLayout,
+ fields: []StructField,
+ defs: []Definition,
+ };
+
+ pub const Nullable = struct {
+ child: type,
+ };
+
+ pub const ErrorUnion = struct {
+ error_set: type,
+ payload: type,
+ };
+
+ pub const Error = struct {
+ name: []const u8,
+ value: usize,
+ };
+
+ pub const ErrorSet = struct {
+ errors: []Error,
+ };
+
+ pub const EnumField = struct {
+ name: []const u8,
+ value: usize,
+ };
+
+ pub const Enum = struct {
+ layout: ContainerLayout,
+ tag_type: type,
+ fields: []EnumField,
+ defs: []Definition,
+ };
+
+ pub const UnionField = struct {
+ name: []const u8,
+ enum_field: ?EnumField,
+ field_type: type,
+ };
+
+ pub const Union = struct {
+ layout: ContainerLayout,
+ tag_type: type,
+ fields: []UnionField,
+ defs: []Definition,
+ };
+
+ pub const CallingConvention = enum {
+ Unspecified,
+ C,
+ Cold,
+ Naked,
+ Stdcall,
+ Async,
+ };
+
+ pub const FnArg = struct {
+ is_generic: bool,
+ is_noalias: bool,
+ arg_type: type,
+ };
+
+ pub const Fn = struct {
+ calling_convention: CallingConvention,
+ is_generic: bool,
+ is_var_args: bool,
+ return_type: type,
+ async_allocator_type: type,
+ args: []FnArg,
+ };
+
+ pub const Promise = struct {
+ child: type,
+ };
+
+ pub const Definition = struct {
+ name: []const u8,
+ is_pub: bool,
+ data: Data,
+
+ pub const Data = union(enum) {
+ Type: type,
+ Var: type,
+ Fn: FnDef,
+
+ pub const FnDef = struct {
+ fn_type: type,
+ inline_type: Inline,
+ calling_convention: CallingConvention,
+ is_var_args: bool,
+ is_extern: bool,
+ is_export: bool,
+ lib_name: ?[]const u8,
+ return_type: type,
+ arg_names: [][] const u8,
+
+ pub const Inline = enum {
+ Auto,
+ Always,
+ Never,
+ };
+ };
+ };
+ };
};
{#code_end#}
{#header_close#}
@@ -5226,7 +5402,6 @@ pub const Os = enum {
rtems,
nacl,
cnk,
- bitrig,
aix,
cuda,
nvcl,
@@ -5237,10 +5412,12 @@ pub const Os = enum {
watchos,
mesa3d,
contiki,
+ amdpal,
zen,
};
pub const Arch = enum {
+ armv8_3a,
armv8_2a,
armv8_1a,
armv8,
@@ -5260,9 +5437,29 @@ pub const Arch = enum {
armv5,
armv5te,
armv4t,
- armeb,
+ armebv8_3a,
+ armebv8_2a,
+ armebv8_1a,
+ armebv8,
+ armebv8r,
+ armebv8m_baseline,
+ armebv8m_mainline,
+ armebv7,
+ armebv7em,
+ armebv7m,
+ armebv7s,
+ armebv7k,
+ armebv7ve,
+ armebv6,
+ armebv6m,
+ armebv6k,
+ armebv6t2,
+ armebv5,
+ armebv5te,
+ armebv4t,
aarch64,
aarch64_be,
+ arc,
avr,
bpfel,
bpfeb,
@@ -5315,6 +5512,7 @@ pub const Arch = enum {
pub const Environ = enum {
unknown,
gnu,
+ gnuabin32,
gnuabi64,
gnueabi,
gnueabihf,
@@ -5332,6 +5530,7 @@ pub const Environ = enum {
amdopencl,
coreclr,
opencl,
+ simulator,
};
pub const ObjectFormat = enum {
@@ -5358,10 +5557,23 @@ pub const AtomicOrder = enum {
SeqCst,
};
+pub const AtomicRmwOp = enum {
+ Xchg,
+ Add,
+ Sub,
+ And,
+ Nand,
+ Or,
+ Xor,
+ Max,
+ Min,
+};
+
pub const Mode = enum {
Debug,
ReleaseSafe,
ReleaseFast,
+ ReleaseSmall,
};
pub const TypeId = enum {
@@ -5380,7 +5592,7 @@ pub const TypeId = enum {
NullLiteral,
Nullable,
ErrorUnion,
- Error,
+ ErrorSet,
Enum,
Union,
Fn,
@@ -5389,6 +5601,176 @@ pub const TypeId = enum {
BoundFn,
ArgTuple,
Opaque,
+ Promise,
+};
+
+pub const TypeInfo = union(TypeId) {
+ Type: void,
+ Void: void,
+ Bool: void,
+ NoReturn: void,
+ Int: Int,
+ Float: Float,
+ Pointer: Pointer,
+ Array: Array,
+ Struct: Struct,
+ FloatLiteral: void,
+ IntLiteral: void,
+ UndefinedLiteral: void,
+ NullLiteral: void,
+ Nullable: Nullable,
+ ErrorUnion: ErrorUnion,
+ ErrorSet: ErrorSet,
+ Enum: Enum,
+ Union: Union,
+ Fn: Fn,
+ Namespace: void,
+ Block: void,
+ BoundFn: Fn,
+ ArgTuple: void,
+ Opaque: void,
+ Promise: Promise,
+
+
+ pub const Int = struct {
+ is_signed: bool,
+ bits: u8,
+ };
+
+ pub const Float = struct {
+ bits: u8,
+ };
+
+ pub const Pointer = struct {
+ is_const: bool,
+ is_volatile: bool,
+ alignment: u32,
+ child: type,
+ };
+
+ pub const Array = struct {
+ len: usize,
+ child: type,
+ };
+
+ pub const ContainerLayout = enum {
+ Auto,
+ Extern,
+ Packed,
+ };
+
+ pub const StructField = struct {
+ name: []const u8,
+ offset: ?usize,
+ field_type: type,
+ };
+
+ pub const Struct = struct {
+ layout: ContainerLayout,
+ fields: []StructField,
+ defs: []Definition,
+ };
+
+ pub const Nullable = struct {
+ child: type,
+ };
+
+ pub const ErrorUnion = struct {
+ error_set: type,
+ payload: type,
+ };
+
+ pub const Error = struct {
+ name: []const u8,
+ value: usize,
+ };
+
+ pub const ErrorSet = struct {
+ errors: []Error,
+ };
+
+ pub const EnumField = struct {
+ name: []const u8,
+ value: usize,
+ };
+
+ pub const Enum = struct {
+ layout: ContainerLayout,
+ tag_type: type,
+ fields: []EnumField,
+ defs: []Definition,
+ };
+
+ pub const UnionField = struct {
+ name: []const u8,
+ enum_field: ?EnumField,
+ field_type: type,
+ };
+
+ pub const Union = struct {
+ layout: ContainerLayout,
+ tag_type: type,
+ fields: []UnionField,
+ defs: []Definition,
+ };
+
+ pub const CallingConvention = enum {
+ Unspecified,
+ C,
+ Cold,
+ Naked,
+ Stdcall,
+ Async,
+ };
+
+ pub const FnArg = struct {
+ is_generic: bool,
+ is_noalias: bool,
+ arg_type: type,
+ };
+
+ pub const Fn = struct {
+ calling_convention: CallingConvention,
+ is_generic: bool,
+ is_var_args: bool,
+ return_type: type,
+ async_allocator_type: type,
+ args: []FnArg,
+ };
+
+ pub const Promise = struct {
+ child: type,
+ };
+
+ pub const Definition = struct {
+ name: []const u8,
+ is_pub: bool,
+ data: Data,
+
+ pub const Data = union(enum) {
+ Type: type,
+ Var: type,
+ Fn: FnDef,
+
+ pub const FnDef = struct {
+ fn_type: type,
+ inline_type: Inline,
+ calling_convention: CallingConvention,
+ is_var_args: bool,
+ is_extern: bool,
+ is_export: bool,
+ lib_name: ?[]const u8,
+ return_type: type,
+ arg_names: [][] const u8,
+
+ pub const Inline = enum {
+ Auto,
+ Always,
+ Never,
+ };
+ };
+ };
+ };
};
pub const FloatMode = enum {
@@ -5402,7 +5784,7 @@ pub const Endian = enum {
};
pub const endian = Endian.Little;
-pub const is_test = false;
+pub const is_test = true;
pub const os = Os.linux;
pub const arch = Arch.x86_64;
pub const environ = Environ.gnu;
@@ -5410,6 +5792,7 @@ pub const object_format = ObjectFormat.elf;
pub const mode = Mode.Debug;
pub const link_libc = false;
pub const have_error_return_tracing = true;
+pub const __zig_test_fn_slice = {}; // overwritten later
{#code_end#}
{#see_also|Build Mode#}
{#header_close#}
@@ -6070,7 +6453,7 @@ hljs.registerLanguage("zig", function(t) {
a = t.IR + "\\s*\\(",
c = {
keyword: "const align var extern stdcallcc nakedcc volatile export pub noalias inline struct packed enum union break return try catch test continue unreachable comptime and or asm defer errdefer if else switch while for fn use bool f32 f64 void type noreturn error i8 u8 i16 u16 i32 u32 i64 u64 isize usize i8w u8w i16w i32w u32w i64w u64w isizew usizew c_short c_ushort c_int c_uint c_long c_ulong c_longlong c_ulonglong",
- built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field",
+ built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field typeInfo",
literal: "true false null undefined"
},
n = [e, t.CLCM, t.CBCM, s, r];
diff --git a/src/all_types.hpp b/src/all_types.hpp
index a25c99edda..6f48c4ed36 100644
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -1298,6 +1298,7 @@ enum BuiltinFnId {
BuiltinFnIdMemberType,
BuiltinFnIdMemberName,
BuiltinFnIdField,
+ BuiltinFnIdTypeInfo,
BuiltinFnIdTypeof,
BuiltinFnIdAddWithOverflow,
BuiltinFnIdSubWithOverflow,
@@ -1511,6 +1512,7 @@ struct CodeGen {
HashMap exported_symbol_names;
HashMap external_prototypes;
HashMap string_literals_table;
+ HashMap type_info_cache;
ZigList import_queue;
@@ -2042,6 +2044,7 @@ enum IrInstructionId {
IrInstructionIdTagType,
IrInstructionIdFieldParentPtr,
IrInstructionIdOffsetOf,
+ IrInstructionIdTypeInfo,
IrInstructionIdTypeId,
IrInstructionIdSetEvalBranchQuota,
IrInstructionIdPtrTypeOf,
@@ -2863,6 +2866,12 @@ struct IrInstructionOffsetOf {
IrInstruction *field_name;
};
+struct IrInstructionTypeInfo {
+ IrInstruction base;
+
+ IrInstruction *type_value;
+};
+
struct IrInstructionTypeId {
IrInstruction base;
diff --git a/src/analyze.cpp b/src/analyze.cpp
index 99712cbfaf..d6137a4286 100644
--- a/src/analyze.cpp
+++ b/src/analyze.cpp
@@ -2325,8 +2325,14 @@ static void resolve_enum_zero_bits(CodeGen *g, TypeTableEntry *enum_type) {
HashMap occupied_tag_values = {};
occupied_tag_values.init(field_count);
- TypeTableEntry *tag_int_type = get_smallest_unsigned_int_type(g, field_count - 1);
+ TypeTableEntry *tag_int_type;
+ if (enum_type->data.enumeration.layout == ContainerLayoutExtern) {
+ tag_int_type = get_c_int_type(g, CIntTypeInt);
+ } else {
+ tag_int_type = get_smallest_unsigned_int_type(g, field_count - 1);
+ }
+ // TODO: Are extern enums allowed to have an init_arg_expr?
if (decl_node->data.container_decl.init_arg_expr != nullptr) {
TypeTableEntry *wanted_tag_int_type = analyze_type_expr(g, scope, decl_node->data.container_decl.init_arg_expr);
if (type_is_invalid(wanted_tag_int_type)) {
@@ -5926,8 +5932,8 @@ size_t type_id_len() {
return array_length(all_type_ids);
}
-size_t type_id_index(TypeTableEntryId id) {
- switch (id) {
+size_t type_id_index(TypeTableEntry *entry) {
+ switch (entry->id) {
case TypeTableEntryIdInvalid:
zig_unreachable();
case TypeTableEntryIdMetaType:
@@ -5947,6 +5953,8 @@ size_t type_id_index(TypeTableEntryId id) {
case TypeTableEntryIdArray:
return 7;
case TypeTableEntryIdStruct:
+ if (entry->data.structure.is_slice)
+ return 25;
return 8;
case TypeTableEntryIdNumLitFloat:
return 9;
diff --git a/src/analyze.hpp b/src/analyze.hpp
index aca78f4e25..56ca21a93f 100644
--- a/src/analyze.hpp
+++ b/src/analyze.hpp
@@ -174,7 +174,7 @@ void update_compile_var(CodeGen *g, Buf *name, ConstExprValue *value);
const char *type_id_name(TypeTableEntryId id);
TypeTableEntryId type_id_at_index(size_t index);
size_t type_id_len();
-size_t type_id_index(TypeTableEntryId id);
+size_t type_id_index(TypeTableEntry *entry);
TypeTableEntry *get_generic_fn_type(CodeGen *g, FnTypeId *fn_type_id);
bool type_is_copyable(CodeGen *g, TypeTableEntry *type_entry);
LinkLib *create_link_lib(Buf *name);
diff --git a/src/ast_render.cpp b/src/ast_render.cpp
index 0cb8bf4e93..5a1e81b36d 100644
--- a/src/ast_render.cpp
+++ b/src/ast_render.cpp
@@ -736,7 +736,7 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
render_node_grouped(ar, field_node->data.struct_field.type);
}
if (field_node->data.struct_field.value != nullptr) {
- fprintf(ar->f, "= ");
+ fprintf(ar->f, " = ");
render_node_grouped(ar, field_node->data.struct_field.value);
}
fprintf(ar->f, ",\n");
diff --git a/src/bigint.cpp b/src/bigint.cpp
index 2a688debd5..64bc59e5cf 100644
--- a/src/bigint.cpp
+++ b/src/bigint.cpp
@@ -1259,12 +1259,11 @@ void bigint_and(BigInt *dest, const BigInt *op1, const BigInt *op2) {
bigint_normalize(dest);
return;
}
- // TODO this code path is untested
- uint64_t first_digit = dest->data.digit;
+
dest->digit_count = max(op1->digit_count, op2->digit_count);
dest->data.digits = allocate_nonzero(dest->digit_count);
- dest->data.digits[0] = first_digit;
- size_t i = 1;
+
+ size_t i = 0;
for (; i < op1->digit_count && i < op2->digit_count; i += 1) {
dest->data.digits[i] = op1_digits[i] & op2_digits[i];
}
@@ -1412,7 +1411,6 @@ void bigint_shr(BigInt *dest, const BigInt *op1, const BigInt *op2) {
return;
}
- // TODO this code path is untested
size_t digit_shift_count = shift_amt / 64;
size_t leftover_shift_count = shift_amt % 64;
@@ -1427,7 +1425,7 @@ void bigint_shr(BigInt *dest, const BigInt *op1, const BigInt *op2) {
uint64_t digit = op1_digits[op_digit_index];
size_t dest_digit_index = op_digit_index - digit_shift_count;
dest->data.digits[dest_digit_index] = carry | (digit >> leftover_shift_count);
- carry = (0xffffffffffffffffULL << leftover_shift_count) & digit;
+ carry = digit << leftover_shift_count;
if (dest_digit_index == 0) { break; }
op_digit_index -= 1;
diff --git a/src/codegen.cpp b/src/codegen.cpp
index 2d8c385f44..4e58f86d4b 100644
--- a/src/codegen.cpp
+++ b/src/codegen.cpp
@@ -88,6 +88,7 @@ CodeGen *codegen_create(Buf *root_src_path, const ZigTarget *target, OutType out
g->exported_symbol_names.init(8);
g->external_prototypes.init(8);
g->string_literals_table.init(16);
+ g->type_info_cache.init(32);
g->is_test_build = false;
g->want_h_file = (out_type == OutTypeObj || out_type == OutTypeLib);
buf_resize(&g->global_asm, 0);
@@ -4502,6 +4503,7 @@ static LLVMValueRef ir_render_instruction(CodeGen *g, IrExecutable *executable,
case IrInstructionIdDeclRef:
case IrInstructionIdSwitchVar:
case IrInstructionIdOffsetOf:
+ case IrInstructionIdTypeInfo:
case IrInstructionIdTypeId:
case IrInstructionIdSetEvalBranchQuota:
case IrInstructionIdPtrTypeOf:
@@ -6125,6 +6127,7 @@ static void define_builtin_fns(CodeGen *g) {
create_builtin_fn(g, BuiltinFnIdMemberType, "memberType", 2);
create_builtin_fn(g, BuiltinFnIdMemberName, "memberName", 2);
create_builtin_fn(g, BuiltinFnIdField, "field", 2);
+ create_builtin_fn(g, BuiltinFnIdTypeInfo, "typeInfo", 1);
create_builtin_fn(g, BuiltinFnIdTypeof, "typeOf", 1); // TODO rename to TypeOf
create_builtin_fn(g, BuiltinFnIdAddWithOverflow, "addWithOverflow", 4);
create_builtin_fn(g, BuiltinFnIdSubWithOverflow, "subWithOverflow", 4);
@@ -6342,8 +6345,196 @@ static void define_builtin_compile_vars(CodeGen *g) {
const TypeTableEntryId id = type_id_at_index(i);
buf_appendf(contents, " %s,\n", type_id_name(id));
}
+ buf_appendf(contents, " Slice,\n");
buf_appendf(contents, "};\n\n");
}
+ {
+ buf_appendf(contents,
+ "pub const TypeInfo = union(TypeId) {\n"
+ " Type: void,\n"
+ " Void: void,\n"
+ " Bool: void,\n"
+ " NoReturn: void,\n"
+ " Int: Int,\n"
+ " Float: Float,\n"
+ " Pointer: Pointer,\n"
+ " Slice: Slice,\n"
+ " Array: Array,\n"
+ " Struct: Struct,\n"
+ " FloatLiteral: void,\n"
+ " IntLiteral: void,\n"
+ " UndefinedLiteral: void,\n"
+ " NullLiteral: void,\n"
+ " Nullable: Nullable,\n"
+ " ErrorUnion: ErrorUnion,\n"
+ " ErrorSet: ErrorSet,\n"
+ " Enum: Enum,\n"
+ " Union: Union,\n"
+ " Fn: Fn,\n"
+ " Namespace: void,\n"
+ " Block: void,\n"
+ " BoundFn: Fn,\n"
+ " ArgTuple: void,\n"
+ " Opaque: void,\n"
+ " Promise: Promise,\n"
+ "\n\n"
+ " pub const Int = struct {\n"
+ " is_signed: bool,\n"
+ " bits: u8,\n"
+ " };\n"
+ "\n"
+ " pub const Float = struct {\n"
+ " bits: u8,\n"
+ " };\n"
+ "\n"
+ " pub const Pointer = struct {\n"
+ " is_const: bool,\n"
+ " is_volatile: bool,\n"
+ " alignment: u32,\n"
+ " child: type,\n"
+ " };\n"
+ "\n"
+ " pub const Slice = Pointer;\n"
+ "\n"
+ " pub const Array = struct {\n"
+ " len: usize,\n"
+ " child: type,\n"
+ " };\n"
+ "\n"
+ " pub const ContainerLayout = enum {\n"
+ " Auto,\n"
+ " Extern,\n"
+ " Packed,\n"
+ " };\n"
+ "\n"
+ " pub const StructField = struct {\n"
+ " name: []const u8,\n"
+ " offset: ?usize,\n"
+ " field_type: type,\n"
+ " };\n"
+ "\n"
+ " pub const Struct = struct {\n"
+ " layout: ContainerLayout,\n"
+ " fields: []StructField,\n"
+ " defs: []Definition,\n"
+ " };\n"
+ "\n"
+ " pub const Nullable = struct {\n"
+ " child: type,\n"
+ " };\n"
+ "\n"
+ " pub const ErrorUnion = struct {\n"
+ " error_set: type,\n"
+ " payload: type,\n"
+ " };\n"
+ "\n"
+ " pub const Error = struct {\n"
+ " name: []const u8,\n"
+ " value: usize,\n"
+ " };\n"
+ "\n"
+ " pub const ErrorSet = struct {\n"
+ " errors: []Error,\n"
+ " };\n"
+ "\n"
+ " pub const EnumField = struct {\n"
+ " name: []const u8,\n"
+ " value: usize,\n"
+ " };\n"
+ "\n"
+ " pub const Enum = struct {\n"
+ " layout: ContainerLayout,\n"
+ " tag_type: type,\n"
+ " fields: []EnumField,\n"
+ " defs: []Definition,\n"
+ " };\n"
+ "\n"
+ " pub const UnionField = struct {\n"
+ " name: []const u8,\n"
+ " enum_field: ?EnumField,\n"
+ " field_type: type,\n"
+ " };\n"
+ "\n"
+ " pub const Union = struct {\n"
+ " layout: ContainerLayout,\n"
+ " tag_type: type,\n"
+ " fields: []UnionField,\n"
+ " defs: []Definition,\n"
+ " };\n"
+ "\n"
+ " pub const CallingConvention = enum {\n"
+ " Unspecified,\n"
+ " C,\n"
+ " Cold,\n"
+ " Naked,\n"
+ " Stdcall,\n"
+ " Async,\n"
+ " };\n"
+ "\n"
+ " pub const FnArg = struct {\n"
+ " is_generic: bool,\n"
+ " is_noalias: bool,\n"
+ " arg_type: type,\n"
+ " };\n"
+ "\n"
+ " pub const Fn = struct {\n"
+ " calling_convention: CallingConvention,\n"
+ " is_generic: bool,\n"
+ " is_var_args: bool,\n"
+ " return_type: type,\n"
+ " async_allocator_type: type,\n"
+ " args: []FnArg,\n"
+ " };\n"
+ "\n"
+ " pub const Promise = struct {\n"
+ " child: type,\n"
+ " };\n"
+ "\n"
+ " pub const Definition = struct {\n"
+ " name: []const u8,\n"
+ " is_pub: bool,\n"
+ " data: Data,\n"
+ "\n"
+ " pub const Data = union(enum) {\n"
+ " Type: type,\n"
+ " Var: type,\n"
+ " Fn: FnDef,\n"
+ "\n"
+ " pub const FnDef = struct {\n"
+ " fn_type: type,\n"
+ " inline_type: Inline,\n"
+ " calling_convention: CallingConvention,\n"
+ " is_var_args: bool,\n"
+ " is_extern: bool,\n"
+ " is_export: bool,\n"
+ " lib_name: ?[]const u8,\n"
+ " return_type: type,\n"
+ " arg_names: [][] const u8,\n"
+ "\n"
+ " pub const Inline = enum {\n"
+ " Auto,\n"
+ " Always,\n"
+ " Never,\n"
+ " };\n"
+ " };\n"
+ " };\n"
+ " };\n"
+ "};\n\n");
+ assert(ContainerLayoutAuto == 0);
+ assert(ContainerLayoutExtern == 1);
+ assert(ContainerLayoutPacked == 2);
+
+ assert(CallingConventionUnspecified == 0);
+ assert(CallingConventionC == 1);
+ assert(CallingConventionCold == 2);
+ assert(CallingConventionNaked == 3);
+ assert(CallingConventionStdcall == 4);
+ assert(CallingConventionAsync == 5);
+
+ assert(FnInlineAuto == 0);
+ assert(FnInlineAlways == 1);
+ assert(FnInlineNever == 2);
+ }
{
buf_appendf(contents,
"pub const FloatMode = enum {\n"
diff --git a/src/ir.cpp b/src/ir.cpp
index ff5afe138c..a069271af7 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -145,6 +145,8 @@ static bool ir_should_inline(IrExecutable *exec, Scope *scope) {
while (scope != nullptr) {
if (scope->id == ScopeIdCompTime)
return true;
+ if (scope->id == ScopeIdFnDef)
+ break;
scope = scope->parent;
}
return false;
@@ -615,6 +617,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionOffsetOf *) {
return IrInstructionIdOffsetOf;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionTypeInfo *) {
+ return IrInstructionIdTypeInfo;
+}
+
static constexpr IrInstructionId ir_instruction_id(IrInstructionTypeId *) {
return IrInstructionIdTypeId;
}
@@ -2440,6 +2446,16 @@ static IrInstruction *ir_build_offset_of(IrBuilder *irb, Scope *scope, AstNode *
return &instruction->base;
}
+static IrInstruction *ir_build_type_info(IrBuilder *irb, Scope *scope, AstNode *source_node,
+ IrInstruction *type_value) {
+ IrInstructionTypeInfo *instruction = ir_build_instruction(irb, scope, source_node);
+ instruction->type_value = type_value;
+
+ ir_ref_instruction(type_value, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
static IrInstruction *ir_build_type_id(IrBuilder *irb, Scope *scope, AstNode *source_node,
IrInstruction *type_value)
{
@@ -4083,6 +4099,16 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return ir_build_load_ptr(irb, scope, node, ptr_instruction);
}
+ case BuiltinFnIdTypeInfo:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *type_info = ir_build_type_info(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, type_info, lval);
+ }
case BuiltinFnIdBreakpoint:
return ir_lval_wrap(irb, scope, ir_build_breakpoint(irb, scope, node), lval);
case BuiltinFnIdReturnAddress:
@@ -13392,7 +13418,6 @@ static IrInstruction *ir_analyze_container_member_access_inner(IrAnalyze *ira,
return ira->codegen->invalid_instruction;
}
-
static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_name,
IrInstruction *source_instr, IrInstruction *container_ptr, TypeTableEntry *container_type)
{
@@ -13454,6 +13479,51 @@ static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_
} else if (bare_type->id == TypeTableEntryIdUnion) {
TypeUnionField *field = find_union_type_field(bare_type, field_name);
if (field) {
+ if (instr_is_comptime(container_ptr)) {
+ ConstExprValue *ptr_val = ir_resolve_const(ira, container_ptr, UndefBad);
+ if (!ptr_val)
+ return ira->codegen->invalid_instruction;
+
+ if (ptr_val->data.x_ptr.special != ConstPtrSpecialHardCodedAddr) {
+ ConstExprValue *union_val = const_ptr_pointee(ira->codegen, ptr_val);
+ if (type_is_invalid(union_val->type))
+ return ira->codegen->invalid_instruction;
+
+ TypeUnionField *actual_field = find_union_field_by_tag(bare_type, &union_val->data.x_union.tag);
+ if (actual_field == nullptr)
+ zig_unreachable();
+
+ if (field != actual_field) {
+ ir_add_error_node(ira, source_instr->source_node,
+ buf_sprintf("accessing union field '%s' while field '%s' is set", buf_ptr(field_name),
+ buf_ptr(actual_field->name)));
+ return ira->codegen->invalid_instruction;
+ }
+
+ ConstExprValue *payload_val = union_val->data.x_union.payload;
+
+ TypeTableEntry *field_type = field->type_entry;
+ if (field_type->id == TypeTableEntryIdVoid)
+ {
+ assert(payload_val == nullptr);
+ payload_val = create_const_vals(1);
+ payload_val->special = ConstValSpecialStatic;
+ payload_val->type = field_type;
+ }
+
+ TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, field_type, is_const, is_volatile,
+ get_abi_alignment(ira->codegen, field_type), 0, 0);
+
+ IrInstruction *result = ir_get_const(ira, source_instr);
+ ConstExprValue *const_val = &result->value;
+ const_val->data.x_ptr.special = ConstPtrSpecialRef;
+ const_val->data.x_ptr.mut = container_ptr->value.data.x_ptr.mut;
+ const_val->data.x_ptr.data.ref.pointee = payload_val;
+ const_val->type = ptr_type;
+ return result;
+ }
+ }
+
IrInstruction *result = ir_build_union_field_ptr(&ira->new_irb, source_instr->scope, source_instr->source_node, container_ptr, field);
result->value.type = get_pointer_to_type_extra(ira->codegen, field->type_entry, is_const, is_volatile,
get_abi_alignment(ira->codegen, field->type_entry), 0, 0);
@@ -13672,7 +13742,16 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
create_const_enum(child_type, &field->value), child_type,
ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
}
- } else if (child_type->id == TypeTableEntryIdUnion &&
+ }
+ ScopeDecls *container_scope = get_container_scope(child_type);
+ if (container_scope != nullptr) {
+ auto entry = container_scope->decl_table.maybe_get(field_name);
+ Tld *tld = entry ? entry->value : nullptr;
+ if (tld) {
+ return ir_analyze_decl_ref(ira, &field_ptr_instruction->base, tld);
+ }
+ }
+ if (child_type->id == TypeTableEntryIdUnion &&
(child_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr ||
child_type->data.unionation.decl_node->data.container_decl.auto_enum))
{
@@ -13689,14 +13768,6 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
}
}
- ScopeDecls *container_scope = get_container_scope(child_type);
- if (container_scope != nullptr) {
- auto entry = container_scope->decl_table.maybe_get(field_name);
- Tld *tld = entry ? entry->value : nullptr;
- if (tld) {
- return ir_analyze_decl_ref(ira, &field_ptr_instruction->base, tld);
- }
- }
ir_add_error(ira, &field_ptr_instruction->base,
buf_sprintf("container '%s' has no member called '%s'",
buf_ptr(&child_type->name), buf_ptr(field_name)));
@@ -15683,6 +15754,910 @@ static TypeTableEntry *ir_analyze_instruction_offset_of(IrAnalyze *ira,
return ira->codegen->builtin_types.entry_num_lit_int;
}
+static void ensure_field_index(TypeTableEntry *type, const char *field_name, size_t index)
+{
+ Buf *field_name_buf;
+
+ assert(type != nullptr && !type_is_invalid(type));
+ // Check for our field by creating a buffer in place then using the comma operator to free it so that we don't
+ // leak memory in debug mode.
+ assert(find_struct_type_field(type, field_name_buf = buf_create_from_str(field_name))->src_index == index &&
+ (buf_deinit(field_name_buf), true));
+}
+
+static TypeTableEntry *ir_type_info_get_type(IrAnalyze *ira, const char *type_name, TypeTableEntry *root = nullptr)
+{
+ static ConstExprValue *type_info_var = nullptr;
+ static TypeTableEntry *type_info_type = nullptr;
+ if (type_info_var == nullptr)
+ {
+ type_info_var = get_builtin_value(ira->codegen, "TypeInfo");
+ assert(type_info_var->type->id == TypeTableEntryIdMetaType);
+
+ ensure_complete_type(ira->codegen, type_info_var->data.x_type);
+ type_info_type = type_info_var->data.x_type;
+ assert(type_info_type->id == TypeTableEntryIdUnion);
+ }
+
+ if (type_name == nullptr && root == nullptr)
+ return type_info_type;
+ else if (type_name == nullptr)
+ return root;
+
+ TypeTableEntry *root_type = (root == nullptr) ? type_info_type : root;
+
+ ScopeDecls *type_info_scope = get_container_scope(root_type);
+ assert(type_info_scope != nullptr);
+
+ Buf field_name = BUF_INIT;
+ buf_init_from_str(&field_name, type_name);
+ auto entry = type_info_scope->decl_table.get(&field_name);
+ buf_deinit(&field_name);
+
+ TldVar *tld = (TldVar *)entry;
+ assert(tld->base.id == TldIdVar);
+
+ VariableTableEntry *var = tld->var;
+
+ ensure_complete_type(ira->codegen, var->value->type);
+ assert(var->value->type->id == TypeTableEntryIdMetaType);
+ return var->value->data.x_type;
+}
+
+static void ir_make_type_info_defs(IrAnalyze *ira, ConstExprValue *out_val, ScopeDecls *decls_scope)
+{
+ TypeTableEntry *type_info_definition_type = ir_type_info_get_type(ira, "Definition");
+ ensure_complete_type(ira->codegen, type_info_definition_type);
+ ensure_field_index(type_info_definition_type, "name", 0);
+ ensure_field_index(type_info_definition_type, "is_pub", 1);
+ ensure_field_index(type_info_definition_type, "data", 2);
+
+ TypeTableEntry *type_info_definition_data_type = ir_type_info_get_type(ira, "Data", type_info_definition_type);
+ ensure_complete_type(ira->codegen, type_info_definition_data_type);
+
+ TypeTableEntry *type_info_fn_def_type = ir_type_info_get_type(ira, "FnDef", type_info_definition_data_type);
+ ensure_complete_type(ira->codegen, type_info_fn_def_type);
+
+ TypeTableEntry *type_info_fn_def_inline_type = ir_type_info_get_type(ira, "Inline", type_info_fn_def_type);
+ ensure_complete_type(ira->codegen, type_info_fn_def_inline_type);
+
+ // Loop through our definitions once to figure out how many definitions we will generate info for.
+ auto decl_it = decls_scope->decl_table.entry_iterator();
+ decltype(decls_scope->decl_table)::Entry *curr_entry = nullptr;
+ int definition_count = 0;
+
+ while ((curr_entry = decl_it.next()) != nullptr)
+ {
+ // If the definition is unresolved, force it to be resolved again.
+ if (curr_entry->value->resolution == TldResolutionUnresolved)
+ {
+ resolve_top_level_decl(ira->codegen, curr_entry->value, false, curr_entry->value->source_node);
+ if (curr_entry->value->resolution != TldResolutionOk)
+ {
+ return;
+ }
+ }
+
+ // Skip comptime blocks and test functions.
+ if (curr_entry->value->id != TldIdCompTime)
+ {
+ if (curr_entry->value->id == TldIdFn)
+ {
+ FnTableEntry *fn_entry = ((TldFn *)curr_entry->value)->fn_entry;
+ if (fn_entry->is_test)
+ continue;
+ }
+
+ definition_count += 1;
+ }
+ }
+
+ ConstExprValue *definition_array = create_const_vals(1);
+ definition_array->special = ConstValSpecialStatic;
+ definition_array->type = get_array_type(ira->codegen, type_info_definition_type, definition_count);
+ definition_array->data.x_array.special = ConstArraySpecialNone;
+ definition_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ definition_array->data.x_array.s_none.elements = create_const_vals(definition_count);
+ init_const_slice(ira->codegen, out_val, definition_array, 0, definition_count, false);
+
+ // Loop through the definitions and generate info.
+ decl_it = decls_scope->decl_table.entry_iterator();
+ curr_entry = nullptr;
+ int definition_index = 0;
+ while ((curr_entry = decl_it.next()) != nullptr)
+ {
+ // Skip comptime blocks and test functions.
+ if (curr_entry->value->id == TldIdCompTime)
+ continue;
+ else if (curr_entry->value->id == TldIdFn)
+ {
+ FnTableEntry *fn_entry = ((TldFn *)curr_entry->value)->fn_entry;
+ if (fn_entry->is_test)
+ continue;
+ }
+
+ ConstExprValue *definition_val = &definition_array->data.x_array.s_none.elements[definition_index];
+
+ definition_val->special = ConstValSpecialStatic;
+ definition_val->type = type_info_definition_type;
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ ConstExprValue *name = create_const_str_lit(ira->codegen, curr_entry->key);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(curr_entry->key), true);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_bool;
+ inner_fields[1].data.x_bool = curr_entry->value->visib_mod == VisibModPub;
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = type_info_definition_data_type;
+ inner_fields[2].data.x_union.parent.id = ConstParentIdStruct;
+ inner_fields[2].data.x_union.parent.data.p_struct.struct_val = definition_val;
+ inner_fields[2].data.x_union.parent.data.p_struct.field_index = 1;
+
+ switch (curr_entry->value->id)
+ {
+ case TldIdVar:
+ {
+ VariableTableEntry *var = ((TldVar *)curr_entry->value)->var;
+ ensure_complete_type(ira->codegen, var->value->type);
+ if (var->value->type->id == TypeTableEntryIdMetaType)
+ {
+ // We have a variable of type 'type', so it's actually a type definition.
+ // 0: Data.Type: type
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 0);
+ inner_fields[2].data.x_union.payload = var->value;
+ }
+ else
+ {
+ // We have a variable of another type, so we store the type of the variable.
+ // 1: Data.Var: type
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 1);
+
+ ConstExprValue *payload = create_const_vals(1);
+ payload->type = ira->codegen->builtin_types.entry_type;
+ payload->data.x_type = var->value->type;
+
+ inner_fields[2].data.x_union.payload = payload;
+ }
+
+ break;
+ }
+ case TldIdFn:
+ {
+ // 2: Data.Fn: Data.FnDef
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 2);
+
+ FnTableEntry *fn_entry = ((TldFn *)curr_entry->value)->fn_entry;
+ assert(!fn_entry->is_test);
+
+ analyze_fn_body(ira->codegen, fn_entry);
+ if (fn_entry->anal_state == FnAnalStateInvalid)
+ return;
+
+ AstNodeFnProto *fn_node = (AstNodeFnProto *)(fn_entry->proto_node);
+
+ ConstExprValue *fn_def_val = create_const_vals(1);
+ fn_def_val->special = ConstValSpecialStatic;
+ fn_def_val->type = type_info_fn_def_type;
+ fn_def_val->data.x_struct.parent.id = ConstParentIdUnion;
+ fn_def_val->data.x_struct.parent.data.p_union.union_val = &inner_fields[2];
+
+ ConstExprValue *fn_def_fields = create_const_vals(9);
+ fn_def_val->data.x_struct.fields = fn_def_fields;
+
+ // fn_type: type
+ ensure_field_index(fn_def_val->type, "fn_type", 0);
+ fn_def_fields[0].special = ConstValSpecialStatic;
+ fn_def_fields[0].type = ira->codegen->builtin_types.entry_type;
+ fn_def_fields[0].data.x_type = fn_entry->type_entry;
+ // inline_type: Data.FnDef.Inline
+ ensure_field_index(fn_def_val->type, "inline_type", 1);
+ fn_def_fields[1].special = ConstValSpecialStatic;
+ fn_def_fields[1].type = type_info_fn_def_inline_type;
+ bigint_init_unsigned(&fn_def_fields[1].data.x_enum_tag, fn_entry->fn_inline);
+ // calling_convention: TypeInfo.CallingConvention
+ ensure_field_index(fn_def_val->type, "calling_convention", 2);
+ fn_def_fields[2].special = ConstValSpecialStatic;
+ fn_def_fields[2].type = ir_type_info_get_type(ira, "CallingConvention");
+ bigint_init_unsigned(&fn_def_fields[2].data.x_enum_tag, fn_node->cc);
+ // is_var_args: bool
+ ensure_field_index(fn_def_val->type, "is_var_args", 3);
+ bool is_varargs = fn_node->is_var_args;
+ fn_def_fields[3].special = ConstValSpecialStatic;
+ fn_def_fields[3].type = ira->codegen->builtin_types.entry_bool;
+ fn_def_fields[3].data.x_bool = is_varargs;
+ // is_extern: bool
+ ensure_field_index(fn_def_val->type, "is_extern", 4);
+ fn_def_fields[4].special = ConstValSpecialStatic;
+ fn_def_fields[4].type = ira->codegen->builtin_types.entry_bool;
+ fn_def_fields[4].data.x_bool = fn_node->is_extern;
+ // is_export: bool
+ ensure_field_index(fn_def_val->type, "is_export", 5);
+ fn_def_fields[5].special = ConstValSpecialStatic;
+ fn_def_fields[5].type = ira->codegen->builtin_types.entry_bool;
+ fn_def_fields[5].data.x_bool = fn_node->is_export;
+ // lib_name: ?[]const u8
+ ensure_field_index(fn_def_val->type, "lib_name", 6);
+ fn_def_fields[6].special = ConstValSpecialStatic;
+ fn_def_fields[6].type = get_maybe_type(ira->codegen,
+ get_slice_type(ira->codegen, get_pointer_to_type(ira->codegen,
+ ira->codegen->builtin_types.entry_u8, true)));
+ if (fn_node->is_extern && buf_len(fn_node->lib_name) > 0)
+ {
+ fn_def_fields[6].data.x_maybe = create_const_vals(1);
+ ConstExprValue *lib_name = create_const_str_lit(ira->codegen, fn_node->lib_name);
+ init_const_slice(ira->codegen, fn_def_fields[6].data.x_maybe, lib_name, 0, buf_len(fn_node->lib_name), true);
+ }
+ else
+ fn_def_fields[6].data.x_maybe = nullptr;
+ // return_type: type
+ ensure_field_index(fn_def_val->type, "return_type", 7);
+ fn_def_fields[7].special = ConstValSpecialStatic;
+ fn_def_fields[7].type = ira->codegen->builtin_types.entry_type;
+ if (fn_entry->src_implicit_return_type != nullptr)
+ fn_def_fields[7].data.x_type = fn_entry->src_implicit_return_type;
+ else if (fn_entry->type_entry->data.fn.gen_return_type != nullptr)
+ fn_def_fields[7].data.x_type = fn_entry->type_entry->data.fn.gen_return_type;
+ else
+ fn_def_fields[7].data.x_type = fn_entry->type_entry->data.fn.fn_type_id.return_type;
+ // arg_names: [][] const u8
+ ensure_field_index(fn_def_val->type, "arg_names", 8);
+ size_t fn_arg_count = fn_entry->variable_list.length;
+ ConstExprValue *fn_arg_name_array = create_const_vals(1);
+ fn_arg_name_array->special = ConstValSpecialStatic;
+ fn_arg_name_array->type = get_array_type(ira->codegen, get_slice_type(ira->codegen,
+ get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, true)), fn_arg_count);
+ fn_arg_name_array->data.x_array.special = ConstArraySpecialNone;
+ fn_arg_name_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ fn_arg_name_array->data.x_array.s_none.elements = create_const_vals(fn_arg_count);
+
+ init_const_slice(ira->codegen, &fn_def_fields[8], fn_arg_name_array, 0, fn_arg_count, false);
+
+ for (size_t fn_arg_index = 0; fn_arg_index < fn_arg_count; fn_arg_index++)
+ {
+ VariableTableEntry *arg_var = fn_entry->variable_list.at(fn_arg_index);
+ ConstExprValue *fn_arg_name_val = &fn_arg_name_array->data.x_array.s_none.elements[fn_arg_index];
+ ConstExprValue *arg_name = create_const_str_lit(ira->codegen, &arg_var->name);
+ init_const_slice(ira->codegen, fn_arg_name_val, arg_name, 0, buf_len(&arg_var->name), true);
+ fn_arg_name_val->data.x_struct.parent.id = ConstParentIdArray;
+ fn_arg_name_val->data.x_struct.parent.data.p_array.array_val = fn_arg_name_array;
+ fn_arg_name_val->data.x_struct.parent.data.p_array.elem_index = fn_arg_index;
+ }
+
+ inner_fields[2].data.x_union.payload = fn_def_val;
+ break;
+ }
+ case TldIdContainer:
+ {
+ TypeTableEntry *type_entry = ((TldContainer *)curr_entry->value)->type_entry;
+ ensure_complete_type(ira->codegen, type_entry);
+ // This is a type.
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 0);
+
+ ConstExprValue *payload = create_const_vals(1);
+ payload->type = ira->codegen->builtin_types.entry_type;
+ payload->data.x_type = type_entry;
+
+ inner_fields[2].data.x_union.payload = payload;
+
+ break;
+ }
+ default:
+ zig_unreachable();
+ }
+
+ definition_val->data.x_struct.fields = inner_fields;
+ definition_index++;
+ }
+
+ assert(definition_index == definition_count);
+}
+
+static ConstExprValue *ir_make_type_info_value(IrAnalyze *ira, TypeTableEntry *type_entry)
+{
+ assert(type_entry != nullptr);
+ assert(!type_is_invalid(type_entry));
+
+ ensure_complete_type(ira->codegen, type_entry);
+
+ const auto make_enum_field_val = [ira](ConstExprValue *enum_field_val, TypeEnumField *enum_field,
+ TypeTableEntry *type_info_enum_field_type) {
+ enum_field_val->special = ConstValSpecialStatic;
+ enum_field_val->type = type_info_enum_field_type;
+
+ ConstExprValue *inner_fields = create_const_vals(2);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_usize;
+
+ ConstExprValue *name = create_const_str_lit(ira->codegen, enum_field->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(enum_field->name), true);
+
+ bigint_init_bigint(&inner_fields[1].data.x_bigint, &enum_field->value);
+
+ enum_field_val->data.x_struct.fields = inner_fields;
+ };
+
+ const auto create_ptr_like_type_info = [ira](const char *name, TypeTableEntry *ptr_type_entry) {
+ ConstExprValue *result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, name);
+
+ ConstExprValue *fields = create_const_vals(4);
+ result->data.x_struct.fields = fields;
+
+ // is_const: bool
+ ensure_field_index(result->type, "is_const", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_bool;
+ fields[0].data.x_bool = ptr_type_entry->data.pointer.is_const;
+ // is_volatile: bool
+ ensure_field_index(result->type, "is_volatile", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_bool;
+ fields[1].data.x_bool = ptr_type_entry->data.pointer.is_volatile;
+ // alignment: u32
+ ensure_field_index(result->type, "alignment", 2);
+ fields[2].special = ConstValSpecialStatic;
+ fields[2].type = ira->codegen->builtin_types.entry_u32;
+ bigint_init_unsigned(&fields[2].data.x_bigint, ptr_type_entry->data.pointer.alignment);
+ // child: type
+ ensure_field_index(result->type, "child", 3);
+ fields[3].special = ConstValSpecialStatic;
+ fields[3].type = ira->codegen->builtin_types.entry_type;
+ fields[3].data.x_type = ptr_type_entry->data.pointer.child_type;
+
+ return result;
+ };
+
+ ConstExprValue *result = nullptr;
+ switch (type_entry->id)
+ {
+ case TypeTableEntryIdInvalid:
+ zig_unreachable();
+ case TypeTableEntryIdMetaType:
+ case TypeTableEntryIdVoid:
+ case TypeTableEntryIdBool:
+ case TypeTableEntryIdUnreachable:
+ case TypeTableEntryIdNumLitFloat:
+ case TypeTableEntryIdNumLitInt:
+ case TypeTableEntryIdUndefLit:
+ case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdNamespace:
+ case TypeTableEntryIdBlock:
+ case TypeTableEntryIdArgTuple:
+ case TypeTableEntryIdOpaque:
+ return nullptr;
+ default:
+ {
+ // Lookup an available value in our cache.
+ auto entry = ira->codegen->type_info_cache.maybe_get(type_entry);
+ if (entry != nullptr)
+ return entry->value;
+
+ // Fallthrough if we don't find one.
+ }
+ case TypeTableEntryIdInt:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Int");
+
+ ConstExprValue *fields = create_const_vals(2);
+ result->data.x_struct.fields = fields;
+
+ // is_signed: bool
+ ensure_field_index(result->type, "is_signed", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_bool;
+ fields[0].data.x_bool = type_entry->data.integral.is_signed;
+ // bits: u8
+ ensure_field_index(result->type, "bits", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_u8;
+ bigint_init_unsigned(&fields[1].data.x_bigint, type_entry->data.integral.bit_count);
+
+ break;
+ }
+ case TypeTableEntryIdFloat:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Float");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // bits: u8
+ ensure_field_index(result->type, "bits", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_u8;
+ bigint_init_unsigned(&fields[0].data.x_bigint, type_entry->data.floating.bit_count);
+
+ break;
+ }
+ case TypeTableEntryIdPointer:
+ {
+ result = create_ptr_like_type_info("Pointer", type_entry);
+ break;
+ }
+ case TypeTableEntryIdArray:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Array");
+
+ ConstExprValue *fields = create_const_vals(2);
+ result->data.x_struct.fields = fields;
+
+ // len: usize
+ ensure_field_index(result->type, "len", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_usize;
+ bigint_init_unsigned(&fields[0].data.x_bigint, type_entry->data.array.len);
+ // child: type
+ ensure_field_index(result->type, "child", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_type;
+ fields[1].data.x_type = type_entry->data.array.child_type;
+
+ break;
+ }
+ case TypeTableEntryIdMaybe:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Nullable");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // child: type
+ ensure_field_index(result->type, "child", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_type;
+ fields[0].data.x_type = type_entry->data.maybe.child_type;
+
+ break;
+ }
+ case TypeTableEntryIdPromise:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Promise");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // @TODO ?type instead of using @typeOf(undefined) when we have no type.
+ // child: type
+ ensure_field_index(result->type, "child", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_type;
+
+ if (type_entry->data.promise.result_type == nullptr)
+ fields[0].data.x_type = ira->codegen->builtin_types.entry_undef;
+ else
+ fields[0].data.x_type = type_entry->data.promise.result_type;
+
+ break;
+ }
+ case TypeTableEntryIdEnum:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Enum");
+
+ ConstExprValue *fields = create_const_vals(4);
+ result->data.x_struct.fields = fields;
+
+ // layout: ContainerLayout
+ ensure_field_index(result->type, "layout", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "ContainerLayout");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.enumeration.layout);
+ // tag_type: type
+ ensure_field_index(result->type, "tag_type", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_type;
+ fields[1].data.x_type = type_entry->data.enumeration.tag_int_type;
+ // fields: []TypeInfo.EnumField
+ ensure_field_index(result->type, "fields", 2);
+
+ TypeTableEntry *type_info_enum_field_type = ir_type_info_get_type(ira, "EnumField");
+ uint32_t enum_field_count = type_entry->data.enumeration.src_field_count;
+
+ ConstExprValue *enum_field_array = create_const_vals(1);
+ enum_field_array->special = ConstValSpecialStatic;
+ enum_field_array->type = get_array_type(ira->codegen, type_info_enum_field_type, enum_field_count);
+ enum_field_array->data.x_array.special = ConstArraySpecialNone;
+ enum_field_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ enum_field_array->data.x_array.s_none.elements = create_const_vals(enum_field_count);
+
+ init_const_slice(ira->codegen, &fields[2], enum_field_array, 0, enum_field_count, false);
+
+ for (uint32_t enum_field_index = 0; enum_field_index < enum_field_count; enum_field_index++)
+ {
+ TypeEnumField *enum_field = &type_entry->data.enumeration.fields[enum_field_index];
+ ConstExprValue *enum_field_val = &enum_field_array->data.x_array.s_none.elements[enum_field_index];
+ make_enum_field_val(enum_field_val, enum_field, type_info_enum_field_type);
+ enum_field_val->data.x_struct.parent.id = ConstParentIdArray;
+ enum_field_val->data.x_struct.parent.data.p_array.array_val = enum_field_array;
+ enum_field_val->data.x_struct.parent.data.p_array.elem_index = enum_field_index;
+ }
+ // defs: []TypeInfo.Definition
+ ensure_field_index(result->type, "defs", 3);
+ ir_make_type_info_defs(ira, &fields[3], type_entry->data.enumeration.decls_scope);
+
+ break;
+ }
+ case TypeTableEntryIdErrorSet:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "ErrorSet");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // errors: []TypeInfo.Error
+ ensure_field_index(result->type, "errors", 0);
+
+ TypeTableEntry *type_info_error_type = ir_type_info_get_type(ira, "Error");
+ uint32_t error_count = type_entry->data.error_set.err_count;
+ ConstExprValue *error_array = create_const_vals(1);
+ error_array->special = ConstValSpecialStatic;
+ error_array->type = get_array_type(ira->codegen, type_info_error_type, error_count);
+ error_array->data.x_array.special = ConstArraySpecialNone;
+ error_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ error_array->data.x_array.s_none.elements = create_const_vals(error_count);
+
+ init_const_slice(ira->codegen, &fields[0], error_array, 0, error_count, false);
+ for (uint32_t error_index = 0; error_index < error_count; error_index++)
+ {
+ ErrorTableEntry *error = type_entry->data.error_set.errors[error_index];
+ ConstExprValue *error_val = &error_array->data.x_array.s_none.elements[error_index];
+
+ error_val->special = ConstValSpecialStatic;
+ error_val->type = type_info_error_type;
+
+ ConstExprValue *inner_fields = create_const_vals(2);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_usize;
+
+ ConstExprValue *name = nullptr;
+ if (error->cached_error_name_val != nullptr)
+ name = error->cached_error_name_val;
+ if (name == nullptr)
+ name = create_const_str_lit(ira->codegen, &error->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(&error->name), true);
+ bigint_init_unsigned(&inner_fields[1].data.x_bigint, error->value);
+
+ error_val->data.x_struct.fields = inner_fields;
+ error_val->data.x_struct.parent.id = ConstParentIdArray;
+ error_val->data.x_struct.parent.data.p_array.array_val = error_array;
+ error_val->data.x_struct.parent.data.p_array.elem_index = error_index;
+ }
+
+ break;
+ }
+ case TypeTableEntryIdErrorUnion:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "ErrorUnion");
+
+ ConstExprValue *fields = create_const_vals(2);
+ result->data.x_struct.fields = fields;
+
+ // error_set: type
+ ensure_field_index(result->type, "error_set", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_type;
+ fields[0].data.x_type = type_entry->data.error_union.err_set_type;
+
+ // payload: type
+ ensure_field_index(result->type, "payload", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_type;
+ fields[1].data.x_type = type_entry->data.error_union.payload_type;
+
+ break;
+ }
+ case TypeTableEntryIdUnion:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Union");
+
+ ConstExprValue *fields = create_const_vals(4);
+ result->data.x_struct.fields = fields;
+
+ // layout: ContainerLayout
+ ensure_field_index(result->type, "layout", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "ContainerLayout");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.unionation.layout);
+ // tag_type: type
+ ensure_field_index(result->type, "tag_type", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_type;
+ // @TODO ?type instead of using @typeOf(undefined) when we have no type.
+ AstNode *union_decl_node = type_entry->data.unionation.decl_node;
+ if (union_decl_node->data.container_decl.auto_enum ||
+ union_decl_node->data.container_decl.init_arg_expr != nullptr)
+ {
+ fields[1].data.x_type = type_entry->data.unionation.tag_type;
+ }
+ else
+ fields[1].data.x_type = ira->codegen->builtin_types.entry_undef;
+ // fields: []TypeInfo.UnionField
+ ensure_field_index(result->type, "fields", 2);
+
+ TypeTableEntry *type_info_union_field_type = ir_type_info_get_type(ira, "UnionField");
+ uint32_t union_field_count = type_entry->data.unionation.src_field_count;
+
+ ConstExprValue *union_field_array = create_const_vals(1);
+ union_field_array->special = ConstValSpecialStatic;
+ union_field_array->type = get_array_type(ira->codegen, type_info_union_field_type, union_field_count);
+ union_field_array->data.x_array.special = ConstArraySpecialNone;
+ union_field_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ union_field_array->data.x_array.s_none.elements = create_const_vals(union_field_count);
+
+ init_const_slice(ira->codegen, &fields[2], union_field_array, 0, union_field_count, false);
+
+ TypeTableEntry *type_info_enum_field_type = ir_type_info_get_type(ira, "EnumField");
+
+ for (uint32_t union_field_index = 0; union_field_index < union_field_count; union_field_index++)
+ {
+ TypeUnionField *union_field = &type_entry->data.unionation.fields[union_field_index];
+ ConstExprValue *union_field_val = &union_field_array->data.x_array.s_none.elements[union_field_index];
+
+ union_field_val->special = ConstValSpecialStatic;
+ union_field_val->type = type_info_union_field_type;
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = get_maybe_type(ira->codegen, type_info_enum_field_type);
+
+ if (fields[1].data.x_type == ira->codegen->builtin_types.entry_undef)
+ inner_fields[1].data.x_maybe = nullptr;
+ else
+ {
+ inner_fields[1].data.x_maybe = create_const_vals(1);
+ make_enum_field_val(inner_fields[1].data.x_maybe, union_field->enum_field, type_info_enum_field_type);
+ }
+
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = ira->codegen->builtin_types.entry_type;
+ inner_fields[2].data.x_type = union_field->type_entry;
+
+ ConstExprValue *name = create_const_str_lit(ira->codegen, union_field->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(union_field->name), true);
+
+ union_field_val->data.x_struct.fields = inner_fields;
+ union_field_val->data.x_struct.parent.id = ConstParentIdArray;
+ union_field_val->data.x_struct.parent.data.p_array.array_val = union_field_array;
+ union_field_val->data.x_struct.parent.data.p_array.elem_index = union_field_index;
+ }
+ // defs: []TypeInfo.Definition
+ ensure_field_index(result->type, "defs", 3);
+ ir_make_type_info_defs(ira, &fields[3], type_entry->data.unionation.decls_scope);
+
+ break;
+ }
+ case TypeTableEntryIdStruct:
+ {
+ if (type_entry->data.structure.is_slice) {
+ Buf ptr_field_name = BUF_INIT;
+ buf_init_from_str(&ptr_field_name, "ptr");
+ TypeTableEntry *ptr_type = type_entry->data.structure.fields_by_name.get(&ptr_field_name)->type_entry;
+ ensure_complete_type(ira->codegen, ptr_type);
+ buf_deinit(&ptr_field_name);
+
+ result = create_ptr_like_type_info("Slice", ptr_type);
+ break;
+ }
+
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Struct");
+
+ ConstExprValue *fields = create_const_vals(3);
+ result->data.x_struct.fields = fields;
+
+ // layout: ContainerLayout
+ ensure_field_index(result->type, "layout", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "ContainerLayout");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.structure.layout);
+ // fields: []TypeInfo.StructField
+ ensure_field_index(result->type, "fields", 1);
+
+ TypeTableEntry *type_info_struct_field_type = ir_type_info_get_type(ira, "StructField");
+ uint32_t struct_field_count = type_entry->data.structure.src_field_count;
+
+ ConstExprValue *struct_field_array = create_const_vals(1);
+ struct_field_array->special = ConstValSpecialStatic;
+ struct_field_array->type = get_array_type(ira->codegen, type_info_struct_field_type, struct_field_count);
+ struct_field_array->data.x_array.special = ConstArraySpecialNone;
+ struct_field_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ struct_field_array->data.x_array.s_none.elements = create_const_vals(struct_field_count);
+
+ init_const_slice(ira->codegen, &fields[1], struct_field_array, 0, struct_field_count, false);
+
+ for (uint32_t struct_field_index = 0; struct_field_index < struct_field_count; struct_field_index++)
+ {
+ TypeStructField *struct_field = &type_entry->data.structure.fields[struct_field_index];
+ ConstExprValue *struct_field_val = &struct_field_array->data.x_array.s_none.elements[struct_field_index];
+
+ struct_field_val->special = ConstValSpecialStatic;
+ struct_field_val->type = type_info_struct_field_type;
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = get_maybe_type(ira->codegen, ira->codegen->builtin_types.entry_usize);
+
+ if (!type_has_bits(struct_field->type_entry))
+ inner_fields[1].data.x_maybe = nullptr;
+ else
+ {
+ size_t byte_offset = LLVMOffsetOfElement(ira->codegen->target_data_ref, type_entry->type_ref, struct_field->gen_index);
+ inner_fields[1].data.x_maybe = create_const_vals(1);
+ inner_fields[1].data.x_maybe->type = ira->codegen->builtin_types.entry_usize;
+ bigint_init_unsigned(&inner_fields[1].data.x_maybe->data.x_bigint, byte_offset);
+ }
+
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = ira->codegen->builtin_types.entry_type;
+ inner_fields[2].data.x_type = struct_field->type_entry;
+
+ ConstExprValue *name = create_const_str_lit(ira->codegen, struct_field->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(struct_field->name), true);
+
+ struct_field_val->data.x_struct.fields = inner_fields;
+ struct_field_val->data.x_struct.parent.id = ConstParentIdArray;
+ struct_field_val->data.x_struct.parent.data.p_array.array_val = struct_field_array;
+ struct_field_val->data.x_struct.parent.data.p_array.elem_index = struct_field_index;
+ }
+ // defs: []TypeInfo.Definition
+ ensure_field_index(result->type, "defs", 2);
+ ir_make_type_info_defs(ira, &fields[2], type_entry->data.structure.decls_scope);
+
+ break;
+ }
+ case TypeTableEntryIdFn:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Fn");
+
+ ConstExprValue *fields = create_const_vals(6);
+ result->data.x_struct.fields = fields;
+
+ // @TODO Fix type = undefined with ?type
+
+ // calling_convention: TypeInfo.CallingConvention
+ ensure_field_index(result->type, "calling_convention", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "CallingConvention");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.fn.fn_type_id.cc);
+ // is_generic: bool
+ ensure_field_index(result->type, "is_generic", 1);
+ bool is_generic = type_entry->data.fn.is_generic;
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_bool;
+ fields[1].data.x_bool = is_generic;
+            // is_var_args: bool
+ ensure_field_index(result->type, "is_var_args", 2);
+ bool is_varargs = type_entry->data.fn.fn_type_id.is_var_args;
+ fields[2].special = ConstValSpecialStatic;
+ fields[2].type = ira->codegen->builtin_types.entry_bool;
+ fields[2].data.x_bool = type_entry->data.fn.fn_type_id.is_var_args;
+ // return_type: type
+ ensure_field_index(result->type, "return_type", 3);
+ fields[3].special = ConstValSpecialStatic;
+ fields[3].type = ira->codegen->builtin_types.entry_type;
+ if (type_entry->data.fn.fn_type_id.return_type == nullptr)
+ fields[3].data.x_type = ira->codegen->builtin_types.entry_undef;
+ else
+ fields[3].data.x_type = type_entry->data.fn.fn_type_id.return_type;
+ // async_allocator_type: type
+ ensure_field_index(result->type, "async_allocator_type", 4);
+ fields[4].special = ConstValSpecialStatic;
+ fields[4].type = ira->codegen->builtin_types.entry_type;
+ if (type_entry->data.fn.fn_type_id.async_allocator_type == nullptr)
+ fields[4].data.x_type = ira->codegen->builtin_types.entry_undef;
+ else
+ fields[4].data.x_type = type_entry->data.fn.fn_type_id.async_allocator_type;
+ // args: []TypeInfo.FnArg
+ TypeTableEntry *type_info_fn_arg_type = ir_type_info_get_type(ira, "FnArg");
+ size_t fn_arg_count = type_entry->data.fn.fn_type_id.param_count -
+ (is_varargs && type_entry->data.fn.fn_type_id.cc != CallingConventionC);
+
+ ConstExprValue *fn_arg_array = create_const_vals(1);
+ fn_arg_array->special = ConstValSpecialStatic;
+ fn_arg_array->type = get_array_type(ira->codegen, type_info_fn_arg_type, fn_arg_count);
+ fn_arg_array->data.x_array.special = ConstArraySpecialNone;
+ fn_arg_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ fn_arg_array->data.x_array.s_none.elements = create_const_vals(fn_arg_count);
+
+ init_const_slice(ira->codegen, &fields[5], fn_arg_array, 0, fn_arg_count, false);
+
+ for (size_t fn_arg_index = 0; fn_arg_index < fn_arg_count; fn_arg_index++)
+ {
+ FnTypeParamInfo *fn_param_info = &type_entry->data.fn.fn_type_id.param_info[fn_arg_index];
+ ConstExprValue *fn_arg_val = &fn_arg_array->data.x_array.s_none.elements[fn_arg_index];
+
+ fn_arg_val->special = ConstValSpecialStatic;
+ fn_arg_val->type = type_info_fn_arg_type;
+
+ bool arg_is_generic = fn_param_info->type == nullptr;
+ if (arg_is_generic) assert(is_generic);
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ inner_fields[0].special = ConstValSpecialStatic;
+ inner_fields[0].type = ira->codegen->builtin_types.entry_bool;
+ inner_fields[0].data.x_bool = arg_is_generic;
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_bool;
+ inner_fields[1].data.x_bool = fn_param_info->is_noalias;
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = ira->codegen->builtin_types.entry_type;
+
+ if (arg_is_generic)
+ inner_fields[2].data.x_type = ira->codegen->builtin_types.entry_undef;
+ else
+ inner_fields[2].data.x_type = fn_param_info->type;
+
+ fn_arg_val->data.x_struct.fields = inner_fields;
+ fn_arg_val->data.x_struct.parent.id = ConstParentIdArray;
+ fn_arg_val->data.x_struct.parent.data.p_array.array_val = fn_arg_array;
+ fn_arg_val->data.x_struct.parent.data.p_array.elem_index = fn_arg_index;
+ }
+
+ break;
+ }
+ case TypeTableEntryIdBoundFn:
+ {
+ TypeTableEntry *fn_type = type_entry->data.bound_fn.fn_type;
+ assert(fn_type->id == TypeTableEntryIdFn);
+ result = ir_make_type_info_value(ira, fn_type);
+
+ break;
+ }
+ }
+
+ assert(result != nullptr);
+ ira->codegen->type_info_cache.put(type_entry, result);
+ return result;
+}
+
+static TypeTableEntry *ir_analyze_instruction_type_info(IrAnalyze *ira,
+ IrInstructionTypeInfo *instruction)
+{
+ IrInstruction *type_value = instruction->type_value->other;
+ TypeTableEntry *type_entry = ir_resolve_type(ira, type_value);
+ if (type_is_invalid(type_entry))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ TypeTableEntry *result_type = ir_type_info_get_type(ira, nullptr);
+
+ ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+ out_val->type = result_type;
+ bigint_init_unsigned(&out_val->data.x_union.tag, type_id_index(type_entry));
+
+ ConstExprValue *payload = ir_make_type_info_value(ira, type_entry);
+ out_val->data.x_union.payload = payload;
+
+ if (payload != nullptr)
+ {
+ assert(payload->type->id == TypeTableEntryIdStruct);
+ payload->data.x_struct.parent.id = ConstParentIdUnion;
+ payload->data.x_struct.parent.data.p_union.union_val = out_val;
+ }
+
+ return result_type;
+}
+
static TypeTableEntry *ir_analyze_instruction_type_id(IrAnalyze *ira,
IrInstructionTypeId *instruction)
{
@@ -15696,7 +16671,7 @@ static TypeTableEntry *ir_analyze_instruction_type_id(IrAnalyze *ira,
TypeTableEntry *result_type = var_value->data.x_type;
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
- bigint_init_unsigned(&out_val->data.x_enum_tag, type_id_index(type_entry->id));
+ bigint_init_unsigned(&out_val->data.x_enum_tag, type_id_index(type_entry));
return result_type;
}
@@ -18584,6 +19559,8 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_field_parent_ptr(ira, (IrInstructionFieldParentPtr *)instruction);
case IrInstructionIdOffsetOf:
return ir_analyze_instruction_offset_of(ira, (IrInstructionOffsetOf *)instruction);
+ case IrInstructionIdTypeInfo:
+ return ir_analyze_instruction_type_info(ira, (IrInstructionTypeInfo *) instruction);
case IrInstructionIdTypeId:
return ir_analyze_instruction_type_id(ira, (IrInstructionTypeId *)instruction);
case IrInstructionIdSetEvalBranchQuota:
@@ -18850,6 +19827,7 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdTagName:
case IrInstructionIdFieldParentPtr:
case IrInstructionIdOffsetOf:
+ case IrInstructionIdTypeInfo:
case IrInstructionIdTypeId:
case IrInstructionIdAlignCast:
case IrInstructionIdOpaqueType:
diff --git a/src/ir_print.cpp b/src/ir_print.cpp
index a77ae244d4..9678120f1d 100644
--- a/src/ir_print.cpp
+++ b/src/ir_print.cpp
@@ -966,6 +966,12 @@ static void ir_print_offset_of(IrPrint *irp, IrInstructionOffsetOf *instruction)
fprintf(irp->f, ")");
}
+static void ir_print_type_info(IrPrint *irp, IrInstructionTypeInfo *instruction) {
+ fprintf(irp->f, "@typeInfo(");
+ ir_print_other_instruction(irp, instruction->type_value);
+ fprintf(irp->f, ")");
+}
+
static void ir_print_type_id(IrPrint *irp, IrInstructionTypeId *instruction) {
fprintf(irp->f, "@typeId(");
ir_print_other_instruction(irp, instruction->type_value);
@@ -1536,6 +1542,9 @@ static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
case IrInstructionIdOffsetOf:
ir_print_offset_of(irp, (IrInstructionOffsetOf *)instruction);
break;
+ case IrInstructionIdTypeInfo:
+ ir_print_type_info(irp, (IrInstructionTypeInfo *)instruction);
+ break;
case IrInstructionIdTypeId:
ir_print_type_id(irp, (IrInstructionTypeId *)instruction);
break;
diff --git a/src/translate_c.cpp b/src/translate_c.cpp
index 70a98dcc2e..50ff073008 100644
--- a/src/translate_c.cpp
+++ b/src/translate_c.cpp
@@ -3672,6 +3672,7 @@ static AstNode *resolve_typedef_decl(Context *c, const TypedefNameDecl *typedef_
if (existing_entry) {
return existing_entry->value;
}
+
QualType child_qt = typedef_decl->getUnderlyingType();
Buf *type_name = buf_create_from_str(decl_name(typedef_decl));
@@ -3705,16 +3706,19 @@ static AstNode *resolve_typedef_decl(Context *c, const TypedefNameDecl *typedef_
// use the name of this typedef
// TODO
+ // trans_qual_type here might cause us to look at this typedef again so we put the item in the map first
+ AstNode *symbol_node = trans_create_node_symbol(c, type_name);
+ c->decl_table.put(typedef_decl->getCanonicalDecl(), symbol_node);
+
AstNode *type_node = trans_qual_type(c, child_qt, typedef_decl->getLocation());
if (type_node == nullptr) {
emit_warning(c, typedef_decl->getLocation(), "typedef %s - unresolved child type", buf_ptr(type_name));
c->decl_table.put(typedef_decl, nullptr);
+ // TODO add global var with type_name equal to @compileError("unable to resolve C type")
return nullptr;
}
add_global_var(c, type_name, type_node);
- AstNode *symbol_node = trans_create_node_symbol(c, type_name);
- c->decl_table.put(typedef_decl->getCanonicalDecl(), symbol_node);
return symbol_node;
}
@@ -3749,6 +3753,7 @@ static AstNode *resolve_enum_decl(Context *c, const EnumDecl *enum_decl) {
return demote_enum_to_opaque(c, enum_decl, full_type_name, bare_name);
}
+
bool pure_enum = true;
uint32_t field_count = 0;
for (auto it = enum_def->enumerator_begin(),
@@ -3760,84 +3765,53 @@ static AstNode *resolve_enum_decl(Context *c, const EnumDecl *enum_decl) {
pure_enum = false;
}
}
-
AstNode *tag_int_type = trans_qual_type(c, enum_decl->getIntegerType(), enum_decl->getLocation());
assert(tag_int_type);
- if (pure_enum) {
- AstNode *enum_node = trans_create_node(c, NodeTypeContainerDecl);
- enum_node->data.container_decl.kind = ContainerKindEnum;
- enum_node->data.container_decl.layout = ContainerLayoutExtern;
- // TODO only emit this tag type if the enum tag type is not the default.
- // I don't know what the default is, need to figure out how clang is deciding.
- // it appears to at least be different across gcc/msvc
- if (!c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::UInt) &&
- !c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::Int))
- {
- enum_node->data.container_decl.init_arg_expr = tag_int_type;
- }
-
- enum_node->data.container_decl.fields.resize(field_count);
- uint32_t i = 0;
- for (auto it = enum_def->enumerator_begin(),
- it_end = enum_def->enumerator_end();
- it != it_end; ++it, i += 1)
- {
- const EnumConstantDecl *enum_const = *it;
-
- Buf *enum_val_name = buf_create_from_str(decl_name(enum_const));
- Buf *field_name;
- if (bare_name != nullptr && buf_starts_with_buf(enum_val_name, bare_name)) {
- field_name = buf_slice(enum_val_name, buf_len(bare_name), buf_len(enum_val_name));
- } else {
- field_name = enum_val_name;
- }
-
- AstNode *field_node = trans_create_node(c, NodeTypeStructField);
- field_node->data.struct_field.name = field_name;
- field_node->data.struct_field.type = nullptr;
- enum_node->data.container_decl.fields.items[i] = field_node;
-
- // in C each enum value is in the global namespace. so we put them there too.
- // at this point we can rely on the enum emitting successfully
- if (is_anonymous) {
- AstNode *lit_node = trans_create_node_unsigned(c, i);
- add_global_var(c, enum_val_name, lit_node);
- } else {
- AstNode *field_access_node = trans_create_node_field_access(c,
- trans_create_node_symbol(c, full_type_name), field_name);
- add_global_var(c, enum_val_name, field_access_node);
- }
- }
-
- if (is_anonymous) {
- c->decl_table.put(enum_decl->getCanonicalDecl(), enum_node);
- return enum_node;
- } else {
- AstNode *symbol_node = trans_create_node_symbol(c, full_type_name);
- add_global_weak_alias(c, bare_name, full_type_name);
- add_global_var(c, full_type_name, enum_node);
- c->decl_table.put(enum_decl->getCanonicalDecl(), symbol_node);
- return enum_node;
- }
+ AstNode *enum_node = trans_create_node(c, NodeTypeContainerDecl);
+ enum_node->data.container_decl.kind = ContainerKindEnum;
+ enum_node->data.container_decl.layout = ContainerLayoutExtern;
+ // TODO only emit this tag type if the enum tag type is not the default.
+ // I don't know what the default is, need to figure out how clang is deciding.
+ // it appears to at least be different across gcc/msvc
+ if (!c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::UInt) &&
+ !c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::Int))
+ {
+ enum_node->data.container_decl.init_arg_expr = tag_int_type;
}
-
- // TODO after issue #305 is solved, make this be an enum with tag_int_type
- // as the integer type and set the custom enum values
- AstNode *enum_node = tag_int_type;
-
-
- // add variables for all the values with enum_node
+ enum_node->data.container_decl.fields.resize(field_count);
+ uint32_t i = 0;
for (auto it = enum_def->enumerator_begin(),
it_end = enum_def->enumerator_end();
- it != it_end; ++it)
+ it != it_end; ++it, i += 1)
{
const EnumConstantDecl *enum_const = *it;
Buf *enum_val_name = buf_create_from_str(decl_name(enum_const));
- AstNode *int_node = trans_create_node_apint(c, enum_const->getInitVal());
- AstNode *var_node = add_global_var(c, enum_val_name, int_node);
- var_node->data.variable_declaration.type = tag_int_type;
+ Buf *field_name;
+ if (bare_name != nullptr && buf_starts_with_buf(enum_val_name, bare_name)) {
+ field_name = buf_slice(enum_val_name, buf_len(bare_name), buf_len(enum_val_name));
+ } else {
+ field_name = enum_val_name;
+ }
+
+ AstNode *int_node = pure_enum && !is_anonymous ? nullptr : trans_create_node_apint(c, enum_const->getInitVal());
+ AstNode *field_node = trans_create_node(c, NodeTypeStructField);
+ field_node->data.struct_field.name = field_name;
+ field_node->data.struct_field.type = nullptr;
+ field_node->data.struct_field.value = int_node;
+ enum_node->data.container_decl.fields.items[i] = field_node;
+
+ // in C each enum value is in the global namespace. so we put them there too.
+ // at this point we can rely on the enum emitting successfully
+ if (is_anonymous) {
+ Buf *enum_val_name = buf_create_from_str(decl_name(enum_const));
+ add_global_var(c, enum_val_name, int_node);
+ } else {
+ AstNode *field_access_node = trans_create_node_field_access(c,
+ trans_create_node_symbol(c, full_type_name), field_name);
+ add_global_var(c, enum_val_name, field_access_node);
+ }
}
if (is_anonymous) {
@@ -3848,7 +3822,7 @@ static AstNode *resolve_enum_decl(Context *c, const EnumDecl *enum_decl) {
add_global_weak_alias(c, bare_name, full_type_name);
add_global_var(c, full_type_name, enum_node);
c->decl_table.put(enum_decl->getCanonicalDecl(), symbol_node);
- return symbol_node;
+ return enum_node;
}
}
diff --git a/std/array_list.zig b/std/array_list.zig
index 8c8426e1e5..d1165c626d 100644
--- a/std/array_list.zig
+++ b/std/array_list.zig
@@ -28,11 +28,11 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
};
}
- pub fn deinit(l: &Self) void {
+ pub fn deinit(l: &const Self) void {
l.allocator.free(l.items);
}
- pub fn toSlice(l: &Self) []align(A) T {
+ pub fn toSlice(l: &const Self) []align(A) T {
return l.items[0..l.len];
}
@@ -44,6 +44,10 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
return l.toSliceConst()[n];
}
+ pub fn count(self: &const Self) usize {
+ return self.len;
+ }
+
/// ArrayList takes ownership of the passed in slice. The slice must have been
/// allocated with `allocator`.
/// Deinitialize with `deinit` or use `toOwnedSlice`.
@@ -127,6 +131,27 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
if (self.len == 0) return null;
return self.pop();
}
+
+ pub const Iterator = struct {
+ list: &const Self,
+ // how many items have we returned
+ count: usize,
+
+ pub fn next(it: &Iterator) ?T {
+ if (it.count >= it.list.len) return null;
+ const val = it.list.at(it.count);
+ it.count += 1;
+ return val;
+ }
+
+ pub fn reset(it: &Iterator) void {
+ it.count = 0;
+ }
+ };
+
+ pub fn iterator(self: &const Self) Iterator {
+ return Iterator { .list = self, .count = 0 };
+ }
};
}
@@ -148,6 +173,14 @@ test "basic ArrayList test" {
}
}
+ for (list.toSlice()) |v, i| {
+ assert(v == i32(i + 1));
+ }
+
+ for (list.toSliceConst()) |v, i| {
+ assert(v == i32(i + 1));
+ }
+
assert(list.pop() == 10);
assert(list.len == 9);
@@ -166,6 +199,35 @@ test "basic ArrayList test" {
assert(list.len == 9);
}
+test "iterator ArrayList test" {
+ var list = ArrayList(i32).init(debug.global_allocator);
+ defer list.deinit();
+
+ try list.append(1);
+ try list.append(2);
+ try list.append(3);
+
+ var count : i32 = 0;
+ var it = list.iterator();
+ while (it.next()) |next| {
+ assert(next == count + 1);
+ count += 1;
+ }
+
+ assert(count == 3);
+ assert(it.next() == null);
+ it.reset();
+ count = 0;
+ while (it.next()) |next| {
+ assert(next == count + 1);
+ count += 1;
+ if (count == 2) break;
+ }
+
+ it.reset();
+ assert(?? it.next() == 1);
+}
+
test "insert ArrayList test" {
var list = ArrayList(i32).init(debug.global_allocator);
defer list.deinit();
diff --git a/std/atomic/queue.zig b/std/atomic/queue.zig
index 1acecbab2c..e25c8e6b17 100644
--- a/std/atomic/queue.zig
+++ b/std/atomic/queue.zig
@@ -31,10 +31,10 @@ pub fn Queue(comptime T: type) type {
}
pub fn get(self: &Self) ?&Node {
- var head = @atomicLoad(&Node, &self.head, AtomicOrder.Acquire);
+ var head = @atomicLoad(&Node, &self.head, AtomicOrder.SeqCst);
while (true) {
const node = head.next ?? return null;
- head = @cmpxchgWeak(&Node, &self.head, head, node, AtomicOrder.Release, AtomicOrder.Acquire) ?? return node;
+ head = @cmpxchgWeak(&Node, &self.head, head, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return node;
}
}
};
@@ -49,14 +49,20 @@ const Context = struct {
get_count: usize,
puts_done: u8, // TODO make this a bool
};
-const puts_per_thread = 10000;
+
+// TODO add lazy evaluated build options and then put puts_per_thread behind
+// some option such as: "AggressiveMultithreadedFuzzTest". In the AppVeyor
+// CI we would use a less aggressive setting since at 1 core, while we still
+// want this test to pass, we need a smaller value since there is so much thrashing
+// we would also use a less aggressive setting when running in valgrind
+const puts_per_thread = 500;
const put_thread_count = 3;
test "std.atomic.queue" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
- var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 64 * 1024 * 1024);
+ var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 300 * 1024);
defer direct_allocator.allocator.free(plenty_of_memory);
var fixed_buffer_allocator = std.heap.ThreadSafeFixedBufferAllocator.init(plenty_of_memory);
diff --git a/std/atomic/stack.zig b/std/atomic/stack.zig
index accbcc942a..4a3dbef32b 100644
--- a/std/atomic/stack.zig
+++ b/std/atomic/stack.zig
@@ -35,7 +35,7 @@ pub fn Stack(comptime T: type) type {
}
pub fn pop(self: &Self) ?&Node {
- var root = @atomicLoad(?&Node, &self.root, AtomicOrder.Acquire);
+ var root = @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst);
while (true) {
root = @cmpxchgWeak(?&Node, &self.root, root, (root ?? return null).next, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return root;
}
@@ -56,14 +56,19 @@ const Context = struct {
get_count: usize,
puts_done: u8, // TODO make this a bool
};
-const puts_per_thread = 1000;
+// TODO add lazy evaluated build options and then put puts_per_thread behind
+// some option such as: "AggressiveMultithreadedFuzzTest". In the AppVeyor
+// CI we would use a less aggressive setting since at 1 core, while we still
+// want this test to pass, we need a smaller value since there is so much thrashing
+// we would also use a less aggressive setting when running in valgrind
+const puts_per_thread = 500;
const put_thread_count = 3;
test "std.atomic.stack" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
- var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 64 * 1024 * 1024);
+ var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 300 * 1024);
defer direct_allocator.allocator.free(plenty_of_memory);
var fixed_buffer_allocator = std.heap.ThreadSafeFixedBufferAllocator.init(plenty_of_memory);
diff --git a/std/buf_map.zig b/std/buf_map.zig
index 3e12d9a7d9..57c5830bbe 100644
--- a/std/buf_map.zig
+++ b/std/buf_map.zig
@@ -18,10 +18,10 @@ pub const BufMap = struct {
return self;
}
- pub fn deinit(self: &BufMap) void {
+ pub fn deinit(self: &const BufMap) void {
var it = self.hash_map.iterator();
while (true) {
- const entry = it.next() ?? break;
+ const entry = it.next() ?? break;
self.free(entry.key);
self.free(entry.value);
}
@@ -38,7 +38,7 @@ pub const BufMap = struct {
_ = try self.hash_map.put(key_copy, value_copy);
}
- pub fn get(self: &BufMap, key: []const u8) ?[]const u8 {
+ pub fn get(self: &const BufMap, key: []const u8) ?[]const u8 {
const entry = self.hash_map.get(key) ?? return null;
return entry.value;
}
@@ -50,18 +50,18 @@ pub const BufMap = struct {
}
pub fn count(self: &const BufMap) usize {
- return self.hash_map.size;
+ return self.hash_map.count();
}
pub fn iterator(self: &const BufMap) BufMapHashMap.Iterator {
return self.hash_map.iterator();
}
- fn free(self: &BufMap, value: []const u8) void {
+ fn free(self: &const BufMap, value: []const u8) void {
self.hash_map.allocator.free(value);
}
- fn copy(self: &BufMap, value: []const u8) ![]const u8 {
+ fn copy(self: &const BufMap, value: []const u8) ![]const u8 {
return mem.dupe(self.hash_map.allocator, u8, value);
}
};
diff --git a/std/buf_set.zig b/std/buf_set.zig
index 618b985c41..1badb5bf18 100644
--- a/std/buf_set.zig
+++ b/std/buf_set.zig
@@ -1,6 +1,8 @@
+const std = @import("index.zig");
const HashMap = @import("hash_map.zig").HashMap;
const mem = @import("mem.zig");
const Allocator = mem.Allocator;
+const assert = std.debug.assert;
pub const BufSet = struct {
hash_map: BufSetHashMap,
@@ -14,10 +16,10 @@ pub const BufSet = struct {
return self;
}
- pub fn deinit(self: &BufSet) void {
+ pub fn deinit(self: &const BufSet) void {
var it = self.hash_map.iterator();
while (true) {
- const entry = it.next() ?? break;
+ const entry = it.next() ?? break;
self.free(entry.key);
}
@@ -38,7 +40,7 @@ pub const BufSet = struct {
}
pub fn count(self: &const BufSet) usize {
- return self.hash_map.size;
+ return self.hash_map.count();
}
pub fn iterator(self: &const BufSet) BufSetHashMap.Iterator {
@@ -49,14 +51,30 @@ pub const BufSet = struct {
return self.hash_map.allocator;
}
- fn free(self: &BufSet, value: []const u8) void {
+ fn free(self: &const BufSet, value: []const u8) void {
self.hash_map.allocator.free(value);
}
- fn copy(self: &BufSet, value: []const u8) ![]const u8 {
+ fn copy(self: &const BufSet, value: []const u8) ![]const u8 {
const result = try self.hash_map.allocator.alloc(u8, value.len);
mem.copy(u8, result, value);
return result;
}
};
+test "BufSet" {
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var bufset = BufSet.init(&direct_allocator.allocator);
+ defer bufset.deinit();
+
+ try bufset.put("x");
+ assert(bufset.count() == 1);
+ bufset.delete("x");
+ assert(bufset.count() == 0);
+
+ try bufset.put("x");
+ try bufset.put("y");
+ try bufset.put("z");
+}
diff --git a/std/buffer.zig b/std/buffer.zig
index e0892d5933..041d891dec 100644
--- a/std/buffer.zig
+++ b/std/buffer.zig
@@ -66,7 +66,7 @@ pub const Buffer = struct {
self.list.deinit();
}
- pub fn toSlice(self: &Buffer) []u8 {
+ pub fn toSlice(self: &const Buffer) []u8 {
return self.list.toSlice()[0..self.len()];
}
@@ -166,5 +166,5 @@ test "simple Buffer" {
assert(buf.endsWith("orld"));
try buf2.resize(4);
- assert(buf.startsWith(buf2.toSliceConst()));
+ assert(buf.startsWith(buf2.toSlice()));
}
diff --git a/std/hash_map.zig b/std/hash_map.zig
index 29dd233753..2a178d9d44 100644
--- a/std/hash_map.zig
+++ b/std/hash_map.zig
@@ -54,6 +54,14 @@ pub fn HashMap(comptime K: type, comptime V: type,
}
unreachable; // no next item
}
+
+ // Reset the iterator to the initial index
+ pub fn reset(it: &Iterator) void {
+ it.count = 0;
+ it.index = 0;
+ // Resetting the modification count too
+ it.initial_modification_count = it.hm.modification_count;
+ }
};
pub fn init(allocator: &Allocator) Self {
@@ -66,7 +74,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
};
}
- pub fn deinit(hm: &Self) void {
+ pub fn deinit(hm: &const Self) void {
hm.allocator.free(hm.entries);
}
@@ -79,6 +87,10 @@ pub fn HashMap(comptime K: type, comptime V: type,
hm.incrementModificationCount();
}
+ pub fn count(hm: &const Self) usize {
+ return hm.size;
+ }
+
/// Returns the value that was already there.
pub fn put(hm: &Self, key: K, value: &const V) !?V {
if (hm.entries.len == 0) {
@@ -102,14 +114,14 @@ pub fn HashMap(comptime K: type, comptime V: type,
return hm.internalPut(key, value);
}
- pub fn get(hm: &Self, key: K) ?&Entry {
+ pub fn get(hm: &const Self, key: K) ?&Entry {
if (hm.entries.len == 0) {
return null;
}
return hm.internalGet(key);
}
- pub fn contains(hm: &Self, key: K) bool {
+ pub fn contains(hm: &const Self, key: K) bool {
return hm.get(key) != null;
}
@@ -218,7 +230,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
unreachable; // put into a full map
}
- fn internalGet(hm: &Self, key: K) ?&Entry {
+ fn internalGet(hm: &const Self, key: K) ?&Entry {
const start_index = hm.keyToIndex(key);
{var roll_over: usize = 0; while (roll_over <= hm.max_distance_from_start_index) : (roll_over += 1) {
const index = (start_index + roll_over) % hm.entries.len;
@@ -230,7 +242,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
return null;
}
- fn keyToIndex(hm: &Self, key: K) usize {
+ fn keyToIndex(hm: &const Self, key: K) usize {
return usize(hash(key)) % hm.entries.len;
}
};
@@ -252,12 +264,52 @@ test "basic hash map usage" {
assert(??(map.put(5, 66) catch unreachable) == 55);
assert(??(map.put(5, 55) catch unreachable) == 66);
+ assert(map.contains(2));
assert((??map.get(2)).value == 22);
_ = map.remove(2);
assert(map.remove(2) == null);
assert(map.get(2) == null);
}
+test "iterator hash map" {
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var reset_map = HashMap(i32, i32, hash_i32, eql_i32).init(&direct_allocator.allocator);
+ defer reset_map.deinit();
+
+ assert((reset_map.put(1, 11) catch unreachable) == null);
+ assert((reset_map.put(2, 22) catch unreachable) == null);
+ assert((reset_map.put(3, 33) catch unreachable) == null);
+
+ var keys = []i32 { 1, 2, 3 };
+ var values = []i32 { 11, 22, 33 };
+
+ var it = reset_map.iterator();
+ var count : usize = 0;
+ while (it.next()) |next| {
+ assert(next.key == keys[count]);
+ assert(next.value == values[count]);
+ count += 1;
+ }
+
+ assert(count == 3);
+ assert(it.next() == null);
+ it.reset();
+ count = 0;
+ while (it.next()) |next| {
+ assert(next.key == keys[count]);
+ assert(next.value == values[count]);
+ count += 1;
+ if (count == 2) break;
+ }
+
+ it.reset();
+ var entry = ?? it.next();
+ assert(entry.key == keys[0]);
+ assert(entry.value == values[0]);
+}
+
fn hash_i32(x: i32) u32 {
return @bitCast(u32, x);
}
diff --git a/std/index.zig b/std/index.zig
index d6a1e3c94d..8abfa3db88 100644
--- a/std/index.zig
+++ b/std/index.zig
@@ -7,6 +7,7 @@ pub const BufferOutStream = @import("buffer.zig").BufferOutStream;
pub const HashMap = @import("hash_map.zig").HashMap;
pub const LinkedList = @import("linked_list.zig").LinkedList;
pub const IntrusiveLinkedList = @import("linked_list.zig").IntrusiveLinkedList;
+pub const SegmentedList = @import("segmented_list.zig").SegmentedList;
pub const atomic = @import("atomic/index.zig");
pub const base64 = @import("base64.zig");
@@ -23,6 +24,7 @@ pub const fmt = @import("fmt/index.zig");
pub const hash = @import("hash/index.zig");
pub const heap = @import("heap.zig");
pub const io = @import("io.zig");
+pub const json = @import("json.zig");
pub const macho = @import("macho.zig");
pub const math = @import("math/index.zig");
pub const mem = @import("mem.zig");
@@ -42,6 +44,7 @@ test "std" {
_ = @import("buffer.zig");
_ = @import("hash_map.zig");
_ = @import("linked_list.zig");
+ _ = @import("segmented_list.zig");
_ = @import("base64.zig");
_ = @import("build.zig");
@@ -56,6 +59,7 @@ test "std" {
_ = @import("fmt/index.zig");
_ = @import("hash/index.zig");
_ = @import("io.zig");
+ _ = @import("json.zig");
_ = @import("macho.zig");
_ = @import("math/index.zig");
_ = @import("mem.zig");
diff --git a/std/json.zig b/std/json.zig
new file mode 100644
index 0000000000..6f853501ed
--- /dev/null
+++ b/std/json.zig
@@ -0,0 +1,1304 @@
+// JSON parser conforming to RFC8259.
+//
+// https://tools.ietf.org/html/rfc8259
+
+const std = @import("index.zig");
+const mem = std.mem;
+
+const u1 = @IntType(false, 1);
+const u256 = @IntType(false, 256);
+
+// A single token slice into the parent string.
+//
+// Use `token.slice()` on the input at the current position to get the current slice.
+pub const Token = struct {
+ id: Id,
+ // How many bytes do we skip before counting
+ offset: u1,
+ // Whether string contains a \uXXXX sequence and cannot be zero-copied
+ string_has_escape: bool,
+ // Whether number is simple and can be represented by an integer (i.e. no `.` or `e`)
+ number_is_integer: bool,
+ // How many bytes behind the current position the start of this token is.
+ count: usize,
+
+ pub const Id = enum {
+ ObjectBegin,
+ ObjectEnd,
+ ArrayBegin,
+ ArrayEnd,
+ String,
+ Number,
+ True,
+ False,
+ Null,
+ };
+
+ pub fn init(id: Id, count: usize, offset: u1) Token {
+ return Token {
+ .id = id,
+ .offset = offset,
+ .string_has_escape = false,
+ .number_is_integer = true,
+ .count = count,
+ };
+ }
+
+ pub fn initString(count: usize, has_unicode_escape: bool) Token {
+ return Token {
+ .id = Id.String,
+ .offset = 0,
+ .string_has_escape = has_unicode_escape,
+ .number_is_integer = true,
+ .count = count,
+ };
+ }
+
+ pub fn initNumber(count: usize, number_is_integer: bool) Token {
+ return Token {
+ .id = Id.Number,
+ .offset = 0,
+ .string_has_escape = false,
+ .number_is_integer = number_is_integer,
+ .count = count,
+ };
+ }
+
+ // A marker token is a zero-length token used to mark structural characters (e.g. object/array begin/end).
+ pub fn initMarker(id: Id) Token {
+ return Token {
+ .id = id,
+ .offset = 0,
+ .string_has_escape = false,
+ .number_is_integer = true,
+ .count = 0,
+ };
+ }
+
+ // Slice into the underlying input string.
+ pub fn slice(self: &const Token, input: []const u8, i: usize) []const u8 {
+ return input[i + self.offset - self.count .. i + self.offset];
+ }
+};
+
+// A small streaming JSON parser. This accepts input one byte at a time and returns tokens as
+// they are encountered. No copies or allocations are performed during parsing and the entire
+// parsing state requires ~40-50 bytes of stack space.
+//
+// Conforms strictly to RFC8259.
+const StreamingJsonParser = struct {
+ // Current state
+ state: State,
+ // How many bytes we have counted for the current token
+ count: usize,
+ // What state to follow after parsing a string (either property or value string)
+ after_string_state: State,
+ // What state to follow after parsing a value (either top-level or value end)
+ after_value_state: State,
+ // If we stopped now, would the input parsed so far be a valid, complete JSON string?
+ complete: bool,
+ // Current token flags to pass through to the next generated, see Token.
+ string_has_escape: bool,
+ number_is_integer: bool,
+
+ // Bit-stack for nested object/array literals (max 255 nestings).
+ stack: u256,
+ stack_used: u8,
+
+ const object_bit = 0;
+ const array_bit = 1;
+ const max_stack_size = @maxValue(u8);
+
+ pub fn init() StreamingJsonParser {
+ var p: StreamingJsonParser = undefined;
+ p.reset();
+ return p;
+ }
+
+ pub fn reset(p: &StreamingJsonParser) void {
+ p.state = State.TopLevelBegin;
+ p.count = 0;
+ // Set before ever read in main transition function
+ p.after_string_state = undefined;
+ p.after_value_state = State.ValueEnd; // handle end of values normally
+ p.stack = 0;
+ p.stack_used = 0;
+ p.complete = false;
+ p.string_has_escape = false;
+ p.number_is_integer = true;
+ }
+
+ pub const State = enum {
+ // These must be first with these explicit values as we rely on them for indexing the
+ // bit-stack directly and avoiding a branch.
+ ObjectSeparator = 0,
+ ValueEnd = 1,
+
+ TopLevelBegin,
+ TopLevelEnd,
+
+ ValueBegin,
+ ValueBeginNoClosing,
+
+ String,
+ StringUtf8Byte3,
+ StringUtf8Byte2,
+ StringUtf8Byte1,
+ StringEscapeCharacter,
+ StringEscapeHexUnicode4,
+ StringEscapeHexUnicode3,
+ StringEscapeHexUnicode2,
+ StringEscapeHexUnicode1,
+
+ Number,
+ NumberMaybeDotOrExponent,
+ NumberMaybeDigitOrDotOrExponent,
+ NumberFractionalRequired,
+ NumberFractional,
+ NumberMaybeExponent,
+ NumberExponent,
+ NumberExponentDigitsRequired,
+ NumberExponentDigits,
+
+ TrueLiteral1,
+ TrueLiteral2,
+ TrueLiteral3,
+
+ FalseLiteral1,
+ FalseLiteral2,
+ FalseLiteral3,
+ FalseLiteral4,
+
+ NullLiteral1,
+ NullLiteral2,
+ NullLiteral3,
+
+ // Only call this function to generate array/object final state.
+ pub fn fromInt(x: var) State {
+ std.debug.assert(x == 0 or x == 1);
+ const T = @TagType(State);
+ return State(T(x));
+ }
+ };
+
+ pub const Error = error {
+ InvalidTopLevel,
+ TooManyNestedItems,
+ TooManyClosingItems,
+ InvalidValueBegin,
+ InvalidValueEnd,
+ UnbalancedBrackets,
+ UnbalancedBraces,
+ UnexpectedClosingBracket,
+ UnexpectedClosingBrace,
+ InvalidNumber,
+ InvalidSeparator,
+ InvalidLiteral,
+ InvalidEscapeCharacter,
+ InvalidUnicodeHexSymbol,
+ InvalidUtf8Byte,
+ InvalidTopLevelTrailing,
+ InvalidControlCharacter,
+ };
+
+ // Give another byte to the parser and obtain any new tokens. This may (rarely) return two
+ // tokens. token2 is always null if token1 is null.
+ //
+ // There is currently no error recovery on a bad stream.
+ pub fn feed(p: &StreamingJsonParser, c: u8, token1: &?Token, token2: &?Token) Error!void {
+ *token1 = null;
+ *token2 = null;
+ p.count += 1;
+
+ // unlikely
+ if (try p.transition(c, token1)) {
+ _ = try p.transition(c, token2);
+ }
+ }
+
+ // Perform a single transition on the state machine and return any possible token.
+ fn transition(p: &StreamingJsonParser, c: u8, token: &?Token) Error!bool {
+ switch (p.state) {
+ State.TopLevelBegin => switch (c) {
+ '{' => {
+ p.stack <<= 1;
+ p.stack |= object_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ObjectSeparator;
+
+ *token = Token.initMarker(Token.Id.ObjectBegin);
+ },
+ '[' => {
+ p.stack <<= 1;
+ p.stack |= array_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+
+ *token = Token.initMarker(Token.Id.ArrayBegin);
+ },
+ '-' => {
+ p.number_is_integer = true;
+ p.state = State.Number;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ '0' => {
+ p.number_is_integer = true;
+ p.state = State.NumberMaybeDotOrExponent;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ '1' ... '9' => {
+ p.number_is_integer = true;
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ '"' => {
+ p.state = State.String;
+ p.after_value_state = State.TopLevelEnd;
+ // We don't actually need the following since after_value_state should override.
+ p.after_string_state = State.ValueEnd;
+ p.string_has_escape = false;
+ p.count = 0;
+ },
+ 't' => {
+ p.state = State.TrueLiteral1;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ 'f' => {
+ p.state = State.FalseLiteral1;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ 'n' => {
+ p.state = State.NullLiteral1;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidTopLevel;
+ },
+ },
+
+ State.TopLevelEnd => switch (c) {
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidTopLevelTrailing;
+ },
+ },
+
+ State.ValueBegin => switch (c) {
+ // NOTE: These are shared in ValueEnd as well, think we can reorder states to
+ // be a bit clearer and avoid this duplication.
+ '}' => {
+ // unlikely
+ if (p.stack & 1 != object_bit) {
+ return error.UnexpectedClosingBracket;
+ }
+ if (p.stack_used == 0) {
+ return error.TooManyClosingItems;
+ }
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ switch (p.stack_used) {
+ 0 => {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ },
+ else => {},
+ }
+
+ *token = Token.initMarker(Token.Id.ObjectEnd);
+ },
+ ']' => {
+ if (p.stack & 1 != array_bit) {
+ return error.UnexpectedClosingBrace;
+ }
+ if (p.stack_used == 0) {
+ return error.TooManyClosingItems;
+ }
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ switch (p.stack_used) {
+ 0 => {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ },
+ else => {},
+ }
+
+ *token = Token.initMarker(Token.Id.ArrayEnd);
+ },
+ '{' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= object_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ObjectSeparator;
+
+ *token = Token.initMarker(Token.Id.ObjectBegin);
+ },
+ '[' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= array_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+
+ *token = Token.initMarker(Token.Id.ArrayBegin);
+ },
+ '-' => {
+ p.state = State.Number;
+ p.count = 0;
+ },
+ '0' => {
+ p.state = State.NumberMaybeDotOrExponent;
+ p.count = 0;
+ },
+ '1' ... '9' => {
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ p.count = 0;
+ },
+ '"' => {
+ p.state = State.String;
+ p.count = 0;
+ },
+ 't' => {
+ p.state = State.TrueLiteral1;
+ p.count = 0;
+ },
+ 'f' => {
+ p.state = State.FalseLiteral1;
+ p.count = 0;
+ },
+ 'n' => {
+ p.state = State.NullLiteral1;
+ p.count = 0;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidValueBegin;
+ },
+ },
+
+ // TODO: A bit of duplication here and in the following state, redo.
+ State.ValueBeginNoClosing => switch (c) {
+ '{' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= object_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ObjectSeparator;
+
+ *token = Token.initMarker(Token.Id.ObjectBegin);
+ },
+ '[' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= array_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+
+ *token = Token.initMarker(Token.Id.ArrayBegin);
+ },
+ '-' => {
+ p.state = State.Number;
+ p.count = 0;
+ },
+ '0' => {
+ p.state = State.NumberMaybeDotOrExponent;
+ p.count = 0;
+ },
+ '1' ... '9' => {
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ p.count = 0;
+ },
+ '"' => {
+ p.state = State.String;
+ p.count = 0;
+ },
+ 't' => {
+ p.state = State.TrueLiteral1;
+ p.count = 0;
+ },
+ 'f' => {
+ p.state = State.FalseLiteral1;
+ p.count = 0;
+ },
+ 'n' => {
+ p.state = State.NullLiteral1;
+ p.count = 0;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidValueBegin;
+ },
+ },
+
+ State.ValueEnd => switch (c) {
+ ',' => {
+ p.after_string_state = State.fromInt(p.stack & 1);
+ p.state = State.ValueBeginNoClosing;
+ },
+ ']' => {
+ if (p.stack_used == 0) {
+ return error.UnbalancedBrackets;
+ }
+
+ p.state = State.ValueEnd;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ if (p.stack_used == 0) {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ }
+
+ *token = Token.initMarker(Token.Id.ArrayEnd);
+ },
+ '}' => {
+ if (p.stack_used == 0) {
+ return error.UnbalancedBraces;
+ }
+
+ p.state = State.ValueEnd;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ if (p.stack_used == 0) {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ }
+
+ *token = Token.initMarker(Token.Id.ObjectEnd);
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidValueEnd;
+ },
+ },
+
+ State.ObjectSeparator => switch (c) {
+ ':' => {
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidSeparator;
+ },
+ },
+
+ State.String => switch (c) {
+ 0x00 ... 0x1F => {
+ return error.InvalidControlCharacter;
+ },
+ '"' => {
+ p.state = p.after_string_state;
+ if (p.after_value_state == State.TopLevelEnd) {
+ p.state = State.TopLevelEnd;
+ p.complete = true;
+ }
+
+ *token = Token.initString(p.count - 1, p.string_has_escape);
+ },
+ '\\' => {
+ p.state = State.StringEscapeCharacter;
+ },
+ 0x20, 0x21, 0x23 ... 0x5B, 0x5D ... 0x7F => {
+ // non-control ascii
+ },
+ 0xC0 ... 0xDF => {
+ p.state = State.StringUtf8Byte1;
+ },
+ 0xE0 ... 0xEF => {
+ p.state = State.StringUtf8Byte2;
+ },
+ 0xF0 ... 0xFF => {
+ p.state = State.StringUtf8Byte3;
+ },
+ else => {
+ return error.InvalidUtf8Byte;
+ },
+ },
+
+ State.StringUtf8Byte3 => switch (c >> 6) {
+ 0b10 => p.state = State.StringUtf8Byte2,
+ else => return error.InvalidUtf8Byte,
+ },
+
+ State.StringUtf8Byte2 => switch (c >> 6) {
+ 0b10 => p.state = State.StringUtf8Byte1,
+ else => return error.InvalidUtf8Byte,
+ },
+
+ State.StringUtf8Byte1 => switch (c >> 6) {
+ 0b10 => p.state = State.String,
+ else => return error.InvalidUtf8Byte,
+ },
+
+ State.StringEscapeCharacter => switch (c) {
+ // NOTE: '/' is allowed as an escaped character but it also is allowed
+ // as unescaped according to the RFC. There is a reported errata which suggests
+ // removing the non-escaped variant but it makes more sense to simply disallow
+ // it as an escape code here.
+ //
+ // The current JSONTestSuite tests rely on both of these behaviours being present
+ // however, so we default to the status quo where both are accepted until this
+ // is further clarified.
+ '"', '\\', '/', 'b', 'f', 'n', 'r', 't' => {
+ p.string_has_escape = true;
+ p.state = State.String;
+ },
+ 'u' => {
+ p.string_has_escape = true;
+ p.state = State.StringEscapeHexUnicode4;
+ },
+ else => {
+ return error.InvalidEscapeCharacter;
+ },
+ },
+
+ State.StringEscapeHexUnicode4 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.StringEscapeHexUnicode3;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.StringEscapeHexUnicode3 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.StringEscapeHexUnicode2;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.StringEscapeHexUnicode2 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.StringEscapeHexUnicode1;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.StringEscapeHexUnicode1 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.String;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.Number => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' => {
+ p.state = State.NumberMaybeDotOrExponent;
+ },
+ '1' ... '9' => {
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ }
+ },
+
+ State.NumberMaybeDotOrExponent => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '.' => {
+ p.number_is_integer = false;
+ p.state = State.NumberFractionalRequired;
+ },
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberMaybeDigitOrDotOrExponent => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '.' => {
+ p.number_is_integer = false;
+ p.state = State.NumberFractionalRequired;
+ },
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ '0' ... '9' => {
+ // another digit
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberFractionalRequired => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' ... '9' => {
+ p.state = State.NumberFractional;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ }
+ },
+
+ State.NumberFractional => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' ... '9' => {
+ // another digit
+ },
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberMaybeExponent => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberExponent => switch (c) {
+ '-', '+', => {
+ p.complete = false;
+ p.state = State.NumberExponentDigitsRequired;
+ },
+ '0' ... '9' => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ p.state = State.NumberExponentDigits;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ },
+
+ State.NumberExponentDigitsRequired => switch (c) {
+ '0' ... '9' => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ p.state = State.NumberExponentDigits;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ },
+
+ State.NumberExponentDigits => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' ... '9' => {
+ // another digit
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.TrueLiteral1 => switch (c) {
+ 'r' => p.state = State.TrueLiteral2,
+ else => return error.InvalidLiteral,
+ },
+
+ State.TrueLiteral2 => switch (c) {
+ 'u' => p.state = State.TrueLiteral3,
+ else => return error.InvalidLiteral,
+ },
+
+ State.TrueLiteral3 => switch (c) {
+ 'e' => {
+ p.state = p.after_value_state;
+ p.complete = p.state == State.TopLevelEnd;
+ *token = Token.init(Token.Id.True, p.count + 1, 1);
+ },
+ else => {
+ return error.InvalidLiteral;
+ },
+ },
+
+ State.FalseLiteral1 => switch (c) {
+ 'a' => p.state = State.FalseLiteral2,
+ else => return error.InvalidLiteral,
+ },
+
+ State.FalseLiteral2 => switch (c) {
+ 'l' => p.state = State.FalseLiteral3,
+ else => return error.InvalidLiteral,
+ },
+
+ State.FalseLiteral3 => switch (c) {
+ 's' => p.state = State.FalseLiteral4,
+ else => return error.InvalidLiteral,
+ },
+
+ State.FalseLiteral4 => switch (c) {
+ 'e' => {
+ p.state = p.after_value_state;
+ p.complete = p.state == State.TopLevelEnd;
+ *token = Token.init(Token.Id.False, p.count + 1, 1);
+ },
+ else => {
+ return error.InvalidLiteral;
+ },
+ },
+
+ State.NullLiteral1 => switch (c) {
+ 'u' => p.state = State.NullLiteral2,
+ else => return error.InvalidLiteral,
+ },
+
+ State.NullLiteral2 => switch (c) {
+ 'l' => p.state = State.NullLiteral3,
+ else => return error.InvalidLiteral,
+ },
+
+ State.NullLiteral3 => switch (c) {
+ 'l' => {
+ p.state = p.after_value_state;
+ p.complete = p.state == State.TopLevelEnd;
+ *token = Token.init(Token.Id.Null, p.count + 1, 1);
+ },
+ else => {
+ return error.InvalidLiteral;
+ },
+ },
+ }
+
+ return false;
+ }
+};
+
+// Validate a JSON string. This does not limit number precision so a decoder may not necessarily
+// be able to decode the string even if this returns true.
+pub fn validate(s: []const u8) bool {
+ var p = StreamingJsonParser.init();
+
+ for (s) |c, i| {
+ var token1: ?Token = undefined;
+ var token2: ?Token = undefined;
+
+ p.feed(c, &token1, &token2) catch |err| {
+ return false;
+ };
+ }
+
+ return p.complete;
+}
+
+const Allocator = std.mem.Allocator;
+const ArenaAllocator = std.heap.ArenaAllocator;
+const ArrayList = std.ArrayList;
+const HashMap = std.HashMap;
+
+pub const ValueTree = struct {
+ arena: ArenaAllocator,
+ root: Value,
+
+ pub fn deinit(self: &ValueTree) void {
+ self.arena.deinit();
+ }
+};
+
+pub const ObjectMap = HashMap([]const u8, Value, mem.hash_slice_u8, mem.eql_slice_u8);
+
+pub const Value = union(enum) {
+ Null,
+ Bool: bool,
+ Integer: i64,
+ Float: f64,
+ String: []const u8,
+ Array: ArrayList(Value),
+ Object: ObjectMap,
+
+ pub fn dump(self: &const Value) void {
+ switch (*self) {
+ Value.Null => {
+ std.debug.warn("null");
+ },
+ Value.Bool => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Integer => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Float => |inner| {
+ std.debug.warn("{.5}", inner);
+ },
+ Value.String => |inner| {
+ std.debug.warn("\"{}\"", inner);
+ },
+ Value.Array => |inner| {
+ var not_first = false;
+ std.debug.warn("[");
+ for (inner.toSliceConst()) |value| {
+ if (not_first) {
+ std.debug.warn(",");
+ }
+ not_first = true;
+ value.dump();
+ }
+ std.debug.warn("]");
+ },
+ Value.Object => |inner| {
+ var not_first = false;
+ std.debug.warn("{{");
+ var it = inner.iterator();
+
+ while (it.next()) |entry| {
+ if (not_first) {
+ std.debug.warn(",");
+ }
+ not_first = true;
+ std.debug.warn("\"{}\":", entry.key);
+ entry.value.dump();
+ }
+ std.debug.warn("}}");
+ },
+ }
+ }
+
+ pub fn dumpIndent(self: &const Value, indent: usize) void {
+ if (indent == 0) {
+ self.dump();
+ } else {
+ self.dumpIndentLevel(indent, 0);
+ }
+ }
+
+ fn dumpIndentLevel(self: &const Value, indent: usize, level: usize) void {
+ switch (*self) {
+ Value.Null => {
+ std.debug.warn("null");
+ },
+ Value.Bool => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Integer => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Float => |inner| {
+ std.debug.warn("{.5}", inner);
+ },
+ Value.String => |inner| {
+ std.debug.warn("\"{}\"", inner);
+ },
+ Value.Array => |inner| {
+ var not_first = false;
+ std.debug.warn("[\n");
+
+ for (inner.toSliceConst()) |value| {
+ if (not_first) {
+ std.debug.warn(",\n");
+ }
+ not_first = true;
+ padSpace(level + indent);
+ value.dumpIndentLevel(indent, level + indent);
+ }
+ std.debug.warn("\n");
+ padSpace(level);
+ std.debug.warn("]");
+ },
+ Value.Object => |inner| {
+ var not_first = false;
+ std.debug.warn("{{\n");
+ var it = inner.iterator();
+
+ while (it.next()) |entry| {
+ if (not_first) {
+ std.debug.warn(",\n");
+ }
+ not_first = true;
+ padSpace(level + indent);
+ std.debug.warn("\"{}\": ", entry.key);
+ entry.value.dumpIndentLevel(indent, level + indent);
+ }
+ std.debug.warn("\n");
+ padSpace(level);
+ std.debug.warn("}}");
+ },
+ }
+ }
+
+ fn padSpace(indent: usize) void {
+ var i: usize = 0;
+ while (i < indent) : (i += 1) {
+ std.debug.warn(" ");
+ }
+ }
+};
+
+// A non-stream JSON parser which constructs a tree of Value's.
+const JsonParser = struct {
+ allocator: &Allocator,
+ state: State,
+ copy_strings: bool,
+ // Stores parent nodes and un-combined Values.
+ stack: ArrayList(Value),
+
+ const State = enum {
+ ObjectKey,
+ ObjectValue,
+ ArrayValue,
+ Simple,
+ };
+
+ pub fn init(allocator: &Allocator, copy_strings: bool) JsonParser {
+ return JsonParser {
+ .allocator = allocator,
+ .state = State.Simple,
+ .copy_strings = copy_strings,
+ .stack = ArrayList(Value).init(allocator),
+ };
+ }
+
+ pub fn deinit(p: &JsonParser) void {
+ p.stack.deinit();
+ }
+
+ pub fn reset(p: &JsonParser) void {
+ p.state = State.Simple;
+ p.stack.shrink(0);
+ }
+
+ pub fn parse(p: &JsonParser, input: []const u8) !ValueTree {
+ var mp = StreamingJsonParser.init();
+
+ var arena = ArenaAllocator.init(p.allocator);
+ errdefer arena.deinit();
+
+ for (input) |c, i| {
+ var mt1: ?Token = undefined;
+ var mt2: ?Token = undefined;
+
+ try mp.feed(c, &mt1, &mt2);
+ if (mt1) |t1| {
+ try p.transition(&arena.allocator, input, i, t1);
+
+ if (mt2) |t2| {
+ try p.transition(&arena.allocator, input, i, t2);
+ }
+ }
+ }
+
+ // Handle top-level lonely number values.
+ {
+ const i = input.len;
+ var mt1: ?Token = undefined;
+ var mt2: ?Token = undefined;
+
+ try mp.feed(' ', &mt1, &mt2);
+ if (mt1) |t1| {
+ try p.transition(&arena.allocator, input, i, t1);
+ }
+ }
+
+ if (!mp.complete) {
+ return error.IncompleteJsonInput;
+ }
+
+ std.debug.assert(p.stack.len == 1);
+
+ return ValueTree {
+ .arena = arena,
+ .root = p.stack.at(0),
+ };
+ }
+
+ // Even though p.allocator exists, we take an explicit allocator so that allocation state
+ // can be cleaned up correctly on error during a `parse` call.
+ fn transition(p: &JsonParser, allocator: &Allocator, input: []const u8, i: usize, token: &const Token) !void {
+ switch (p.state) {
+ State.ObjectKey => switch (token.id) {
+ Token.Id.ObjectEnd => {
+ if (p.stack.len == 1) {
+ return;
+ }
+
+ var value = p.stack.pop();
+ try p.pushToParent(value);
+ },
+ Token.Id.String => {
+ try p.stack.append(try p.parseString(allocator, token, input, i));
+ p.state = State.ObjectValue;
+ },
+ else => {
+ unreachable;
+ },
+ },
+ State.ObjectValue => {
+ var object = &p.stack.items[p.stack.len - 2].Object;
+ var key = p.stack.items[p.stack.len - 1].String;
+
+ switch (token.id) {
+ Token.Id.ObjectBegin => {
+ try p.stack.append(Value { .Object = ObjectMap.init(allocator) });
+ p.state = State.ObjectKey;
+ },
+ Token.Id.ArrayBegin => {
+ try p.stack.append(Value { .Array = ArrayList(Value).init(allocator) });
+ p.state = State.ArrayValue;
+ },
+ Token.Id.String => {
+ _ = try object.put(key, try p.parseString(allocator, token, input, i));
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.Number => {
+ _ = try object.put(key, try p.parseNumber(token, input, i));
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.True => {
+ _ = try object.put(key, Value { .Bool = true });
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.False => {
+ _ = try object.put(key, Value { .Bool = false });
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.Null => {
+ _ = try object.put(key, Value.Null);
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ else => {
+ unreachable;
+ },
+ }
+ },
+ State.ArrayValue => {
+ var array = &p.stack.items[p.stack.len - 1].Array;
+
+ switch (token.id) {
+ Token.Id.ArrayEnd => {
+ if (p.stack.len == 1) {
+ return;
+ }
+
+ var value = p.stack.pop();
+ try p.pushToParent(value);
+ },
+ Token.Id.ObjectBegin => {
+ try p.stack.append(Value { .Object = ObjectMap.init(allocator) });
+ p.state = State.ObjectKey;
+ },
+ Token.Id.ArrayBegin => {
+ try p.stack.append(Value { .Array = ArrayList(Value).init(allocator) });
+ p.state = State.ArrayValue;
+ },
+ Token.Id.String => {
+ try array.append(try p.parseString(allocator, token, input, i));
+ },
+ Token.Id.Number => {
+ try array.append(try p.parseNumber(token, input, i));
+ },
+ Token.Id.True => {
+ try array.append(Value { .Bool = true });
+ },
+ Token.Id.False => {
+ try array.append(Value { .Bool = false });
+ },
+ Token.Id.Null => {
+ try array.append(Value.Null);
+ },
+ else => {
+ unreachable;
+ },
+ }
+ },
+ State.Simple => switch (token.id) {
+ Token.Id.ObjectBegin => {
+ try p.stack.append(Value { .Object = ObjectMap.init(allocator) });
+ p.state = State.ObjectKey;
+ },
+ Token.Id.ArrayBegin => {
+ try p.stack.append(Value { .Array = ArrayList(Value).init(allocator) });
+ p.state = State.ArrayValue;
+ },
+ Token.Id.String => {
+ try p.stack.append(try p.parseString(allocator, token, input, i));
+ },
+ Token.Id.Number => {
+ try p.stack.append(try p.parseNumber(token, input, i));
+ },
+ Token.Id.True => {
+ try p.stack.append(Value { .Bool = true });
+ },
+ Token.Id.False => {
+ try p.stack.append(Value { .Bool = false });
+ },
+ Token.Id.Null => {
+ try p.stack.append(Value.Null);
+ },
+ Token.Id.ObjectEnd, Token.Id.ArrayEnd => {
+ unreachable;
+ },
+ },
+ }
+ }
+
+ fn pushToParent(p: &JsonParser, value: &const Value) !void {
+ switch (p.stack.at(p.stack.len - 1)) {
+ // Object Parent -> [ ..., object, key, value ]
+ Value.String => |key| {
+ _ = p.stack.pop();
+
+ var object = &p.stack.items[p.stack.len - 1].Object;
+ _ = try object.put(key, value);
+ p.state = State.ObjectKey;
+ },
+ // Array Parent -> [ ..., array, value ]
+ Value.Array => |*array| {
+ try array.append(value);
+ p.state = State.ArrayValue;
+ },
+ else => {
+ unreachable;
+ },
+ }
+ }
+
+ fn parseString(p: &JsonParser, allocator: &Allocator, token: &const Token, input: []const u8, i: usize) !Value {
+ // TODO: We don't strictly have to copy values which do not contain any escape
+ // characters if flagged with the option.
+ const slice = token.slice(input, i);
+ return Value { .String = try mem.dupe(p.allocator, u8, slice) };
+ }
+
+ fn parseNumber(p: &JsonParser, token: &const Token, input: []const u8, i: usize) !Value {
+ return if (token.number_is_integer)
+ Value { .Integer = try std.fmt.parseInt(i64, token.slice(input, i), 10) }
+ else
+ @panic("TODO: fmt.parseFloat not yet implemented")
+ ;
+ }
+};
+
+const debug = std.debug;
+
+test "json parser dynamic" {
+ var p = JsonParser.init(std.debug.global_allocator, false);
+ defer p.deinit();
+
+ const s =
+ \\{
+ \\ "Image": {
+ \\ "Width": 800,
+ \\ "Height": 600,
+ \\ "Title": "View from 15th Floor",
+ \\ "Thumbnail": {
+ \\ "Url": "http://www.example.com/image/481989943",
+ \\ "Height": 125,
+ \\ "Width": 100
+ \\ },
+ \\ "Animated" : false,
+ \\ "IDs": [116, 943, 234, 38793]
+ \\ }
+ \\}
+ ;
+
+ var tree = try p.parse(s);
+ defer tree.deinit();
+
+ var root = tree.root;
+
+ var image = (??root.Object.get("Image")).value;
+
+ const width = (??image.Object.get("Width")).value;
+ debug.assert(width.Integer == 800);
+
+ const height = (??image.Object.get("Height")).value;
+ debug.assert(height.Integer == 600);
+
+ const title = (??image.Object.get("Title")).value;
+ debug.assert(mem.eql(u8, title.String, "View from 15th Floor"));
+
+ const animated = (??image.Object.get("Animated")).value;
+ debug.assert(animated.Bool == false);
+}
diff --git a/std/json_test.zig b/std/json_test.zig
new file mode 100644
index 0000000000..90a2ddbd50
--- /dev/null
+++ b/std/json_test.zig
@@ -0,0 +1,1942 @@
+// RFC 8259 conformance tests.
+//
+// Tests are taken from https://github.com/nst/JSONTestSuite
+// Read also http://seriot.ch/parsing_json.php for a good overview.
+
+const std = @import("index.zig");
+
+fn ok(comptime s: []const u8) void {
+ std.debug.assert(std.json.validate(s));
+}
+
+fn err(comptime s: []const u8) void {
+ std.debug.assert(!std.json.validate(s));
+}
+
+fn any(comptime s: []const u8) void {
+ std.debug.assert(true);
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+test "y_array_arraysWithSpaces" {
+ ok(
+ \\[[] ]
+ );
+}
+
+test "y_array_empty" {
+ ok(
+ \\[]
+ );
+}
+
+test "y_array_empty-string" {
+ ok(
+ \\[""]
+ );
+}
+
+test "y_array_ending_with_newline" {
+ ok(
+ \\["a"]
+ );
+}
+
+test "y_array_false" {
+ ok(
+ \\[false]
+ );
+}
+
+test "y_array_heterogeneous" {
+ ok(
+ \\[null, 1, "1", {}]
+ );
+}
+
+test "y_array_null" {
+ ok(
+ \\[null]
+ );
+}
+
+test "y_array_with_1_and_newline" {
+ ok(
+ \\[1
+ \\]
+ );
+}
+
+test "y_array_with_leading_space" {
+ ok(
+ \\ [1]
+ );
+}
+
+test "y_array_with_several_null" {
+ ok(
+ \\[1,null,null,null,2]
+ );
+}
+
+test "y_array_with_trailing_space" {
+ ok(
+ "[2] "
+ );
+}
+
+test "y_number_0e+1" {
+ ok(
+ \\[0e+1]
+ );
+}
+
+test "y_number_0e1" {
+ ok(
+ \\[0e1]
+ );
+}
+
+test "y_number_after_space" {
+ ok(
+ \\[ 4]
+ );
+}
+
+test "y_number_double_close_to_zero" {
+ ok(
+ \\[-0.000000000000000000000000000000000000000000000000000000000000000000000000000001]
+ );
+}
+
+test "y_number_int_with_exp" {
+ ok(
+ \\[20e1]
+ );
+}
+
+test "y_number" {
+ ok(
+ \\[123e65]
+ );
+}
+
+test "y_number_minus_zero" {
+ ok(
+ \\[-0]
+ );
+}
+
+test "y_number_negative_int" {
+ ok(
+ \\[-123]
+ );
+}
+
+test "y_number_negative_one" {
+ ok(
+ \\[-1]
+ );
+}
+
+test "y_number_negative_zero" {
+ ok(
+ \\[-0]
+ );
+}
+
+test "y_number_real_capital_e" {
+ ok(
+ \\[1E22]
+ );
+}
+
+test "y_number_real_capital_e_neg_exp" {
+ ok(
+ \\[1E-2]
+ );
+}
+
+test "y_number_real_capital_e_pos_exp" {
+ ok(
+ \\[1E+2]
+ );
+}
+
+test "y_number_real_exponent" {
+ ok(
+ \\[123e45]
+ );
+}
+
+test "y_number_real_fraction_exponent" {
+ ok(
+ \\[123.456e78]
+ );
+}
+
+test "y_number_real_neg_exp" {
+ ok(
+ \\[1e-2]
+ );
+}
+
+test "y_number_real_pos_exponent" {
+ ok(
+ \\[1e+2]
+ );
+}
+
+test "y_number_simple_int" {
+ ok(
+ \\[123]
+ );
+}
+
+test "y_number_simple_real" {
+ ok(
+ \\[123.456789]
+ );
+}
+
+test "y_object_basic" {
+ ok(
+ \\{"asd":"sdf"}
+ );
+}
+
+test "y_object_duplicated_key_and_value" {
+ ok(
+ \\{"a":"b","a":"b"}
+ );
+}
+
+test "y_object_duplicated_key" {
+ ok(
+ \\{"a":"b","a":"c"}
+ );
+}
+
+test "y_object_empty" {
+ ok(
+ \\{}
+ );
+}
+
+test "y_object_empty_key" {
+ ok(
+ \\{"":0}
+ );
+}
+
+test "y_object_escaped_null_in_key" {
+ ok(
+ \\{"foo\u0000bar": 42}
+ );
+}
+
+test "y_object_extreme_numbers" {
+ ok(
+ \\{ "min": -1.0e+28, "max": 1.0e+28 }
+ );
+}
+
+test "y_object" {
+ ok(
+ \\{"asd":"sdf", "dfg":"fgh"}
+ );
+}
+
+test "y_object_long_strings" {
+ ok(
+ \\{"x":[{"id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}], "id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}
+ );
+}
+
+test "y_object_simple" {
+ ok(
+ \\{"a":[]}
+ );
+}
+
+test "y_object_string_unicode" {
+ ok(
+ \\{"title":"\u041f\u043e\u043b\u0442\u043e\u0440\u0430 \u0417\u0435\u043c\u043b\u0435\u043a\u043e\u043f\u0430" }
+ );
+}
+
+test "y_object_with_newlines" {
+ ok(
+ \\{
+ \\"a": "b"
+ \\}
+ );
+}
+
+test "y_string_1_2_3_bytes_UTF-8_sequences" {
+ ok(
+ \\["\u0060\u012a\u12AB"]
+ );
+}
+
+test "y_string_accepted_surrogate_pair" {
+ ok(
+ \\["\uD801\udc37"]
+ );
+}
+
+test "y_string_accepted_surrogate_pairs" {
+ ok(
+ \\["\ud83d\ude39\ud83d\udc8d"]
+ );
+}
+
+test "y_string_allowed_escapes" {
+ ok(
+ \\["\"\\\/\b\f\n\r\t"]
+ );
+}
+
+test "y_string_backslash_and_u_escaped_zero" {
+ ok(
+ \\["\\u0000"]
+ );
+}
+
+test "y_string_backslash_doublequotes" {
+ ok(
+ \\["\""]
+ );
+}
+
+test "y_string_comments" {
+ ok(
+ \\["a/*b*/c/*d//e"]
+ );
+}
+
+test "y_string_double_escape_a" {
+ ok(
+ \\["\\a"]
+ );
+}
+
+test "y_string_double_escape_n" {
+ ok(
+ \\["\\n"]
+ );
+}
+
+test "y_string_escaped_control_character" {
+ ok(
+ \\["\u0012"]
+ );
+}
+
+test "y_string_escaped_noncharacter" {
+ ok(
+ \\["\uFFFF"]
+ );
+}
+
+test "y_string_in_array" {
+ ok(
+ \\["asd"]
+ );
+}
+
+test "y_string_in_array_with_leading_space" {
+ ok(
+ \\[ "asd"]
+ );
+}
+
+test "y_string_last_surrogates_1_and_2" {
+ ok(
+ \\["\uDBFF\uDFFF"]
+ );
+}
+
+test "y_string_nbsp_uescaped" {
+ ok(
+ \\["new\u00A0line"]
+ );
+}
+
+test "y_string_nonCharacterInUTF-8_U+10FFFF" {
+ ok(
+ \\[""]
+ );
+}
+
+test "y_string_nonCharacterInUTF-8_U+FFFF" {
+ ok(
+ \\[""]
+ );
+}
+
+test "y_string_null_escape" {
+ ok(
+ \\["\u0000"]
+ );
+}
+
+test "y_string_one-byte-utf-8" {
+ ok(
+ \\["\u002c"]
+ );
+}
+
+test "y_string_pi" {
+ ok(
+ \\["π"]
+ );
+}
+
+test "y_string_reservedCharacterInUTF-8_U+1BFFF" {
+ ok(
+ \\[""]
+ );
+}
+
+test "y_string_simple_ascii" {
+ ok(
+ \\["asd "]
+ );
+}
+
+test "y_string_space" {
+ ok(
+ \\" "
+ );
+}
+
+test "y_string_surrogates_U+1D11E_MUSICAL_SYMBOL_G_CLEF" {
+ ok(
+ \\["\uD834\uDd1e"]
+ );
+}
+
+test "y_string_three-byte-utf-8" {
+ ok(
+ \\["\u0821"]
+ );
+}
+
+test "y_string_two-byte-utf-8" {
+ ok(
+ \\["\u0123"]
+ );
+}
+
+test "y_string_u+2028_line_sep" {
+ ok(
+ \\["
"]
+ );
+}
+
+test "y_string_u+2029_par_sep" {
+ ok(
+ \\["
"]
+ );
+}
+
+test "y_string_uescaped_newline" {
+ ok(
+ \\["new\u000Aline"]
+ );
+}
+
+test "y_string_uEscape" {
+ ok(
+ \\["\u0061\u30af\u30EA\u30b9"]
+ );
+}
+
+test "y_string_unescaped_char_delete" {
+ ok(
+ \\[""]
+ );
+}
+
+test "y_string_unicode_2" {
+ ok(
+ \\["⍂㈴⍂"]
+ );
+}
+
+test "y_string_unicodeEscapedBackslash" {
+ ok(
+ \\["\u005C"]
+ );
+}
+
+test "y_string_unicode_escaped_double_quote" {
+ ok(
+ \\["\u0022"]
+ );
+}
+
+test "y_string_unicode" {
+ ok(
+ \\["\uA66D"]
+ );
+}
+
+test "y_string_unicode_U+10FFFE_nonchar" {
+ ok(
+ \\["\uDBFF\uDFFE"]
+ );
+}
+
+test "y_string_unicode_U+1FFFE_nonchar" {
+ ok(
+ \\["\uD83F\uDFFE"]
+ );
+}
+
+test "y_string_unicode_U+200B_ZERO_WIDTH_SPACE" {
+ ok(
+ \\["\u200B"]
+ );
+}
+
+test "y_string_unicode_U+2064_invisible_plus" {
+ ok(
+ \\["\u2064"]
+ );
+}
+
+test "y_string_unicode_U+FDD0_nonchar" {
+ ok(
+ \\["\uFDD0"]
+ );
+}
+
+test "y_string_unicode_U+FFFE_nonchar" {
+ ok(
+ \\["\uFFFE"]
+ );
+}
+
+test "y_string_utf8" {
+ ok(
+ \\["€𝄞"]
+ );
+}
+
+test "y_string_with_del_character" {
+ ok(
+ \\["aa"]
+ );
+}
+
+test "y_structure_lonely_false" {
+ ok(
+ \\false
+ );
+}
+
+test "y_structure_lonely_int" {
+ ok(
+ \\42
+ );
+}
+
+test "y_structure_lonely_negative_real" {
+ ok(
+ \\-0.1
+ );
+}
+
+test "y_structure_lonely_null" {
+ ok(
+ \\null
+ );
+}
+
+test "y_structure_lonely_string" {
+ ok(
+ \\"asd"
+ );
+}
+
+test "y_structure_lonely_true" {
+ ok(
+ \\true
+ );
+}
+
+test "y_structure_string_empty" {
+ ok(
+ \\""
+ );
+}
+
+test "y_structure_trailing_newline" {
+ ok(
+ \\["a"]
+ );
+}
+
+test "y_structure_true_in_array" {
+ ok(
+ \\[true]
+ );
+}
+
+test "y_structure_whitespace_array" {
+ ok(
+ " [] "
+ );
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+test "n_array_1_true_without_comma" {
+ err(
+ \\[1 true]
+ );
+}
+
+test "n_array_a_invalid_utf8" {
+ err(
+ \\[aå]
+ );
+}
+
+test "n_array_colon_instead_of_comma" {
+ err(
+ \\["": 1]
+ );
+}
+
+test "n_array_comma_after_close" {
+ //err(
+ // \\[""],
+ //);
+}
+
+test "n_array_comma_and_number" {
+ err(
+ \\[,1]
+ );
+}
+
+test "n_array_double_comma" {
+ err(
+ \\[1,,2]
+ );
+}
+
+test "n_array_double_extra_comma" {
+ err(
+ \\["x",,]
+ );
+}
+
+test "n_array_extra_close" {
+ err(
+ \\["x"]]
+ );
+}
+
+test "n_array_extra_comma" {
+ //err(
+ // \\["",]
+ //);
+}
+
+test "n_array_incomplete_invalid_value" {
+ err(
+ \\[x
+ );
+}
+
+test "n_array_incomplete" {
+ err(
+ \\["x"
+ );
+}
+
+test "n_array_inner_array_no_comma" {
+ err(
+ \\[3[4]]
+ );
+}
+
+test "n_array_invalid_utf8" {
+ err(
+ \\[ÿ]
+ );
+}
+
+test "n_array_items_separated_by_semicolon" {
+ err(
+ \\[1:2]
+ );
+}
+
+test "n_array_just_comma" {
+ err(
+ \\[,]
+ );
+}
+
+test "n_array_just_minus" {
+ err(
+ \\[-]
+ );
+}
+
+test "n_array_missing_value" {
+ err(
+ \\[ , ""]
+ );
+}
+
+test "n_array_newlines_unclosed" {
+ err(
+ \\["a",
+ \\4
+ \\,1,
+ );
+}
+
+
+test "n_array_number_and_comma" {
+ err(
+ \\[1,]
+ );
+}
+
+test "n_array_number_and_several_commas" {
+ err(
+ \\[1,,]
+ );
+}
+
+test "n_array_spaces_vertical_tab_formfeed" {
+ err(
+ \\["a"\f]
+ );
+}
+
+test "n_array_star_inside" {
+ err(
+ \\[*]
+ );
+}
+
+test "n_array_unclosed" {
+ err(
+ \\[""
+ );
+}
+
+test "n_array_unclosed_trailing_comma" {
+ err(
+ \\[1,
+ );
+}
+
+test "n_array_unclosed_with_new_lines" {
+ err(
+ \\[1,
+ \\1
+ \\,1
+ );
+}
+
+test "n_array_unclosed_with_object_inside" {
+ err(
+ \\[{}
+ );
+}
+
+test "n_incomplete_false" {
+ err(
+ \\[fals]
+ );
+}
+
+test "n_incomplete_null" {
+ err(
+ \\[nul]
+ );
+}
+
+test "n_incomplete_true" {
+ err(
+ \\[tru]
+ );
+}
+
+test "n_multidigit_number_then_00" {
+ err(
+ \\123
+ );
+}
+
+test "n_number_0.1.2" {
+ err(
+ \\[0.1.2]
+ );
+}
+
+test "n_number_-01" {
+ err(
+ \\[-01]
+ );
+}
+
+test "n_number_0.3e" {
+ err(
+ \\[0.3e]
+ );
+}
+
+test "n_number_0.3e+" {
+ err(
+ \\[0.3e+]
+ );
+}
+
+test "n_number_0_capital_E" {
+ err(
+ \\[0E]
+ );
+}
+
+test "n_number_0_capital_E+" {
+ err(
+ \\[0E+]
+ );
+}
+
+test "n_number_0.e1" {
+ err(
+ \\[0.e1]
+ );
+}
+
+test "n_number_0e" {
+ err(
+ \\[0e]
+ );
+}
+
+test "n_number_0e+" {
+ err(
+ \\[0e+]
+ );
+}
+
+test "n_number_1_000" {
+ err(
+ \\[1 000.0]
+ );
+}
+
+test "n_number_1.0e-" {
+ err(
+ \\[1.0e-]
+ );
+}
+
+test "n_number_1.0e" {
+ err(
+ \\[1.0e]
+ );
+}
+
+test "n_number_1.0e+" {
+ err(
+ \\[1.0e+]
+ );
+}
+
+test "n_number_-1.0." {
+ err(
+ \\[-1.0.]
+ );
+}
+
+test "n_number_1eE2" {
+ err(
+ \\[1eE2]
+ );
+}
+
+test "n_number_.-1" {
+ err(
+ \\[.-1]
+ );
+}
+
+test "n_number_+1" {
+ err(
+ \\[+1]
+ );
+}
+
+test "n_number_.2e-3" {
+ err(
+ \\[.2e-3]
+ );
+}
+
+test "n_number_2.e-3" {
+ err(
+ \\[2.e-3]
+ );
+}
+
+test "n_number_2.e+3" {
+ err(
+ \\[2.e+3]
+ );
+}
+
+test "n_number_2.e3" {
+ err(
+ \\[2.e3]
+ );
+}
+
+test "n_number_-2." {
+ err(
+ \\[-2.]
+ );
+}
+
+test "n_number_9.e+" {
+ err(
+ \\[9.e+]
+ );
+}
+
+test "n_number_expression" {
+ err(
+ \\[1+2]
+ );
+}
+
+test "n_number_hex_1_digit" {
+ err(
+ \\[0x1]
+ );
+}
+
+test "n_number_hex_2_digits" {
+ err(
+ \\[0x42]
+ );
+}
+
+test "n_number_infinity" {
+ err(
+ \\[Infinity]
+ );
+}
+
+test "n_number_+Inf" {
+ err(
+ \\[+Inf]
+ );
+}
+
+test "n_number_Inf" {
+ err(
+ \\[Inf]
+ );
+}
+
+test "n_number_invalid+-" {
+ err(
+ \\[0e+-1]
+ );
+}
+
+test "n_number_invalid-negative-real" {
+ err(
+ \\[-123.123foo]
+ );
+}
+
+test "n_number_invalid-utf-8-in-bigger-int" {
+ err(
+ \\[123å]
+ );
+}
+
+test "n_number_invalid-utf-8-in-exponent" {
+ err(
+ \\[1e1å]
+ );
+}
+
+test "n_number_invalid-utf-8-in-int" {
+ err(
+ \\[0å]
+ );
+}
+
+
+test "n_number_++" {
+ err(
+ \\[++1234]
+ );
+}
+
+test "n_number_minus_infinity" {
+ err(
+ \\[-Infinity]
+ );
+}
+
+test "n_number_minus_sign_with_trailing_garbage" {
+ err(
+ \\[-foo]
+ );
+}
+
+test "n_number_minus_space_1" {
+ err(
+ \\[- 1]
+ );
+}
+
+test "n_number_-NaN" {
+ err(
+ \\[-NaN]
+ );
+}
+
+test "n_number_NaN" {
+ err(
+ \\[NaN]
+ );
+}
+
+test "n_number_neg_int_starting_with_zero" {
+ err(
+ \\[-012]
+ );
+}
+
+test "n_number_neg_real_without_int_part" {
+ err(
+ \\[-.123]
+ );
+}
+
+test "n_number_neg_with_garbage_at_end" {
+ err(
+ \\[-1x]
+ );
+}
+
+test "n_number_real_garbage_after_e" {
+ err(
+ \\[1ea]
+ );
+}
+
+test "n_number_real_with_invalid_utf8_after_e" {
+ err(
+ \\[1eå]
+ );
+}
+
+test "n_number_real_without_fractional_part" {
+ err(
+ \\[1.]
+ );
+}
+
+test "n_number_starting_with_dot" {
+ err(
+ \\[.123]
+ );
+}
+
+test "n_number_U+FF11_fullwidth_digit_one" {
+ err(
+ \\[ï¼]
+ );
+}
+
+test "n_number_with_alpha_char" {
+ err(
+ \\[1.8011670033376514H-308]
+ );
+}
+
+test "n_number_with_alpha" {
+ err(
+ \\[1.2a-3]
+ );
+}
+
+test "n_number_with_leading_zero" {
+ err(
+ \\[012]
+ );
+}
+
+test "n_object_bad_value" {
+ err(
+ \\["x", truth]
+ );
+}
+
+test "n_object_bracket_key" {
+ err(
+ \\{[: "x"}
+ );
+}
+
+test "n_object_comma_instead_of_colon" {
+ err(
+ \\{"x", null}
+ );
+}
+
+test "n_object_double_colon" {
+ err(
+ \\{"x"::"b"}
+ );
+}
+
+test "n_object_emoji" {
+ err(
+ \\{ð¨ð}
+ );
+}
+
+test "n_object_garbage_at_end" {
+ err(
+ \\{"a":"a" 123}
+ );
+}
+
+test "n_object_key_with_single_quotes" {
+ err(
+ \\{key: 'value'}
+ );
+}
+
+test "n_object_lone_continuation_byte_in_key_and_trailing_comma" {
+ err(
+ \\{"¹":"0",}
+ );
+}
+
+test "n_object_missing_colon" {
+ err(
+ \\{"a" b}
+ );
+}
+
+test "n_object_missing_key" {
+ err(
+ \\{:"b"}
+ );
+}
+
+test "n_object_missing_semicolon" {
+ err(
+ \\{"a" "b"}
+ );
+}
+
+test "n_object_missing_value" {
+ err(
+ \\{"a":
+ );
+}
+
+test "n_object_no-colon" {
+ err(
+ \\{"a"
+ );
+}
+
+test "n_object_non_string_key_but_huge_number_instead" {
+ err(
+ \\{9999E9999:1}
+ );
+}
+
+test "n_object_non_string_key" {
+ err(
+ \\{1:1}
+ );
+}
+
+test "n_object_repeated_null_null" {
+ err(
+ \\{null:null,null:null}
+ );
+}
+
+test "n_object_several_trailing_commas" {
+ err(
+ \\{"id":0,,,,,}
+ );
+}
+
+test "n_object_single_quote" {
+ err(
+ \\{'a':0}
+ );
+}
+
+test "n_object_trailing_comma" {
+ err(
+ \\{"id":0,}
+ );
+}
+
+test "n_object_trailing_comment" {
+ err(
+ \\{"a":"b"}/**/
+ );
+}
+
+test "n_object_trailing_comment_open" {
+ err(
+ \\{"a":"b"}/**//
+ );
+}
+
+test "n_object_trailing_comment_slash_open_incomplete" {
+ err(
+ \\{"a":"b"}/
+ );
+}
+
+test "n_object_trailing_comment_slash_open" {
+ err(
+ \\{"a":"b"}//
+ );
+}
+
+test "n_object_two_commas_in_a_row" {
+ err(
+ \\{"a":"b",,"c":"d"}
+ );
+}
+
+test "n_object_unquoted_key" {
+ err(
+ \\{a: "b"}
+ );
+}
+
+test "n_object_unterminated-value" {
+ err(
+ \\{"a":"a
+ );
+ }
+
+test "n_object_with_single_string" {
+ err(
+ \\{ "foo" : "bar", "a" }
+ );
+}
+
+test "n_object_with_trailing_garbage" {
+ err(
+ \\{"a":"b"}#
+ );
+}
+
+test "n_single_space" {
+ err(
+ " "
+ );
+}
+
+test "n_string_1_surrogate_then_escape" {
+ err(
+ \\["\uD800\"]
+ );
+}
+
+test "n_string_1_surrogate_then_escape_u1" {
+ err(
+ \\["\uD800\u1"]
+ );
+}
+
+test "n_string_1_surrogate_then_escape_u1x" {
+ err(
+ \\["\uD800\u1x"]
+ );
+}
+
+test "n_string_1_surrogate_then_escape_u" {
+ err(
+ \\["\uD800\u"]
+ );
+}
+
+test "n_string_accentuated_char_no_quotes" {
+ err(
+ \\[é]
+ );
+}
+
+test "n_string_backslash_00" {
+ err(
+ \\["\ "]
+ );
+}
+
+test "n_string_escaped_backslash_bad" {
+ err(
+ \\["\\\"]
+ );
+}
+
+test "n_string_escaped_ctrl_char_tab" {
+ err(
+ \\["\ "]
+ );
+}
+
+test "n_string_escaped_emoji" {
+ err(
+ \\["\ð"]
+ );
+}
+
+test "n_string_escape_x" {
+ err(
+ \\["\x00"]
+ );
+}
+
+test "n_string_incomplete_escaped_character" {
+ err(
+ \\["\u00A"]
+ );
+}
+
+test "n_string_incomplete_escape" {
+ err(
+ \\["\"]
+ );
+}
+
+test "n_string_incomplete_surrogate_escape_invalid" {
+ err(
+ \\["\uD800\uD800\x"]
+ );
+}
+
+test "n_string_incomplete_surrogate" {
+ err(
+ \\["\uD834\uDd"]
+ );
+}
+
+test "n_string_invalid_backslash_esc" {
+ err(
+ \\["\a"]
+ );
+}
+
+test "n_string_invalid_unicode_escape" {
+ err(
+ \\["\uqqqq"]
+ );
+}
+
+test "n_string_invalid_utf8_after_escape" {
+ err(
+ \\["\å"]
+ );
+}
+
+test "n_string_invalid-utf-8-in-escape" {
+ err(
+ \\["\uå"]
+ );
+}
+
+test "n_string_leading_uescaped_thinspace" {
+ err(
+ \\[\u0020"asd"]
+ );
+}
+
+test "n_string_no_quotes_with_bad_escape" {
+ err(
+ \\[\n]
+ );
+}
+
+test "n_string_single_doublequote" {
+ err(
+ \\"
+ );
+}
+
+test "n_string_single_quote" {
+ err(
+ \\['single quote']
+ );
+}
+
+test "n_string_single_string_no_double_quotes" {
+ err(
+ \\abc
+ );
+}
+
+test "n_string_start_escape_unclosed" {
+ err(
+ \\["\
+ );
+}
+
+test "n_string_unescaped_crtl_char" {
+ err(
+ \\["a a"]
+ );
+}
+
+test "n_string_unescaped_newline" {
+ err(
+ \\["new
+ \\line"]
+ );
+}
+
+test "n_string_unescaped_tab" {
+ err(
+ \\[" "]
+ );
+}
+
+test "n_string_unicode_CapitalU" {
+ err(
+ \\"\UA66D"
+ );
+}
+
+test "n_string_with_trailing_garbage" {
+ err(
+ \\""x
+ );
+}
+
+test "n_structure_100000_opening_arrays" {
+ err(
+ "[" ** 100000
+ );
+}
+
+test "n_structure_angle_bracket_." {
+ err(
+ \\<.>
+ );
+}
+
+test "n_structure_angle_bracket_null" {
+ err(
+ \\[]
+ );
+}
+
+test "n_structure_array_trailing_garbage" {
+ err(
+ \\[1]x
+ );
+}
+
+test "n_structure_array_with_extra_array_close" {
+ err(
+ \\[1]]
+ );
+}
+
+test "n_structure_array_with_unclosed_string" {
+ err(
+ \\["asd]
+ );
+}
+
+test "n_structure_ascii-unicode-identifier" {
+ err(
+ \\aå
+ );
+}
+
+test "n_structure_capitalized_True" {
+ err(
+ \\[True]
+ );
+}
+
+test "n_structure_close_unopened_array" {
+ err(
+ \\1]
+ );
+}
+
+test "n_structure_comma_instead_of_closing_brace" {
+ err(
+ \\{"x": true,
+ );
+}
+
+test "n_structure_double_array" {
+ err(
+ \\[][]
+ );
+}
+
+test "n_structure_end_array" {
+ err(
+ \\]
+ );
+}
+
+test "n_structure_incomplete_UTF8_BOM" {
+ err(
+ \\ï»{}
+ );
+}
+
+test "n_structure_lone-invalid-utf-8" {
+ err(
+ \\å
+ );
+}
+
+test "n_structure_lone-open-bracket" {
+ err(
+ \\[
+ );
+}
+
+test "n_structure_no_data" {
+ err(
+ \\
+ );
+}
+
+test "n_structure_null-byte-outside-string" {
+ err(
+ \\[ ]
+ );
+}
+
+test "n_structure_number_with_trailing_garbage" {
+ err(
+ \\2@
+ );
+}
+
+test "n_structure_object_followed_by_closing_object" {
+ err(
+ \\{}}
+ );
+}
+
+test "n_structure_object_unclosed_no_value" {
+ err(
+ \\{"":
+ );
+}
+
+test "n_structure_object_with_comment" {
+ err(
+ \\{"a":/*comment*/"b"}
+ );
+}
+
+test "n_structure_object_with_trailing_garbage" {
+ err(
+ \\{"a": true} "x"
+ );
+}
+
+test "n_structure_open_array_apostrophe" {
+ err(
+ \\['
+ );
+}
+
+test "n_structure_open_array_comma" {
+ err(
+ \\[,
+ );
+}
+
+test "n_structure_open_array_object" {
+ err(
+ "[{\"\":" ** 50000
+ );
+}
+
+test "n_structure_open_array_open_object" {
+ err(
+ \\[{
+ );
+}
+
+test "n_structure_open_array_open_string" {
+ err(
+ \\["a
+ );
+}
+
+test "n_structure_open_array_string" {
+ err(
+ \\["a"
+ );
+}
+
+test "n_structure_open_object_close_array" {
+ err(
+ \\{]
+ );
+}
+
+test "n_structure_open_object_comma" {
+ err(
+ \\{,
+ );
+}
+
+test "n_structure_open_object" {
+ err(
+ \\{
+ );
+}
+
+test "n_structure_open_object_open_array" {
+ err(
+ \\{[
+ );
+}
+
+test "n_structure_open_object_open_string" {
+ err(
+ \\{"a
+ );
+}
+
+test "n_structure_open_object_string_with_apostrophes" {
+ err(
+ \\{'a'
+ );
+}
+
+test "n_structure_open_open" {
+ err(
+ \\["\{["\{["\{["\{
+ );
+}
+
+test "n_structure_single_eacute" {
+ err(
+ \\é
+ );
+}
+
+test "n_structure_single_star" {
+ err(
+ \\*
+ );
+}
+
+test "n_structure_trailing_#" {
+ err(
+ \\{"a":"b"}#{}
+ );
+}
+
+test "n_structure_U+2060_word_joined" {
+ err(
+ \\[â ]
+ );
+}
+
+test "n_structure_uescaped_LF_before_string" {
+ err(
+ \\[\u000A""]
+ );
+}
+
+test "n_structure_unclosed_array" {
+ err(
+ \\[1
+ );
+}
+
+test "n_structure_unclosed_array_partial_null" {
+ err(
+ \\[ false, nul
+ );
+}
+
+test "n_structure_unclosed_array_unfinished_false" {
+ err(
+ \\[ true, fals
+ );
+}
+
+test "n_structure_unclosed_array_unfinished_true" {
+ err(
+ \\[ false, tru
+ );
+}
+
+test "n_structure_unclosed_object" {
+ err(
+ \\{"asd":"asd"
+ );
+}
+
+test "n_structure_unicode-identifier" {
+ err(
+ \\Ã¥
+ );
+}
+
+test "n_structure_UTF8_BOM_no_data" {
+ err(
+ \\
+ );
+}
+
+test "n_structure_whitespace_formfeed" {
+ err(
+ \\[]
+ );
+}
+
+test "n_structure_whitespace_U+2060_word_joiner" {
+ err(
+ \\[â ]
+ );
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+test "i_number_double_huge_neg_exp" {
+ any(
+ \\[123.456e-789]
+ );
+}
+
+test "i_number_huge_exp" {
+ any(
+ \\[0.4e00669999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999969999999006]
+ );
+}
+
+test "i_number_neg_int_huge_exp" {
+ any(
+ \\[-1e+9999]
+ );
+}
+
+test "i_number_pos_double_huge_exp" {
+ any(
+ \\[1.5e+9999]
+ );
+}
+
+test "i_number_real_neg_overflow" {
+ any(
+ \\[-123123e100000]
+ );
+}
+
+test "i_number_real_pos_overflow" {
+ any(
+ \\[123123e100000]
+ );
+}
+
+test "i_number_real_underflow" {
+ any(
+ \\[123e-10000000]
+ );
+}
+
+test "i_number_too_big_neg_int" {
+ any(
+ \\[-123123123123123123123123123123]
+ );
+}
+
+test "i_number_too_big_pos_int" {
+ any(
+ \\[100000000000000000000]
+ );
+}
+
+test "i_number_very_big_negative_int" {
+ any(
+ \\[-237462374673276894279832749832423479823246327846]
+ );
+}
+
+test "i_object_key_lone_2nd_surrogate" {
+ any(
+ \\{"\uDFAA":0}
+ );
+}
+
+test "i_string_1st_surrogate_but_2nd_missing" {
+ any(
+ \\["\uDADA"]
+ );
+}
+
+test "i_string_1st_valid_surrogate_2nd_invalid" {
+ any(
+ \\["\uD888\u1234"]
+ );
+}
+
+test "i_string_incomplete_surrogate_and_escape_valid" {
+ any(
+ \\["\uD800\n"]
+ );
+}
+
+test "i_string_incomplete_surrogate_pair" {
+ any(
+ \\["\uDd1ea"]
+ );
+}
+
+test "i_string_incomplete_surrogates_escape_valid" {
+ any(
+ \\["\uD800\uD800\n"]
+ );
+}
+
+test "i_string_invalid_lonely_surrogate" {
+ any(
+ \\["\ud800"]
+ );
+}
+
+test "i_string_invalid_surrogate" {
+ any(
+ \\["\ud800abc"]
+ );
+}
+
+test "i_string_invalid_utf-8" {
+ any(
+ \\["ÿ"]
+ );
+}
+
+test "i_string_inverted_surrogates_U+1D11E" {
+ any(
+ \\["\uDd1e\uD834"]
+ );
+}
+
+test "i_string_iso_latin_1" {
+ any(
+ \\["é"]
+ );
+}
+
+test "i_string_lone_second_surrogate" {
+ any(
+ \\["\uDFAA"]
+ );
+}
+
+test "i_string_lone_utf8_continuation_byte" {
+ any(
+ \\[""]
+ );
+}
+
+test "i_string_not_in_unicode_range" {
+ any(
+ \\["ô¿¿¿"]
+ );
+}
+
+test "i_string_overlong_sequence_2_bytes" {
+ any(
+ \\["À¯"]
+ );
+}
+
+test "i_string_overlong_sequence_6_bytes" {
+ any(
+ \\["ü¿¿¿¿"]
+ );
+}
+
+test "i_string_overlong_sequence_6_bytes_null" {
+ any(
+ \\["ü"]
+ );
+}
+
+test "i_string_truncated-utf-8" {
+ any(
+ \\["àÿ"]
+ );
+}
+
+test "i_string_utf16BE_no_BOM" {
+ any(
+ \\ [ " é " ]
+ );
+}
+
+test "i_string_utf16LE_no_BOM" {
+ any(
+ \\[ " é " ]
+ );
+}
+
+test "i_string_UTF-16LE_with_BOM" {
+ any(
+ \\ÿþ[ " é " ]
+ );
+}
+
+test "i_string_UTF-8_invalid_sequence" {
+ any(
+ \\["æ¥Ñú"]
+ );
+}
+
+test "i_string_UTF8_surrogate_U+D800" {
+ any(
+ \\["í "]
+ );
+}
+
+test "i_structure_500_nested_arrays" {
+ any(
+ ("[" ** 500) ++ ("]" ** 500)
+ );
+}
+
+test "i_structure_UTF-8_BOM_empty_object" {
+ any(
+ \\{}
+ );
+}
diff --git a/std/math/index.zig b/std/math/index.zig
index 05de604c6c..8fcd05d760 100644
--- a/std/math/index.zig
+++ b/std/math/index.zig
@@ -541,6 +541,32 @@ test "math.floorPowerOfTwo" {
comptime testFloorPowerOfTwo();
}
+pub fn log2_int(comptime T: type, x: T) Log2Int(T) {
+ assert(x != 0);
+ return Log2Int(T)(T.bit_count - 1 - @clz(x));
+}
+
+pub fn log2_int_ceil(comptime T: type, x: T) Log2Int(T) {
+ assert(x != 0);
+ const log2_val = log2_int(T, x);
+ if (T(1) << log2_val == x)
+ return log2_val;
+ return log2_val + 1;
+}
+
+test "std.math.log2_int_ceil" {
+ assert(log2_int_ceil(u32, 1) == 0);
+ assert(log2_int_ceil(u32, 2) == 1);
+ assert(log2_int_ceil(u32, 3) == 2);
+ assert(log2_int_ceil(u32, 4) == 2);
+ assert(log2_int_ceil(u32, 5) == 3);
+ assert(log2_int_ceil(u32, 6) == 3);
+ assert(log2_int_ceil(u32, 7) == 3);
+ assert(log2_int_ceil(u32, 8) == 3);
+ assert(log2_int_ceil(u32, 9) == 4);
+ assert(log2_int_ceil(u32, 10) == 4);
+}
+
fn testFloorPowerOfTwo() void {
assert(floorPowerOfTwo(u32, 63) == 32);
assert(floorPowerOfTwo(u32, 64) == 64);
diff --git a/std/math/log2.zig b/std/math/log2.zig
index 998d6d6c5e..d5bbe385c2 100644
--- a/std/math/log2.zig
+++ b/std/math/log2.zig
@@ -31,17 +31,12 @@ pub fn log2(x: var) @typeOf(x) {
return result;
},
TypeId.Int => {
- return log2_int(T, x);
+ return math.log2_int(T, x);
},
else => @compileError("log2 not implemented for " ++ @typeName(T)),
}
}
-pub fn log2_int(comptime T: type, x: T) T {
- assert(x != 0);
- return T.bit_count - 1 - T(@clz(x));
-}
-
pub fn log2_32(x_: f32) f32 {
const ivln2hi: f32 = 1.4428710938e+00;
const ivln2lo: f32 = -1.7605285393e-04;
diff --git a/std/os/index.zig b/std/os/index.zig
index 8728f4a6f6..7d19cd82c6 100644
--- a/std/os/index.zig
+++ b/std/os/index.zig
@@ -2473,6 +2473,7 @@ pub const Thread = struct {
},
builtin.Os.windows => {
assert(windows.WaitForSingleObject(self.data.handle, windows.INFINITE) == windows.WAIT_OBJECT_0);
+ assert(windows.CloseHandle(self.data.handle) != 0);
assert(windows.HeapFree(self.data.heap_handle, 0, self.data.alloc_start) != 0);
},
else => @compileError("Unsupported OS"),
diff --git a/std/segmented_list.zig b/std/segmented_list.zig
new file mode 100644
index 0000000000..6c7c879919
--- /dev/null
+++ b/std/segmented_list.zig
@@ -0,0 +1,368 @@
+const std = @import("index.zig");
+const assert = std.debug.assert;
+const Allocator = std.mem.Allocator;
+
+// Imagine that `fn at(self: &Self, index: usize) &T` is a customer asking for a box
+// from a warehouse, based on a flat array, boxes ordered from 0 to N - 1.
+// But the warehouse actually stores boxes in shelves of increasing powers of 2 sizes.
+// So when the customer requests a box index, we have to translate it to shelf index
+// and box index within that shelf. Illustration:
+//
+// customer indexes:
+// shelf 0: 0
+// shelf 1: 1 2
+// shelf 2: 3 4 5 6
+// shelf 3: 7 8 9 10 11 12 13 14
+// shelf 4: 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
+// shelf 5: 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62
+// ...
+//
+// warehouse indexes:
+// shelf 0: 0
+// shelf 1: 0 1
+// shelf 2: 0 1 2 3
+// shelf 3: 0 1 2 3 4 5 6 7
+// shelf 4: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+// shelf 5: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
+// ...
+//
+// With this arrangement, here are the equations to get the shelf index and
+// box index based on customer box index:
+//
+// shelf_index = floor(log2(customer_index + 1))
+// shelf_count = ceil(log2(box_count + 1))
+// box_index = customer_index + 1 - 2 ** shelf
+// shelf_size = 2 ** shelf_index
+//
+// Now we complicate it a little bit further by adding a preallocated shelf, which must be
+// a power of 2:
+// prealloc=4
+//
+// customer indexes:
+// prealloc: 0 1 2 3
+// shelf 0: 4 5 6 7 8 9 10 11
+// shelf 1: 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27
+// shelf 2: 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59
+// ...
+//
+// warehouse indexes:
+// prealloc: 0 1 2 3
+// shelf 0: 0 1 2 3 4 5 6 7
+// shelf 1: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+// shelf 2: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
+// ...
+//
+// Now the equations are:
+//
+// shelf_index = floor(log2(customer_index + prealloc)) - log2(prealloc) - 1
+// shelf_count = ceil(log2(box_count + prealloc)) - log2(prealloc) - 1
+// box_index = customer_index + prealloc - 2 ** (log2(prealloc) + 1 + shelf)
+// shelf_size = prealloc * 2 ** (shelf_index + 1)
+
+/// This is a stack data structure where pointers to indexes have the same lifetime as the data structure
+/// itself, unlike ArrayList where push() invalidates all existing element pointers.
+/// The tradeoff is that elements are not guaranteed to be contiguous. For that, use ArrayList.
+/// Note however that most elements are contiguous, making this data structure cache-friendly.
+///
+/// Because it never has to copy elements from an old location to a new location, it does not require
+/// its elements to be copyable, and it avoids wasting memory when backed by an ArenaAllocator.
+/// Note that the push() and pop() convenience methods perform a copy, but you can instead use
+/// addOne(), at(), setCapacity(), and shrinkCapacity() to avoid copying items.
+///
+/// This data structure has O(1) push and O(1) pop.
+///
+/// It supports preallocated elements, making it especially well suited when the expected maximum
+/// size is small. `prealloc_item_count` must be 0, or a power of 2.
+pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type {
+ return struct {
+ const Self = this;
+ const prealloc_exp = blk: {
+ // we don't use the prealloc_exp constant when prealloc_item_count is 0.
+ assert(prealloc_item_count != 0);
+
+ const value = std.math.log2_int(usize, prealloc_item_count);
+ assert((1 << value) == prealloc_item_count); // prealloc_item_count must be a power of 2
+ break :blk @typeOf(1)(value);
+ };
+ const ShelfIndex = std.math.Log2Int(usize);
+
+ prealloc_segment: [prealloc_item_count]T,
+ dynamic_segments: []&T,
+ allocator: &Allocator,
+ len: usize,
+
+ /// Deinitialize with `deinit`
+ pub fn init(allocator: &Allocator) Self {
+ return Self {
+ .allocator = allocator,
+ .len = 0,
+ .prealloc_segment = undefined,
+ .dynamic_segments = []&T{},
+ };
+ }
+
+ pub fn deinit(self: &Self) void {
+ self.freeShelves(ShelfIndex(self.dynamic_segments.len), 0);
+ self.allocator.free(self.dynamic_segments);
+ *self = undefined;
+ }
+
+ pub fn at(self: &Self, i: usize) &T {
+ assert(i < self.len);
+ return self.uncheckedAt(i);
+ }
+
+ pub fn count(self: &const Self) usize {
+ return self.len;
+ }
+
+ pub fn push(self: &Self, item: &const T) !void {
+ const new_item_ptr = try self.addOne();
+ *new_item_ptr = *item;
+ }
+
+ pub fn pushMany(self: &Self, items: []const T) !void {
+ for (items) |item| {
+ try self.push(item);
+ }
+ }
+
+ pub fn pop(self: &Self) ?T {
+ if (self.len == 0)
+ return null;
+
+ const index = self.len - 1;
+ const result = *self.uncheckedAt(index);
+ self.len = index;
+ return result;
+ }
+
+ pub fn addOne(self: &Self) !&T {
+ const new_length = self.len + 1;
+ try self.growCapacity(new_length);
+ const result = self.uncheckedAt(self.len);
+ self.len = new_length;
+ return result;
+ }
+
+ /// Grows or shrinks capacity to match usage.
+ pub fn setCapacity(self: &Self, new_capacity: usize) !void {
+ if (new_capacity <= usize(1) << (prealloc_exp + self.dynamic_segments.len)) {
+ return self.shrinkCapacity(new_capacity);
+ } else {
+ return self.growCapacity(new_capacity);
+ }
+ }
+
+ /// Only grows capacity, or retains current capacity
+ pub fn growCapacity(self: &Self, new_capacity: usize) !void {
+ const new_cap_shelf_count = shelfCount(new_capacity);
+ const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
+ if (new_cap_shelf_count > old_shelf_count) {
+ self.dynamic_segments = try self.allocator.realloc(&T, self.dynamic_segments, new_cap_shelf_count);
+ var i = old_shelf_count;
+ errdefer {
+ self.freeShelves(i, old_shelf_count);
+ self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, old_shelf_count);
+ }
+ while (i < new_cap_shelf_count) : (i += 1) {
+ self.dynamic_segments[i] = (try self.allocator.alloc(T, shelfSize(i))).ptr;
+ }
+ }
+ }
+
+ /// Only shrinks capacity or retains current capacity
+ pub fn shrinkCapacity(self: &Self, new_capacity: usize) void {
+ if (new_capacity <= prealloc_item_count) {
+ const len = ShelfIndex(self.dynamic_segments.len);
+ self.freeShelves(len, 0);
+ self.allocator.free(self.dynamic_segments);
+ self.dynamic_segments = []&T{};
+ return;
+ }
+
+ const new_cap_shelf_count = shelfCount(new_capacity);
+ const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
+ assert(new_cap_shelf_count <= old_shelf_count);
+ if (new_cap_shelf_count == old_shelf_count) {
+ return;
+ }
+
+ self.freeShelves(old_shelf_count, new_cap_shelf_count);
+ self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, new_cap_shelf_count);
+ }
+
+ pub fn uncheckedAt(self: &Self, index: usize) &T {
+ if (index < prealloc_item_count) {
+ return &self.prealloc_segment[index];
+ }
+ const shelf_index = shelfIndex(index);
+ const box_index = boxIndex(index, shelf_index);
+ return &self.dynamic_segments[shelf_index][box_index];
+ }
+
+ fn shelfCount(box_count: usize) ShelfIndex {
+ if (prealloc_item_count == 0) {
+ return std.math.log2_int_ceil(usize, box_count + 1);
+ }
+ return std.math.log2_int_ceil(usize, box_count + prealloc_item_count) - prealloc_exp - 1;
+ }
+
+ fn shelfSize(shelf_index: ShelfIndex) usize {
+ if (prealloc_item_count == 0) {
+ return usize(1) << shelf_index;
+ }
+ return usize(1) << (shelf_index + (prealloc_exp + 1));
+ }
+
+ fn shelfIndex(list_index: usize) ShelfIndex {
+ if (prealloc_item_count == 0) {
+ return std.math.log2_int(usize, list_index + 1);
+ }
+ return std.math.log2_int(usize, list_index + prealloc_item_count) - prealloc_exp - 1;
+ }
+
+ fn boxIndex(list_index: usize, shelf_index: ShelfIndex) usize {
+ if (prealloc_item_count == 0) {
+ return (list_index + 1) - (usize(1) << shelf_index);
+ }
+ return list_index + prealloc_item_count - (usize(1) << ((prealloc_exp + 1) + shelf_index));
+ }
+
+ fn freeShelves(self: &Self, from_count: ShelfIndex, to_count: ShelfIndex) void {
+ var i = from_count;
+ while (i != to_count) {
+ i -= 1;
+ self.allocator.free(self.dynamic_segments[i][0..shelfSize(i)]);
+ }
+ }
+
+ pub const Iterator = struct {
+ list: &Self,
+ index: usize,
+ box_index: usize,
+ shelf_index: ShelfIndex,
+ shelf_size: usize,
+
+ pub fn next(it: &Iterator) ?&T {
+ if (it.index >= it.list.len)
+ return null;
+ if (it.index < prealloc_item_count) {
+ const ptr = &it.list.prealloc_segment[it.index];
+ it.index += 1;
+ if (it.index == prealloc_item_count) {
+ it.box_index = 0;
+ it.shelf_index = 0;
+ it.shelf_size = prealloc_item_count * 2;
+ }
+ return ptr;
+ }
+
+ const ptr = &it.list.dynamic_segments[it.shelf_index][it.box_index];
+ it.index += 1;
+ it.box_index += 1;
+ if (it.box_index == it.shelf_size) {
+ it.shelf_index += 1;
+ it.box_index = 0;
+ it.shelf_size *= 2;
+ }
+ return ptr;
+ }
+
+ pub fn prev(it: &Iterator) ?&T {
+ if (it.index == 0)
+ return null;
+
+ it.index -= 1;
+ if (it.index < prealloc_item_count)
+ return &it.list.prealloc_segment[it.index];
+
+ if (it.box_index == 0) {
+ it.shelf_index -= 1;
+ it.shelf_size /= 2;
+ it.box_index = it.shelf_size - 1;
+ } else {
+ it.box_index -= 1;
+ }
+
+ return &it.list.dynamic_segments[it.shelf_index][it.box_index];
+ }
+ };
+
+ pub fn iterator(self: &Self, start_index: usize) Iterator {
+ var it = Iterator {
+ .list = self,
+ .index = start_index,
+ .shelf_index = undefined,
+ .box_index = undefined,
+ .shelf_size = undefined,
+ };
+ if (start_index >= prealloc_item_count) {
+ it.shelf_index = shelfIndex(start_index);
+ it.box_index = boxIndex(start_index, it.shelf_index);
+ it.shelf_size = shelfSize(it.shelf_index);
+ }
+ return it;
+ }
+ };
+}
+
+test "std.SegmentedList" {
+ var da = std.heap.DirectAllocator.init();
+ defer da.deinit();
+ var a = &da.allocator;
+
+ try testSegmentedList(0, a);
+ try testSegmentedList(1, a);
+ try testSegmentedList(2, a);
+ try testSegmentedList(4, a);
+ try testSegmentedList(8, a);
+ try testSegmentedList(16, a);
+}
+
+fn testSegmentedList(comptime prealloc: usize, allocator: &Allocator) !void {
+ var list = SegmentedList(i32, prealloc).init(allocator);
+ defer list.deinit();
+
+ {var i: usize = 0; while (i < 100) : (i += 1) {
+ try list.push(i32(i + 1));
+ assert(list.len == i + 1);
+ }}
+
+ {var i: usize = 0; while (i < 100) : (i += 1) {
+ assert(*list.at(i) == i32(i + 1));
+ }}
+
+ {
+ var it = list.iterator(0);
+ var x: i32 = 0;
+ while (it.next()) |item| {
+ x += 1;
+ assert(*item == x);
+ }
+ assert(x == 100);
+ while (it.prev()) |item| : (x -= 1) {
+ assert(*item == x);
+ }
+ assert(x == 0);
+ }
+
+ assert(??list.pop() == 100);
+ assert(list.len == 99);
+
+ try list.pushMany([]i32 { 1, 2, 3 });
+ assert(list.len == 102);
+ assert(??list.pop() == 3);
+ assert(??list.pop() == 2);
+ assert(??list.pop() == 1);
+ assert(list.len == 99);
+
+ try list.pushMany([]const i32 {});
+ assert(list.len == 99);
+
+ var i: i32 = 99;
+ while (list.pop()) |item| : (i -= 1) {
+ assert(item == i);
+ list.shrinkCapacity(list.len);
+ }
+}
diff --git a/std/special/compiler_rt/fixuint.zig b/std/special/compiler_rt/fixuint.zig
index d9a7393fe4..bd9b2fc395 100644
--- a/std/special/compiler_rt/fixuint.zig
+++ b/std/special/compiler_rt/fixuint.zig
@@ -1,5 +1,5 @@
const is_test = @import("builtin").is_test;
-const Log2Int = @import("../../math/index.zig").Log2Int;
+const Log2Int = @import("std").math.Log2Int;
pub fn fixuint(comptime fp_t: type, comptime fixuint_t: type, a: fp_t) fixuint_t {
@setRuntimeSafety(is_test);
diff --git a/std/special/compiler_rt/fixunsdfdi_test.zig b/std/special/compiler_rt/fixunsdfdi_test.zig
index 3443a4938e..e59d09f8de 100644
--- a/std/special/compiler_rt/fixunsdfdi_test.zig
+++ b/std/special/compiler_rt/fixunsdfdi_test.zig
@@ -1,5 +1,5 @@
const __fixunsdfdi = @import("fixunsdfdi.zig").__fixunsdfdi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunsdfdi(a: f64, expected: u64) void {
const x = __fixunsdfdi(a);
diff --git a/std/special/compiler_rt/fixunsdfsi_test.zig b/std/special/compiler_rt/fixunsdfsi_test.zig
index 3c74bc5f4c..db6e32e23d 100644
--- a/std/special/compiler_rt/fixunsdfsi_test.zig
+++ b/std/special/compiler_rt/fixunsdfsi_test.zig
@@ -1,5 +1,5 @@
const __fixunsdfsi = @import("fixunsdfsi.zig").__fixunsdfsi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunsdfsi(a: f64, expected: u32) void {
const x = __fixunsdfsi(a);
diff --git a/std/special/compiler_rt/fixunsdfti_test.zig b/std/special/compiler_rt/fixunsdfti_test.zig
index 3cb7687887..7283b35c0e 100644
--- a/std/special/compiler_rt/fixunsdfti_test.zig
+++ b/std/special/compiler_rt/fixunsdfti_test.zig
@@ -1,5 +1,5 @@
const __fixunsdfti = @import("fixunsdfti.zig").__fixunsdfti;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunsdfti(a: f64, expected: u128) void {
const x = __fixunsdfti(a);
diff --git a/std/special/compiler_rt/fixunssfdi_test.zig b/std/special/compiler_rt/fixunssfdi_test.zig
index de27323777..e4e6c1736d 100644
--- a/std/special/compiler_rt/fixunssfdi_test.zig
+++ b/std/special/compiler_rt/fixunssfdi_test.zig
@@ -1,5 +1,5 @@
const __fixunssfdi = @import("fixunssfdi.zig").__fixunssfdi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunssfdi(a: f32, expected: u64) void {
const x = __fixunssfdi(a);
diff --git a/std/special/compiler_rt/fixunssfsi_test.zig b/std/special/compiler_rt/fixunssfsi_test.zig
index 47ed21d4f4..614c648dfe 100644
--- a/std/special/compiler_rt/fixunssfsi_test.zig
+++ b/std/special/compiler_rt/fixunssfsi_test.zig
@@ -1,5 +1,5 @@
const __fixunssfsi = @import("fixunssfsi.zig").__fixunssfsi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunssfsi(a: f32, expected: u32) void {
const x = __fixunssfsi(a);
diff --git a/std/special/compiler_rt/fixunssfti_test.zig b/std/special/compiler_rt/fixunssfti_test.zig
index 3033eb0def..43ad527f53 100644
--- a/std/special/compiler_rt/fixunssfti_test.zig
+++ b/std/special/compiler_rt/fixunssfti_test.zig
@@ -1,5 +1,5 @@
const __fixunssfti = @import("fixunssfti.zig").__fixunssfti;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunssfti(a: f32, expected: u128) void {
const x = __fixunssfti(a);
diff --git a/std/special/compiler_rt/fixunstfdi_test.zig b/std/special/compiler_rt/fixunstfdi_test.zig
index d1f5f6496a..dd0869195a 100644
--- a/std/special/compiler_rt/fixunstfdi_test.zig
+++ b/std/special/compiler_rt/fixunstfdi_test.zig
@@ -1,5 +1,5 @@
const __fixunstfdi = @import("fixunstfdi.zig").__fixunstfdi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunstfdi(a: f128, expected: u64) void {
const x = __fixunstfdi(a);
diff --git a/std/special/compiler_rt/fixunstfsi_test.zig b/std/special/compiler_rt/fixunstfsi_test.zig
index 8bdf36d9d4..f682191994 100644
--- a/std/special/compiler_rt/fixunstfsi_test.zig
+++ b/std/special/compiler_rt/fixunstfsi_test.zig
@@ -1,5 +1,5 @@
const __fixunstfsi = @import("fixunstfsi.zig").__fixunstfsi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunstfsi(a: f128, expected: u32) void {
const x = __fixunstfsi(a);
diff --git a/std/special/compiler_rt/fixunstfti_test.zig b/std/special/compiler_rt/fixunstfti_test.zig
index d9eb60e59b..9128ac6c08 100644
--- a/std/special/compiler_rt/fixunstfti_test.zig
+++ b/std/special/compiler_rt/fixunstfti_test.zig
@@ -1,5 +1,5 @@
const __fixunstfti = @import("fixunstfti.zig").__fixunstfti;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunstfti(a: f128, expected: u128) void {
const x = __fixunstfti(a);
diff --git a/std/special/compiler_rt/index.zig b/std/special/compiler_rt/index.zig
index 44466a407d..b051ccfc9d 100644
--- a/std/special/compiler_rt/index.zig
+++ b/std/special/compiler_rt/index.zig
@@ -71,7 +71,8 @@ comptime {
}
}
-const assert = @import("../../index.zig").debug.assert;
+const std = @import("std");
+const assert = std.debug.assert;
const __udivmoddi4 = @import("udivmoddi4.zig").__udivmoddi4;
@@ -80,7 +81,7 @@ const __udivmoddi4 = @import("udivmoddi4.zig").__udivmoddi4;
pub fn panic(msg: []const u8, error_return_trace: ?&builtin.StackTrace) noreturn {
@setCold(true);
if (is_test) {
- @import("std").debug.panic("{}", msg);
+ std.debug.panic("{}", msg);
} else {
unreachable;
}
diff --git a/std/special/compiler_rt/udivmod.zig b/std/special/compiler_rt/udivmod.zig
index e8a86c5c44..0dee5e45f6 100644
--- a/std/special/compiler_rt/udivmod.zig
+++ b/std/special/compiler_rt/udivmod.zig
@@ -12,7 +12,7 @@ pub fn udivmod(comptime DoubleInt: type, a: DoubleInt, b: DoubleInt, maybe_rem:
const SingleInt = @IntType(false, @divExact(DoubleInt.bit_count, 2));
const SignedDoubleInt = @IntType(true, DoubleInt.bit_count);
- const Log2SingleInt = @import("../../math/index.zig").Log2Int(SingleInt);
+ const Log2SingleInt = @import("std").math.Log2Int(SingleInt);
const n = @ptrCast(&const [2]SingleInt, &a).*; // TODO issue #421
const d = @ptrCast(&const [2]SingleInt, &b).*; // TODO issue #421
diff --git a/std/zig/ast.zig b/std/zig/ast.zig
index 716ed8eb7d..d1d7fe7914 100644
--- a/std/zig/ast.zig
+++ b/std/zig/ast.zig
@@ -6,7 +6,6 @@ const mem = std.mem;
pub const Node = struct {
id: Id,
- doc_comments: ?&DocComment,
same_line_comment: ?&Token,
pub const Id = enum {
@@ -36,6 +35,7 @@ pub const Node = struct {
VarType,
ErrorType,
FnProto,
+ PromiseType,
// Primary expressions
IntegerLiteral,
@@ -69,6 +69,7 @@ pub const Node = struct {
StructField,
UnionTag,
EnumTag,
+ ErrorTag,
AsmInput,
AsmOutput,
AsyncAttribute,
@@ -76,6 +77,13 @@ pub const Node = struct {
FieldInitializer,
};
+ pub fn cast(base: &Node, comptime T: type) ?&T {
+ if (base.id == comptime typeToId(T)) {
+ return @fieldParentPtr(T, "base", base);
+ }
+ return null;
+ }
+
pub fn iterate(base: &Node, index: usize) ?&Node {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
@@ -121,6 +129,7 @@ pub const Node = struct {
pub const Root = struct {
base: Node,
+ doc_comments: ?&DocComment,
decls: ArrayList(&Node),
eof_token: Token,
@@ -142,6 +151,7 @@ pub const Node = struct {
pub const VarDecl = struct {
base: Node,
+ doc_comments: ?&DocComment,
visib_token: ?Token,
name_token: Token,
eq_token: Token,
@@ -190,6 +200,7 @@ pub const Node = struct {
pub const Use = struct {
base: Node,
+ doc_comments: ?&DocComment,
visib_token: ?Token,
expr: &Node,
semicolon_token: Token,
@@ -260,7 +271,7 @@ pub const Node = struct {
const InitArg = union(enum) {
None,
- Enum,
+ Enum: ?&Node,
Type: &Node,
};
@@ -293,6 +304,7 @@ pub const Node = struct {
pub const StructField = struct {
base: Node,
+ doc_comments: ?&DocComment,
visib_token: ?Token,
name_token: Token,
type_expr: &Node,
@@ -318,8 +330,10 @@ pub const Node = struct {
pub const UnionTag = struct {
base: Node,
+ doc_comments: ?&DocComment,
name_token: Token,
type_expr: ?&Node,
+ value_expr: ?&Node,
pub fn iterate(self: &UnionTag, index: usize) ?&Node {
var i = index;
@@ -329,6 +343,11 @@ pub const Node = struct {
i -= 1;
}
+ if (self.value_expr) |value_expr| {
+ if (i < 1) return value_expr;
+ i -= 1;
+ }
+
return null;
}
@@ -337,6 +356,9 @@ pub const Node = struct {
}
pub fn lastToken(self: &UnionTag) Token {
+ if (self.value_expr) |value_expr| {
+ return value_expr.lastToken();
+ }
if (self.type_expr) |type_expr| {
return type_expr.lastToken();
}
@@ -347,6 +369,7 @@ pub const Node = struct {
pub const EnumTag = struct {
base: Node,
+ doc_comments: ?&DocComment,
name_token: Token,
value: ?&Node,
@@ -374,6 +397,31 @@ pub const Node = struct {
}
};
+ pub const ErrorTag = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ name_token: Token,
+
+ pub fn iterate(self: &ErrorTag, index: usize) ?&Node {
+ var i = index;
+
+ if (self.doc_comments) |comments| {
+ if (i < 1) return &comments.base;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &ErrorTag) Token {
+ return self.name_token;
+ }
+
+ pub fn lastToken(self: &ErrorTag) Token {
+ return self.name_token;
+ }
+ };
+
pub const Identifier = struct {
base: Node,
token: Token,
@@ -423,6 +471,7 @@ pub const Node = struct {
pub const FnProto = struct {
base: Node,
+ doc_comments: ?&DocComment,
visib_token: ?Token,
fn_token: Token,
name_token: ?Token,
@@ -495,6 +544,37 @@ pub const Node = struct {
}
};
+ pub const PromiseType = struct {
+ base: Node,
+ promise_token: Token,
+ result: ?Result,
+
+ pub const Result = struct {
+ arrow_token: Token,
+ return_type: &Node,
+ };
+
+ pub fn iterate(self: &PromiseType, index: usize) ?&Node {
+ var i = index;
+
+ if (self.result) |result| {
+ if (i < 1) return result.return_type;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &PromiseType) Token {
+ return self.promise_token;
+ }
+
+ pub fn lastToken(self: &PromiseType) Token {
+ if (self.result) |result| return result.return_type.lastToken();
+ return self.promise_token;
+ }
+ };
+
pub const ParamDecl = struct {
base: Node,
comptime_token: ?Token,
@@ -585,6 +665,7 @@ pub const Node = struct {
pub const Comptime = struct {
base: Node,
+ doc_comments: ?&DocComment,
comptime_token: Token,
expr: &Node,
@@ -1188,7 +1269,7 @@ pub const Node = struct {
ArrayAccess: &Node,
Slice: SliceRange,
ArrayInitializer: ArrayList(&Node),
- StructInitializer: ArrayList(&FieldInitializer),
+ StructInitializer: ArrayList(&Node),
};
const CallInfo = struct {
@@ -1230,7 +1311,7 @@ pub const Node = struct {
i -= exprs.len;
},
Op.StructInitializer => |fields| {
- if (i < fields.len) return &fields.at(i).base;
+ if (i < fields.len) return fields.at(i);
i -= fields.len;
},
}
@@ -1339,6 +1420,7 @@ pub const Node = struct {
pub const Suspend = struct {
base: Node,
+ label: ?Token,
suspend_token: Token,
payload: ?&Node,
body: ?&Node,
@@ -1360,6 +1442,7 @@ pub const Node = struct {
}
pub fn firstToken(self: &Suspend) Token {
+ if (self.label) |label| return label;
return self.suspend_token;
}
@@ -1751,6 +1834,7 @@ pub const Node = struct {
pub const TestDecl = struct {
base: Node,
+ doc_comments: ?&DocComment,
test_token: Token,
name: &Node,
body_node: &Node,
diff --git a/std/zig/bench.zig b/std/zig/bench.zig
new file mode 100644
index 0000000000..c3b6b0d3d3
--- /dev/null
+++ b/std/zig/bench.zig
@@ -0,0 +1,38 @@
+const std = @import("std");
+const mem = std.mem;
+const warn = std.debug.warn;
+const Tokenizer = std.zig.Tokenizer;
+const Parser = std.zig.Parser;
+const io = std.io;
+
+const source = @embedFile("../os/index.zig");
+var fixed_buffer_mem: [10 * 1024 * 1024]u8 = undefined;
+
+pub fn main() !void {
+ var i: usize = 0;
+ var timer = try std.os.time.Timer.start();
+ const start = timer.lap();
+ const iterations = 100;
+ var memory_used: usize = 0;
+ while (i < iterations) : (i += 1) {
+ memory_used += testOnce();
+ }
+ const end = timer.read();
+ memory_used /= iterations;
+ const elapsed_s = f64(end - start) / std.os.time.ns_per_s;
+ const bytes_per_sec = f64(source.len * iterations) / elapsed_s;
+ const mb_per_sec = bytes_per_sec / (1024 * 1024);
+
+ var stdout_file = try std.io.getStdOut();
+ const stdout = &std.io.FileOutStream.init(&stdout_file).stream;
+ try stdout.print("{.3} MB/s, {} KB used \n", mb_per_sec, memory_used / 1024);
+}
+
+fn testOnce() usize {
+ var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
+ var allocator = &fixed_buf_alloc.allocator;
+ var tokenizer = Tokenizer.init(source);
+ var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
+ _ = parser.parse() catch @panic("parse failure");
+ return fixed_buf_alloc.end_index;
+}
diff --git a/std/zig/parser.zig b/std/zig/parser.zig
index 79a38f00ee..e4b2aa620a 100644
--- a/std/zig/parser.zig
+++ b/std/zig/parser.zig
@@ -228,7 +228,6 @@ pub const Parser = struct {
Statement: &ast.Node.Block,
ComptimeStatement: ComptimeStatementCtx,
Semicolon: &&ast.Node,
- AddComments: AddCommentsCtx,
LookForSameLineComment: &&ast.Node,
LookForSameLineCommentDirect: &ast.Node,
@@ -239,11 +238,12 @@ pub const Parser = struct {
ExprListItemOrEnd: ExprListCtx,
ExprListCommaOrEnd: ExprListCtx,
- FieldInitListItemOrEnd: ListSave(&ast.Node.FieldInitializer),
- FieldInitListCommaOrEnd: ListSave(&ast.Node.FieldInitializer),
+ FieldInitListItemOrEnd: ListSave(&ast.Node),
+ FieldInitListCommaOrEnd: ListSave(&ast.Node),
FieldListCommaOrEnd: &ast.Node.ContainerDecl,
- IdentifierListItemOrEnd: ListSave(&ast.Node),
- IdentifierListCommaOrEnd: ListSave(&ast.Node),
+ FieldInitValue: OptionalCtx,
+ ErrorTagListItemOrEnd: ListSave(&ast.Node),
+ ErrorTagListCommaOrEnd: ListSave(&ast.Node),
SwitchCaseOrEnd: ListSave(&ast.Node),
SwitchCaseCommaOrEnd: ListSave(&ast.Node),
SwitchCaseFirstItem: &ArrayList(&ast.Node),
@@ -300,6 +300,7 @@ pub const Parser = struct {
ErrorTypeOrSetDecl: ErrorTypeOrSetDeclCtx,
StringLiteral: OptionalCtx,
Identifier: OptionalCtx,
+ ErrorTag: &&ast.Node,
IfToken: @TagType(Token.Id),
@@ -324,6 +325,7 @@ pub const Parser = struct {
ast.Node.Root {
.base = undefined,
.decls = ArrayList(&ast.Node).init(arena),
+ .doc_comments = null,
// initialized when we get the eof token
.eof_token = undefined,
}
@@ -353,7 +355,7 @@ pub const Parser = struct {
try root_node.decls.append(&line_comment.base);
}
- const comments = try self.eatComments(arena);
+ const comments = try self.eatDocComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Keyword_test => {
@@ -362,7 +364,6 @@ pub const Parser = struct {
const block = try arena.construct(ast.Node.Block {
.base = ast.Node {
.id = ast.Node.Id.Block,
- .doc_comments = null,
.same_line_comment = null,
},
.label = null,
@@ -373,9 +374,9 @@ pub const Parser = struct {
const test_node = try arena.construct(ast.Node.TestDecl {
.base = ast.Node {
.id = ast.Node.Id.TestDecl,
- .doc_comments = comments,
.same_line_comment = null,
},
+ .doc_comments = comments,
.test_token = token,
.name = undefined,
.body_node = &block.base,
@@ -393,7 +394,11 @@ pub const Parser = struct {
},
Token.Id.Eof => {
root_node.eof_token = token;
- return Tree {.root_node = root_node, .arena_allocator = arena_allocator};
+ root_node.doc_comments = comments;
+ return Tree {
+ .root_node = root_node,
+ .arena_allocator = arena_allocator,
+ };
},
Token.Id.Keyword_pub => {
stack.append(State.TopLevel) catch unreachable;
@@ -423,6 +428,7 @@ pub const Parser = struct {
.base = undefined,
.comptime_token = token,
.expr = &block.base,
+ .doc_comments = comments,
}
);
stack.append(State.TopLevel) catch unreachable;
@@ -519,6 +525,7 @@ pub const Parser = struct {
.visib_token = ctx.visib_token,
.expr = undefined,
.semicolon_token = undefined,
+ .doc_comments = ctx.comments,
}
);
stack.append(State {
@@ -555,9 +562,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
- .doc_comments = ctx.comments,
.same_line_comment = null,
},
+ .doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.name_token = null,
.fn_token = undefined,
@@ -624,9 +631,9 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.StructField {
.base = ast.Node {
.id = ast.Node.Id.StructField,
- .doc_comments = null,
.same_line_comment = null,
},
+ .doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.name_token = identifier,
.type_expr = undefined,
@@ -653,6 +660,15 @@ pub const Parser = struct {
continue;
},
+ State.FieldInitValue => |ctx| {
+ const eq_tok = self.getNextToken();
+ if (eq_tok.id != Token.Id.Equal) {
+ self.putBackToken(eq_tok);
+ continue;
+ }
+ stack.append(State { .Expression = ctx }) catch unreachable;
+ continue;
+ },
State.ContainerKind => |ctx| {
const token = self.getNextToken();
@@ -699,7 +715,16 @@ pub const Parser = struct {
const init_arg_token = self.getNextToken();
switch (init_arg_token.id) {
Token.Id.Keyword_enum => {
- container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg.Enum;
+ container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg {.Enum = null};
+ const lparen_tok = self.getNextToken();
+ if (lparen_tok.id == Token.Id.LParen) {
+ try stack.append(State { .ExpectToken = Token.Id.RParen } );
+ try stack.append(State { .Expression = OptionalCtx {
+ .RequiredNull = &container_decl.init_arg_expr.Enum,
+ } });
+ } else {
+ self.putBackToken(lparen_tok);
+ }
},
else => {
self.putBackToken(init_arg_token);
@@ -709,12 +734,13 @@ pub const Parser = struct {
}
continue;
},
+
State.ContainerDecl => |container_decl| {
while (try self.eatLineComment(arena)) |line_comment| {
try container_decl.fields_and_decls.append(&line_comment.base);
}
- const comments = try self.eatComments(arena);
+ const comments = try self.eatDocComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Identifier => {
@@ -723,9 +749,9 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.StructField {
.base = ast.Node {
.id = ast.Node.Id.StructField,
- .doc_comments = comments,
.same_line_comment = null,
},
+ .doc_comments = comments,
.visib_token = null,
.name_token = token,
.type_expr = undefined,
@@ -744,10 +770,13 @@ pub const Parser = struct {
.base = undefined,
.name_token = token,
.type_expr = null,
+ .value_expr = null,
+ .doc_comments = comments,
}
);
stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
+ try stack.append(State { .FieldInitValue = OptionalCtx { .RequiredNull = &node.value_expr } });
try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &node.type_expr } });
try stack.append(State { .IfToken = Token.Id.Colon });
continue;
@@ -758,6 +787,7 @@ pub const Parser = struct {
.base = undefined,
.name_token = token,
.value = null,
+ .doc_comments = comments,
}
);
@@ -809,6 +839,9 @@ pub const Parser = struct {
continue;
},
Token.Id.RBrace => {
+ if (comments != null) {
+ return self.parseError(token, "doc comments must be attached to a node");
+ }
container_decl.rbrace_token = token;
continue;
},
@@ -834,9 +867,9 @@ pub const Parser = struct {
const var_decl = try arena.construct(ast.Node.VarDecl {
.base = ast.Node {
.id = ast.Node.Id.VarDecl,
- .doc_comments = ctx.comments,
.same_line_comment = null,
},
+ .doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.mut_token = ctx.mut_token,
.comptime_token = ctx.comptime_token,
@@ -1093,6 +1126,22 @@ pub const Parser = struct {
}) catch unreachable;
continue;
},
+ Token.Id.Keyword_suspend => {
+ const node = try arena.construct(ast.Node.Suspend {
+ .base = ast.Node {
+ .id = ast.Node.Id.Suspend,
+ .same_line_comment = null,
+ },
+ .label = ctx.label,
+ .suspend_token = token,
+ .payload = null,
+ .body = null,
+ });
+ ctx.opt_ctx.store(&node.base);
+ stack.append(State { .SuspendBody = node }) catch unreachable;
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ continue;
+ },
Token.Id.Keyword_inline => {
stack.append(State {
.Inline = InlineCtx {
@@ -1244,7 +1293,6 @@ pub const Parser = struct {
}
},
State.Statement => |block| {
- const comments = try self.eatComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Keyword_comptime => {
@@ -1259,7 +1307,7 @@ pub const Parser = struct {
Token.Id.Keyword_var, Token.Id.Keyword_const => {
stack.append(State {
.VarDecl = VarDeclCtx {
- .comments = comments,
+ .comments = null,
.visib_token = null,
.comptime_token = null,
.extern_export_token = null,
@@ -1274,7 +1322,6 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.Defer {
.base = ast.Node {
.id = ast.Node.Id.Defer,
- .doc_comments = comments,
.same_line_comment = null,
},
.defer_token = token,
@@ -1310,23 +1357,18 @@ pub const Parser = struct {
const statement = try block.statements.addOne();
stack.append(State { .LookForSameLineComment = statement }) catch unreachable;
try stack.append(State { .Semicolon = statement });
- try stack.append(State { .AddComments = AddCommentsCtx {
- .node_ptr = statement,
- .comments = comments,
- }});
try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = statement } });
continue;
}
}
},
State.ComptimeStatement => |ctx| {
- const comments = try self.eatComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Keyword_var, Token.Id.Keyword_const => {
stack.append(State {
.VarDecl = VarDeclCtx {
- .comments = comments,
+ .comments = null,
.visib_token = null,
.comptime_token = ctx.comptime_token,
.extern_export_token = null,
@@ -1340,9 +1382,10 @@ pub const Parser = struct {
else => {
self.putBackToken(token);
self.putBackToken(ctx.comptime_token);
- const statememt = try ctx.block.statements.addOne();
- stack.append(State { .Semicolon = statememt }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = statememt } });
+ const statement = try ctx.block.statements.addOne();
+ stack.append(State { .LookForSameLineComment = statement }) catch unreachable;
+ try stack.append(State { .Semicolon = statement });
+ try stack.append(State { .Expression = OptionalCtx { .Required = statement } });
continue;
}
}
@@ -1356,12 +1399,6 @@ pub const Parser = struct {
continue;
},
- State.AddComments => |add_comments_ctx| {
- const node = *add_comments_ctx.node_ptr;
- node.doc_comments = add_comments_ctx.comments;
- continue;
- },
-
State.LookForSameLineComment => |node_ptr| {
try self.lookForSameLineComment(arena, *node_ptr);
continue;
@@ -1474,6 +1511,10 @@ pub const Parser = struct {
}
},
State.FieldInitListItemOrEnd => |list_state| {
+ while (try self.eatLineComment(arena)) |line_comment| {
+ try list_state.list.append(&line_comment.base);
+ }
+
if (self.eatToken(Token.Id.RBrace)) |rbrace| {
*list_state.ptr = rbrace;
continue;
@@ -1482,14 +1523,13 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.FieldInitializer {
.base = ast.Node {
.id = ast.Node.Id.FieldInitializer,
- .doc_comments = null,
.same_line_comment = null,
},
.period_token = undefined,
.name_token = undefined,
.expr = undefined,
});
- try list_state.list.append(node);
+ try list_state.list.append(&node.base);
stack.append(State { .FieldInitListCommaOrEnd = list_state }) catch unreachable;
try stack.append(State { .Expression = OptionalCtx{ .Required = &node.expr } });
@@ -1527,7 +1567,7 @@ pub const Parser = struct {
try stack.append(State { .ContainerDecl = container_decl });
continue;
},
- State.IdentifierListItemOrEnd => |list_state| {
+ State.ErrorTagListItemOrEnd => |list_state| {
while (try self.eatLineComment(arena)) |line_comment| {
try list_state.list.append(&line_comment.base);
}
@@ -1537,23 +1577,18 @@ pub const Parser = struct {
continue;
}
- const comments = try self.eatComments(arena);
const node_ptr = try list_state.list.addOne();
- try stack.append(State { .AddComments = AddCommentsCtx {
- .node_ptr = node_ptr,
- .comments = comments,
- }});
- try stack.append(State { .IdentifierListCommaOrEnd = list_state });
- try stack.append(State { .Identifier = OptionalCtx { .Required = node_ptr } });
+ try stack.append(State { .ErrorTagListCommaOrEnd = list_state });
+ try stack.append(State { .ErrorTag = node_ptr });
continue;
},
- State.IdentifierListCommaOrEnd => |list_state| {
+ State.ErrorTagListCommaOrEnd => |list_state| {
if (try self.expectCommaOrEnd(Token.Id.RBrace)) |end| {
*list_state.ptr = end;
continue;
} else {
- stack.append(State { .IdentifierListItemOrEnd = list_state }) catch unreachable;
+ stack.append(State { .ErrorTagListItemOrEnd = list_state }) catch unreachable;
continue;
}
},
@@ -1567,11 +1602,10 @@ pub const Parser = struct {
continue;
}
- const comments = try self.eatComments(arena);
+ const comments = try self.eatDocComments(arena);
const node = try arena.construct(ast.Node.SwitchCase {
.base = ast.Node {
.id = ast.Node.Id.SwitchCase,
- .doc_comments = comments,
.same_line_comment = null,
},
.items = ArrayList(&ast.Node).init(arena),
@@ -1684,9 +1718,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
- .doc_comments = ctx.comments,
.same_line_comment = null,
},
+ .doc_comments = ctx.comments,
.visib_token = null,
.name_token = null,
.fn_token = fn_token,
@@ -1857,12 +1891,13 @@ pub const Parser = struct {
}
);
- stack.append(State {
+ stack.append(State {.LookForSameLineCommentDirect = &node.base }) catch unreachable;
+ try stack.append(State {
.ExpectTokenSave = ExpectTokenSave {
.id = Token.Id.Pipe,
.ptr = &node.rpipe,
}
- }) catch unreachable;
+ });
try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
try stack.append(State {
.OptionalTokenSave = OptionalTokenSave {
@@ -2317,7 +2352,7 @@ pub const Parser = struct {
.base = undefined,
.lhs = lhs,
.op = ast.Node.SuffixOp.Op {
- .StructInitializer = ArrayList(&ast.Node.FieldInitializer).init(arena),
+ .StructInitializer = ArrayList(&ast.Node).init(arena),
},
.rtoken = undefined,
}
@@ -2325,7 +2360,7 @@ pub const Parser = struct {
stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
try stack.append(State { .IfToken = Token.Id.LBrace });
try stack.append(State {
- .FieldInitListItemOrEnd = ListSave(&ast.Node.FieldInitializer) {
+ .FieldInitListItemOrEnd = ListSave(&ast.Node) {
.list = &node.op.StructInitializer,
.ptr = &node.rtoken,
}
@@ -2550,6 +2585,29 @@ pub const Parser = struct {
_ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.Unreachable, token);
continue;
},
+ Token.Id.Keyword_promise => {
+ const node = try arena.construct(ast.Node.PromiseType {
+ .base = ast.Node {
+ .id = ast.Node.Id.PromiseType,
+ .same_line_comment = null,
+ },
+ .promise_token = token,
+ .result = null,
+ });
+ opt_ctx.store(&node.base);
+ const next_token = self.getNextToken();
+ if (next_token.id != Token.Id.Arrow) {
+ self.putBackToken(next_token);
+ continue;
+ }
+ node.result = ast.Node.PromiseType.Result {
+ .arrow_token = next_token,
+ .return_type = undefined,
+ };
+ const return_type_ptr = &((??node.result).return_type);
+ try stack.append(State { .Expression = OptionalCtx { .Required = return_type_ptr, } });
+ continue;
+ },
Token.Id.StringLiteral, Token.Id.MultilineStringLiteralLine => {
opt_ctx.store((try self.parseStringLiteral(arena, token)) ?? unreachable);
continue;
@@ -2656,9 +2714,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
- .doc_comments = null,
.same_line_comment = null,
},
+ .doc_comments = null,
.visib_token = null,
.name_token = null,
.fn_token = token,
@@ -2680,9 +2738,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
- .doc_comments = null,
.same_line_comment = null,
},
+ .doc_comments = null,
.visib_token = null,
.name_token = null,
.fn_token = undefined,
@@ -2773,7 +2831,6 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.ErrorSetDecl {
.base = ast.Node {
.id = ast.Node.Id.ErrorSetDecl,
- .doc_comments = null,
.same_line_comment = null,
},
.error_token = ctx.error_token,
@@ -2783,7 +2840,7 @@ pub const Parser = struct {
ctx.opt_ctx.store(&node.base);
stack.append(State {
- .IdentifierListItemOrEnd = ListSave(&ast.Node) {
+ .ErrorTagListItemOrEnd = ListSave(&ast.Node) {
.list = &node.decls,
.ptr = &node.rbrace_token,
}
@@ -2803,6 +2860,7 @@ pub const Parser = struct {
}
);
},
+
State.Identifier => |opt_ctx| {
if (self.eatToken(Token.Id.Identifier)) |ident_token| {
_ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.Identifier, ident_token);
@@ -2815,6 +2873,25 @@ pub const Parser = struct {
}
},
+ State.ErrorTag => |node_ptr| {
+ const comments = try self.eatDocComments(arena);
+ const ident_token = self.getNextToken();
+ if (ident_token.id != Token.Id.Identifier) {
+ return self.parseError(ident_token, "expected {}, found {}",
+ @tagName(Token.Id.Identifier), @tagName(ident_token.id));
+ }
+
+ const node = try arena.construct(ast.Node.ErrorTag {
+ .base = ast.Node {
+ .id = ast.Node.Id.ErrorTag,
+ .same_line_comment = null,
+ },
+ .doc_comments = comments,
+ .name_token = ident_token,
+ });
+ *node_ptr = &node.base;
+ continue;
+ },
State.ExpectToken => |token_id| {
_ = try self.expectToken(token_id);
@@ -2853,7 +2930,7 @@ pub const Parser = struct {
}
}
- fn eatComments(self: &Parser, arena: &mem.Allocator) !?&ast.Node.DocComment {
+ fn eatDocComments(self: &Parser, arena: &mem.Allocator) !?&ast.Node.DocComment {
var result: ?&ast.Node.DocComment = null;
while (true) {
if (self.eatToken(Token.Id.DocComment)) |line_comment| {
@@ -2864,7 +2941,6 @@ pub const Parser = struct {
const comment_node = try arena.construct(ast.Node.DocComment {
.base = ast.Node {
.id = ast.Node.Id.DocComment,
- .doc_comments = null,
.same_line_comment = null,
},
.lines = ArrayList(Token).init(arena),
@@ -2886,7 +2962,6 @@ pub const Parser = struct {
return try arena.construct(ast.Node.LineComment {
.base = ast.Node {
.id = ast.Node.Id.LineComment,
- .doc_comments = null,
.same_line_comment = null,
},
.token = token,
@@ -3022,6 +3097,7 @@ pub const Parser = struct {
const node = try self.createToCtxNode(arena, ctx, ast.Node.Suspend,
ast.Node.Suspend {
.base = undefined,
+ .label = null,
.suspend_token = *token,
.payload = null,
.body = null,
@@ -3047,6 +3123,7 @@ pub const Parser = struct {
stack.append(State { .Else = &node.@"else" }) catch unreachable;
try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .LookForSameLineComment = &node.condition });
try stack.append(State { .ExpectToken = Token.Id.RParen });
try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
try stack.append(State { .ExpectToken = Token.Id.LParen });
@@ -3078,7 +3155,6 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.Switch {
.base = ast.Node {
.id = ast.Node.Id.Switch,
- .doc_comments = null,
.same_line_comment = null,
},
.switch_token = *token,
@@ -3106,6 +3182,7 @@ pub const Parser = struct {
.base = undefined,
.comptime_token = *token,
.expr = undefined,
+ .doc_comments = null,
}
);
try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
@@ -3248,7 +3325,6 @@ pub const Parser = struct {
const id = ast.Node.typeToId(T);
break :blk ast.Node {
.id = id,
- .doc_comments = null,
.same_line_comment = null,
};
};
@@ -3383,11 +3459,10 @@ pub const Parser = struct {
Expression: &ast.Node,
VarDecl: &ast.Node.VarDecl,
Statement: &ast.Node,
- FieldInitializer: &ast.Node.FieldInitializer,
PrintIndent,
Indent: usize,
PrintSameLineComment: ?&Token,
- PrintComments: &ast.Node,
+ PrintLineComment: &Token,
};
pub fn renderSource(self: &Parser, stream: var, root_node: &ast.Node.Root) !void {
@@ -3426,7 +3501,7 @@ pub const Parser = struct {
switch (decl.id) {
ast.Node.Id.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
- try self.renderComments(stream, &fn_proto.base, indent);
+ try self.renderComments(stream, fn_proto, indent);
if (fn_proto.body_node) |body_node| {
stack.append(RenderState { .Expression = body_node}) catch unreachable;
@@ -3448,12 +3523,12 @@ pub const Parser = struct {
},
ast.Node.Id.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
- try self.renderComments(stream, &var_decl.base, indent);
+ try self.renderComments(stream, var_decl, indent);
try stack.append(RenderState { .VarDecl = var_decl});
},
ast.Node.Id.TestDecl => {
const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl);
- try self.renderComments(stream, &test_decl.base, indent);
+ try self.renderComments(stream, test_decl, indent);
try stream.print("test ");
try stack.append(RenderState { .Expression = test_decl.body_node });
try stack.append(RenderState { .Text = " " });
@@ -3461,6 +3536,7 @@ pub const Parser = struct {
},
ast.Node.Id.StructField => {
const field = @fieldParentPtr(ast.Node.StructField, "base", decl);
+ try self.renderComments(stream, field, indent);
if (field.visib_token) |visib_token| {
try stream.print("{} ", self.tokenizer.getTokenSlice(visib_token));
}
@@ -3470,9 +3546,16 @@ pub const Parser = struct {
},
ast.Node.Id.UnionTag => {
const tag = @fieldParentPtr(ast.Node.UnionTag, "base", decl);
+ try self.renderComments(stream, tag, indent);
try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
try stack.append(RenderState { .Text = "," });
+
+ if (tag.value_expr) |value_expr| {
+ try stack.append(RenderState { .Expression = value_expr });
+ try stack.append(RenderState { .Text = " = " });
+ }
+
if (tag.type_expr) |type_expr| {
try stream.print(": ");
try stack.append(RenderState { .Expression = type_expr});
@@ -3480,6 +3563,7 @@ pub const Parser = struct {
},
ast.Node.Id.EnumTag => {
const tag = @fieldParentPtr(ast.Node.EnumTag, "base", decl);
+ try self.renderComments(stream, tag, indent);
try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
try stack.append(RenderState { .Text = "," });
@@ -3488,6 +3572,11 @@ pub const Parser = struct {
try stack.append(RenderState { .Expression = value});
}
},
+ ast.Node.Id.ErrorTag => {
+ const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", decl);
+ try self.renderComments(stream, tag, indent);
+ try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
+ },
ast.Node.Id.Comptime => {
if (requireSemiColon(decl)) {
try stack.append(RenderState { .Text = ";" });
@@ -3502,17 +3591,12 @@ pub const Parser = struct {
}
},
- RenderState.FieldInitializer => |field_init| {
- try stream.print(".{}", self.tokenizer.getTokenSlice(field_init.name_token));
- try stream.print(" = ");
- try stack.append(RenderState { .Expression = field_init.expr });
- },
-
RenderState.VarDecl => |var_decl| {
try stack.append(RenderState { .Text = ";" });
if (var_decl.init_node) |init_node| {
try stack.append(RenderState { .Expression = init_node });
- try stack.append(RenderState { .Text = " = " });
+ const text = if (init_node.id == ast.Node.Id.MultilineStringLiteral) " =" else " = ";
+ try stack.append(RenderState { .Text = text });
}
if (var_decl.align_node) |align_node| {
try stack.append(RenderState { .Text = ")" });
@@ -3630,6 +3714,9 @@ pub const Parser = struct {
},
ast.Node.Id.Suspend => {
const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
+ if (suspend_node.label) |label| {
+ try stream.print("{}: ", self.tokenizer.getTokenSlice(label));
+ }
try stream.print("{}", self.tokenizer.getTokenSlice(suspend_node.suspend_token));
if (suspend_node.body) |body| {
@@ -3803,19 +3890,41 @@ pub const Parser = struct {
try stack.append(RenderState { .Expression = suffix_op.lhs });
continue;
}
+ if (field_inits.len == 1) {
+ const field_init = field_inits.at(0);
+
+ try stack.append(RenderState { .Text = " }" });
+ try stack.append(RenderState { .Expression = field_init });
+ try stack.append(RenderState { .Text = "{ " });
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ continue;
+ }
try stack.append(RenderState { .Text = "}"});
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Text = "\n" });
var i = field_inits.len;
while (i != 0) {
i -= 1;
const field_init = field_inits.at(i);
- try stack.append(RenderState { .Text = ",\n" });
- try stack.append(RenderState { .FieldInitializer = field_init });
+ if (field_init.id != ast.Node.Id.LineComment) {
+ try stack.append(RenderState { .Text = "," });
+ }
+ try stack.append(RenderState { .Expression = field_init });
try stack.append(RenderState.PrintIndent);
+ if (i != 0) {
+ try stack.append(RenderState { .Text = blk: {
+ const prev_node = field_inits.at(i - 1);
+ const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, field_init.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ break :blk "\n";
+ }});
+ }
}
try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = " {\n"});
+ try stack.append(RenderState { .Text = "{\n"});
try stack.append(RenderState { .Expression = suffix_op.lhs });
},
ast.Node.SuffixOp.Op.ArrayInitializer => |exprs| {
@@ -3829,7 +3938,7 @@ pub const Parser = struct {
try stack.append(RenderState { .Text = "}" });
try stack.append(RenderState { .Expression = expr });
- try stack.append(RenderState { .Text = " {" });
+ try stack.append(RenderState { .Text = "{" });
try stack.append(RenderState { .Expression = suffix_op.lhs });
continue;
}
@@ -3846,7 +3955,7 @@ pub const Parser = struct {
try stack.append(RenderState.PrintIndent);
}
try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = " {\n"});
+ try stack.append(RenderState { .Text = "{\n"});
try stack.append(RenderState { .Expression = suffix_op.lhs });
},
}
@@ -4014,7 +4123,15 @@ pub const Parser = struct {
switch (container_decl.init_arg_expr) {
ast.Node.ContainerDecl.InitArg.None => try stack.append(RenderState { .Text = " "}),
- ast.Node.ContainerDecl.InitArg.Enum => try stack.append(RenderState { .Text = "(enum) "}),
+ ast.Node.ContainerDecl.InitArg.Enum => |enum_tag_type| {
+ if (enum_tag_type) |expr| {
+ try stack.append(RenderState { .Text = ")) "});
+ try stack.append(RenderState { .Expression = expr});
+ try stack.append(RenderState { .Text = "(enum("});
+ } else {
+ try stack.append(RenderState { .Text = "(enum) "});
+ }
+ },
ast.Node.ContainerDecl.InitArg.Type => |type_expr| {
try stack.append(RenderState { .Text = ") "});
try stack.append(RenderState { .Expression = type_expr});
@@ -4024,14 +4141,39 @@ pub const Parser = struct {
},
ast.Node.Id.ErrorSetDecl => {
const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base);
- try stream.print("error ");
+
+ const decls = err_set_decl.decls.toSliceConst();
+ if (decls.len == 0) {
+ try stream.write("error{}");
+ continue;
+ }
+
+ if (decls.len == 1) blk: {
+ const node = decls[0];
+
+ // if there are any doc comments or same line comments
+ // don't try to put it all on one line
+ if (node.same_line_comment != null) break :blk;
+ if (node.cast(ast.Node.ErrorTag)) |tag| {
+ if (tag.doc_comments != null) break :blk;
+ } else {
+ break :blk;
+ }
+
+
+ try stream.write("error{");
+ try stack.append(RenderState { .Text = "}" });
+ try stack.append(RenderState { .TopLevelDecl = node });
+ continue;
+ }
+
+ try stream.write("error{");
try stack.append(RenderState { .Text = "}"});
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState { .Indent = indent });
try stack.append(RenderState { .Text = "\n"});
- const decls = err_set_decl.decls.toSliceConst();
var i = decls.len;
while (i != 0) {
i -= 1;
@@ -4039,8 +4181,7 @@ pub const Parser = struct {
if (node.id != ast.Node.Id.LineComment) {
try stack.append(RenderState { .Text = "," });
}
- try stack.append(RenderState { .Expression = node });
- try stack.append(RenderState { .PrintComments = node });
+ try stack.append(RenderState { .TopLevelDecl = node });
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState {
.Text = blk: {
@@ -4056,7 +4197,6 @@ pub const Parser = struct {
});
}
try stack.append(RenderState { .Indent = indent + indent_delta});
- try stack.append(RenderState { .Text = "{"});
},
ast.Node.Id.MultilineStringLiteral => {
const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
@@ -4068,7 +4208,7 @@ pub const Parser = struct {
try stream.writeByteNTimes(' ', indent + indent_delta);
try stream.print("{}", self.tokenizer.getTokenSlice(t));
}
- try stream.writeByteNTimes(' ', indent + indent_delta);
+ try stream.writeByteNTimes(' ', indent);
},
ast.Node.Id.UndefinedLiteral => {
const undefined_literal = @fieldParentPtr(ast.Node.UndefinedLiteral, "base", base);
@@ -4151,6 +4291,14 @@ pub const Parser = struct {
try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(visib_token) });
}
},
+ ast.Node.Id.PromiseType => {
+ const promise_type = @fieldParentPtr(ast.Node.PromiseType, "base", base);
+ try stream.write(self.tokenizer.getTokenSlice(promise_type.promise_token));
+ if (promise_type.result) |result| {
+ try stream.write(self.tokenizer.getTokenSlice(result.arrow_token));
+ try stack.append(RenderState { .Expression = result.return_type});
+ }
+ },
ast.Node.Id.LineComment => {
const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", base);
try stream.write(self.tokenizer.getTokenSlice(line_comment_node.token));
@@ -4158,14 +4306,21 @@ pub const Parser = struct {
ast.Node.Id.DocComment => unreachable, // doc comments are attached to nodes
ast.Node.Id.Switch => {
const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base);
+ const cases = switch_node.cases.toSliceConst();
+
try stream.print("{} (", self.tokenizer.getTokenSlice(switch_node.switch_token));
+ if (cases.len == 0) {
+ try stack.append(RenderState { .Text = ") {}"});
+ try stack.append(RenderState { .Expression = switch_node.expr });
+ continue;
+ }
+
try stack.append(RenderState { .Text = "}"});
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState { .Indent = indent });
try stack.append(RenderState { .Text = "\n"});
- const cases = switch_node.cases.toSliceConst();
var i = cases.len;
while (i != 0) {
i -= 1;
@@ -4192,8 +4347,6 @@ pub const Parser = struct {
ast.Node.Id.SwitchCase => {
const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base);
- try self.renderComments(stream, base, indent);
-
try stack.append(RenderState { .PrintSameLineComment = base.same_line_comment });
try stack.append(RenderState { .Text = "," });
try stack.append(RenderState { .Expression = switch_case.expr });
@@ -4372,7 +4525,18 @@ pub const Parser = struct {
}
}
- try stack.append(RenderState { .Expression = if_node.body });
+ if (if_node.condition.same_line_comment) |comment| {
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Expression = if_node.body });
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta });
+ try stack.append(RenderState { .Text = "\n" });
+ try stack.append(RenderState { .PrintLineComment = comment });
+ } else {
+ try stack.append(RenderState { .Expression = if_node.body });
+ }
+
+
try stack.append(RenderState { .Text = " " });
if (if_node.payload) |payload| {
@@ -4505,6 +4669,7 @@ pub const Parser = struct {
ast.Node.Id.StructField,
ast.Node.Id.UnionTag,
ast.Node.Id.EnumTag,
+ ast.Node.Id.ErrorTag,
ast.Node.Id.Root,
ast.Node.Id.VarDecl,
ast.Node.Id.Use,
@@ -4512,7 +4677,6 @@ pub const Parser = struct {
ast.Node.Id.ParamDecl => unreachable,
},
RenderState.Statement => |base| {
- try self.renderComments(stream, base, indent);
try stack.append(RenderState { .PrintSameLineComment = base.same_line_comment } );
switch (base.id) {
ast.Node.Id.VarDecl => {
@@ -4533,15 +4697,14 @@ pub const Parser = struct {
const comment_token = maybe_comment ?? break :blk;
try stream.print(" {}", self.tokenizer.getTokenSlice(comment_token));
},
-
- RenderState.PrintComments => |node| blk: {
- try self.renderComments(stream, node, indent);
+ RenderState.PrintLineComment => |comment_token| {
+ try stream.write(self.tokenizer.getTokenSlice(comment_token));
},
}
}
}
- fn renderComments(self: &Parser, stream: var, node: &ast.Node, indent: usize) !void {
+ fn renderComments(self: &Parser, stream: var, node: var, indent: usize) !void {
const comment = node.doc_comments ?? return;
for (comment.lines.toSliceConst()) |line_token| {
try stream.print("{}\n", self.tokenizer.getTokenSlice(line_token));
diff --git a/std/zig/parser_test.zig b/std/zig/parser_test.zig
index 74a49a70e3..e1d75d8380 100644
--- a/std/zig/parser_test.zig
+++ b/std/zig/parser_test.zig
@@ -1,6 +1,129 @@
+test "zig fmt: same-line comment after non-block if expression" {
+ try testCanonical(
+ \\comptime {
+ \\ if (sr > n_uword_bits - 1) {
+ \\ // d > r
+ \\ return 0;
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: switch with empty body" {
+ try testCanonical(
+ \\test "" {
+ \\ foo() catch |err| switch (err) {};
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: same-line comment on comptime expression" {
+ try testCanonical(
+ \\test "" {
+ \\ comptime assert(@typeId(T) == builtin.TypeId.Int); // must pass an integer to absInt
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: float literal with exponent" {
+ try testCanonical(
+ \\pub const f64_true_min = 4.94065645841246544177e-324;
+ \\
+ );
+}
+
+test "zig fmt: line comments in struct initializer" {
+ try testCanonical(
+ \\fn foo() void {
+ \\ return Self{
+ \\ .a = b,
+ \\
+ \\ // Initialize these two fields to buffer_size so that
+ \\ // in `readFn` we treat the state as being able to read
+ \\ .start_index = buffer_size,
+ \\ .end_index = buffer_size,
+ \\
+ \\ // middle
+ \\
+ \\ .a = b,
+ \\
+ \\ // end
+ \\ };
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: doc comments before struct field" {
+ try testCanonical(
+ \\pub const Allocator = struct {
+ \\ /// Allocate byte_count bytes and return them in a slice, with the
+ \\ /// slice's pointer aligned at least to alignment bytes.
+ \\ allocFn: fn() void,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: error set declaration" {
+ try testCanonical(
+ \\const E = error{
+ \\ A,
+ \\ B,
+ \\
+ \\ C,
+ \\};
+ \\
+ \\const Error = error{
+ \\ /// no more memory
+ \\ OutOfMemory,
+ \\};
+ \\
+ \\const Error = error{
+ \\ /// no more memory
+ \\ OutOfMemory,
+ \\
+ \\ /// another
+ \\ Another,
+ \\
+ \\ // end
+ \\};
+ \\
+ \\const Error = error{OutOfMemory};
+ \\const Error = error{};
+ \\
+ );
+}
+
+test "zig fmt: union(enum(u32)) with assigned enum values" {
+ try testCanonical(
+ \\const MultipleChoice = union(enum(u32)) {
+ \\ A = 20,
+ \\ B = 40,
+ \\ C = 60,
+ \\ D = 1000,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: labeled suspend" {
+ try testCanonical(
+ \\fn foo() void {
+ \\ s: suspend |p| {
+ \\ break :s;
+ \\ }
+ \\}
+ \\
+ );
+}
+
test "zig fmt: comments before error set decl" {
try testCanonical(
- \\const UnexpectedError = error {
+ \\const UnexpectedError = error{
\\ /// The Operating System returned an undocumented error code.
\\ Unexpected,
\\ // another
@@ -92,7 +215,7 @@ test "zig fmt: same-line comment after field decl" {
test "zig fmt: array literal with 1 item on 1 line" {
try testCanonical(
- \\var s = []const u64 {0} ** 25;
+ \\var s = []const u64{0} ** 25;
\\
);
}
@@ -117,7 +240,7 @@ test "zig fmt: comments before global variables" {
);
}
-test "zig fmt: comments before statements" {
+test "zig fmt: comments in statements" {
try testCanonical(
\\test "std" {
\\ // statement comment
@@ -147,22 +270,6 @@ test "zig fmt: comments before test decl" {
);
}
-test "zig fmt: comments before variable declarations" {
- try testCanonical(
- \\const std = @import("std");
- \\
- \\pub fn main() !void {
- \\ /// If this program is run without stdout attached, exit with an error.
- \\ /// another comment
- \\ var stdout_file = try std.io.getStdOut;
- \\ // If this program is run without stdout attached, exit with an error.
- \\ // another comment
- \\ var stdout_file = try std.io.getStdOut;
- \\}
- \\
- );
-}
-
test "zig fmt: preserve spacing" {
try testCanonical(
\\const std = @import("std");
@@ -423,10 +530,18 @@ test "zig fmt: functions" {
test "zig fmt: multiline string" {
try testCanonical(
- \\const s =
- \\ \\ something
- \\ \\ something else
+ \\test "" {
+ \\ const s1 =
+ \\ \\one
+ \\ \\two)
+ \\ \\three
\\ ;
+ \\ const s2 =
+ \\ c\\one
+ \\ c\\two)
+ \\ c\\three
+ \\ ;
+ \\}
\\
);
}
@@ -570,26 +685,14 @@ test "zig fmt: union declaration" {
);
}
-test "zig fmt: error set declaration" {
- try testCanonical(
- \\const E = error {
- \\ A,
- \\ B,
- \\
- \\ C,
- \\};
- \\
- );
-}
-
test "zig fmt: arrays" {
try testCanonical(
\\test "test array" {
- \\ const a: [2]u8 = [2]u8 {
+ \\ const a: [2]u8 = [2]u8{
\\ 1,
\\ 2,
\\ };
- \\ const a: [2]u8 = []u8 {
+ \\ const a: [2]u8 = []u8{
\\ 1,
\\ 2,
\\ };
@@ -601,15 +704,17 @@ test "zig fmt: arrays" {
test "zig fmt: container initializers" {
try testCanonical(
- \\const a1 = []u8{};
- \\const a2 = []u8 {
+ \\const a0 = []u8{};
+ \\const a1 = []u8{1};
+ \\const a2 = []u8{
\\ 1,
\\ 2,
\\ 3,
\\ 4,
\\};
- \\const s1 = S{};
- \\const s2 = S {
+ \\const s0 = S{};
+ \\const s1 = S{ .a = 1 };
+ \\const s2 = S{
\\ .a = 1,
\\ .b = 2,
\\};
@@ -678,9 +783,6 @@ test "zig fmt: switch" {
\\ Float: f64,
\\ };
\\
- \\ const u = Union {
- \\ .Int = 0,
- \\ };
\\ switch (u) {
\\ Union.Int => |int| {},
\\ Union.Float => |*float| unreachable,
@@ -759,11 +861,6 @@ test "zig fmt: while" {
test "zig fmt: for" {
try testCanonical(
\\test "for" {
- \\ const a = []u8 {
- \\ 1,
- \\ 2,
- \\ 3,
- \\ };
\\ for (a) |v| {
\\ continue;
\\ }
@@ -940,12 +1037,12 @@ test "zig fmt: coroutines" {
\\ suspend;
\\ x += 1;
\\ suspend |p| {}
- \\ const p = async simpleAsyncFn() catch unreachable;
+ \\ const p: promise->void = async simpleAsyncFn() catch unreachable;
\\ await p;
\\}
\\
\\test "coroutine suspend, resume, cancel" {
- \\ const p = try async testAsyncSeq();
+ \\ const p: promise = try async testAsyncSeq();
\\ resume p;
\\ cancel p;
\\}
@@ -994,15 +1091,6 @@ test "zig fmt: error return" {
);
}
-test "zig fmt: struct literals with fields on each line" {
- try testCanonical(
- \\var self = BufSet {
- \\ .hash_map = BufSetHashMap.init(a),
- \\};
- \\
- );
-}
-
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;
@@ -1028,15 +1116,15 @@ fn testParse(source: []const u8, allocator: &mem.Allocator) ![]u8 {
return buffer.toOwnedSlice();
}
-fn testCanonical(source: []const u8) !void {
+fn testTransform(source: []const u8, expected_source: []const u8) !void {
const needed_alloc_count = x: {
// Try it once with unlimited memory, make sure it works
var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, @maxValue(usize));
const result_source = try testParse(source, &failing_allocator.allocator);
- if (!mem.eql(u8, result_source, source)) {
+ if (!mem.eql(u8, result_source, expected_source)) {
warn("\n====== expected this output: =========\n");
- warn("{}", source);
+ warn("{}", expected_source);
warn("\n======== instead found this: =========\n");
warn("{}", result_source);
warn("\n======================================\n");
@@ -1067,3 +1155,7 @@ fn testCanonical(source: []const u8) !void {
}
}
+fn testCanonical(source: []const u8) !void {
+ return testTransform(source, source);
+}
+
diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig
index 92a0fbc5d5..31dc06b695 100644
--- a/std/zig/tokenizer.zig
+++ b/std/zig/tokenizer.zig
@@ -6,59 +6,60 @@ pub const Token = struct {
start: usize,
end: usize,
- const KeywordId = struct {
+ const Keyword = struct {
bytes: []const u8,
id: Id,
};
- const keywords = []KeywordId {
- KeywordId{.bytes="align", .id = Id.Keyword_align},
- KeywordId{.bytes="and", .id = Id.Keyword_and},
- KeywordId{.bytes="asm", .id = Id.Keyword_asm},
- KeywordId{.bytes="async", .id = Id.Keyword_async},
- KeywordId{.bytes="await", .id = Id.Keyword_await},
- KeywordId{.bytes="break", .id = Id.Keyword_break},
- KeywordId{.bytes="catch", .id = Id.Keyword_catch},
- KeywordId{.bytes="cancel", .id = Id.Keyword_cancel},
- KeywordId{.bytes="comptime", .id = Id.Keyword_comptime},
- KeywordId{.bytes="const", .id = Id.Keyword_const},
- KeywordId{.bytes="continue", .id = Id.Keyword_continue},
- KeywordId{.bytes="defer", .id = Id.Keyword_defer},
- KeywordId{.bytes="else", .id = Id.Keyword_else},
- KeywordId{.bytes="enum", .id = Id.Keyword_enum},
- KeywordId{.bytes="errdefer", .id = Id.Keyword_errdefer},
- KeywordId{.bytes="error", .id = Id.Keyword_error},
- KeywordId{.bytes="export", .id = Id.Keyword_export},
- KeywordId{.bytes="extern", .id = Id.Keyword_extern},
- KeywordId{.bytes="false", .id = Id.Keyword_false},
- KeywordId{.bytes="fn", .id = Id.Keyword_fn},
- KeywordId{.bytes="for", .id = Id.Keyword_for},
- KeywordId{.bytes="if", .id = Id.Keyword_if},
- KeywordId{.bytes="inline", .id = Id.Keyword_inline},
- KeywordId{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
- KeywordId{.bytes="noalias", .id = Id.Keyword_noalias},
- KeywordId{.bytes="null", .id = Id.Keyword_null},
- KeywordId{.bytes="or", .id = Id.Keyword_or},
- KeywordId{.bytes="packed", .id = Id.Keyword_packed},
- KeywordId{.bytes="pub", .id = Id.Keyword_pub},
- KeywordId{.bytes="resume", .id = Id.Keyword_resume},
- KeywordId{.bytes="return", .id = Id.Keyword_return},
- KeywordId{.bytes="section", .id = Id.Keyword_section},
- KeywordId{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
- KeywordId{.bytes="struct", .id = Id.Keyword_struct},
- KeywordId{.bytes="suspend", .id = Id.Keyword_suspend},
- KeywordId{.bytes="switch", .id = Id.Keyword_switch},
- KeywordId{.bytes="test", .id = Id.Keyword_test},
- KeywordId{.bytes="this", .id = Id.Keyword_this},
- KeywordId{.bytes="true", .id = Id.Keyword_true},
- KeywordId{.bytes="try", .id = Id.Keyword_try},
- KeywordId{.bytes="undefined", .id = Id.Keyword_undefined},
- KeywordId{.bytes="union", .id = Id.Keyword_union},
- KeywordId{.bytes="unreachable", .id = Id.Keyword_unreachable},
- KeywordId{.bytes="use", .id = Id.Keyword_use},
- KeywordId{.bytes="var", .id = Id.Keyword_var},
- KeywordId{.bytes="volatile", .id = Id.Keyword_volatile},
- KeywordId{.bytes="while", .id = Id.Keyword_while},
+ const keywords = []Keyword {
+ Keyword{.bytes="align", .id = Id.Keyword_align},
+ Keyword{.bytes="and", .id = Id.Keyword_and},
+ Keyword{.bytes="asm", .id = Id.Keyword_asm},
+ Keyword{.bytes="async", .id = Id.Keyword_async},
+ Keyword{.bytes="await", .id = Id.Keyword_await},
+ Keyword{.bytes="break", .id = Id.Keyword_break},
+ Keyword{.bytes="catch", .id = Id.Keyword_catch},
+ Keyword{.bytes="cancel", .id = Id.Keyword_cancel},
+ Keyword{.bytes="comptime", .id = Id.Keyword_comptime},
+ Keyword{.bytes="const", .id = Id.Keyword_const},
+ Keyword{.bytes="continue", .id = Id.Keyword_continue},
+ Keyword{.bytes="defer", .id = Id.Keyword_defer},
+ Keyword{.bytes="else", .id = Id.Keyword_else},
+ Keyword{.bytes="enum", .id = Id.Keyword_enum},
+ Keyword{.bytes="errdefer", .id = Id.Keyword_errdefer},
+ Keyword{.bytes="error", .id = Id.Keyword_error},
+ Keyword{.bytes="export", .id = Id.Keyword_export},
+ Keyword{.bytes="extern", .id = Id.Keyword_extern},
+ Keyword{.bytes="false", .id = Id.Keyword_false},
+ Keyword{.bytes="fn", .id = Id.Keyword_fn},
+ Keyword{.bytes="for", .id = Id.Keyword_for},
+ Keyword{.bytes="if", .id = Id.Keyword_if},
+ Keyword{.bytes="inline", .id = Id.Keyword_inline},
+ Keyword{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
+ Keyword{.bytes="noalias", .id = Id.Keyword_noalias},
+ Keyword{.bytes="null", .id = Id.Keyword_null},
+ Keyword{.bytes="or", .id = Id.Keyword_or},
+ Keyword{.bytes="packed", .id = Id.Keyword_packed},
+ Keyword{.bytes="promise", .id = Id.Keyword_promise},
+ Keyword{.bytes="pub", .id = Id.Keyword_pub},
+ Keyword{.bytes="resume", .id = Id.Keyword_resume},
+ Keyword{.bytes="return", .id = Id.Keyword_return},
+ Keyword{.bytes="section", .id = Id.Keyword_section},
+ Keyword{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
+ Keyword{.bytes="struct", .id = Id.Keyword_struct},
+ Keyword{.bytes="suspend", .id = Id.Keyword_suspend},
+ Keyword{.bytes="switch", .id = Id.Keyword_switch},
+ Keyword{.bytes="test", .id = Id.Keyword_test},
+ Keyword{.bytes="this", .id = Id.Keyword_this},
+ Keyword{.bytes="true", .id = Id.Keyword_true},
+ Keyword{.bytes="try", .id = Id.Keyword_try},
+ Keyword{.bytes="undefined", .id = Id.Keyword_undefined},
+ Keyword{.bytes="union", .id = Id.Keyword_union},
+ Keyword{.bytes="unreachable", .id = Id.Keyword_unreachable},
+ Keyword{.bytes="use", .id = Id.Keyword_use},
+ Keyword{.bytes="var", .id = Id.Keyword_var},
+ Keyword{.bytes="volatile", .id = Id.Keyword_volatile},
+ Keyword{.bytes="while", .id = Id.Keyword_while},
};
fn getKeyword(bytes: []const u8) ?Id {
@@ -166,6 +167,7 @@ pub const Token = struct {
Keyword_null,
Keyword_or,
Keyword_packed,
+ Keyword_promise,
Keyword_pub,
Keyword_resume,
Keyword_return,
@@ -910,10 +912,10 @@ pub const Tokenizer = struct {
},
},
State.FloatFraction => switch (c) {
- 'p', 'P' => {
+ 'p', 'P', 'e', 'E' => {
state = State.FloatExponentUnsigned;
},
- '0'...'9', 'a'...'f', 'A'...'F' => {},
+ '0'...'9' => {},
else => break,
},
State.FloatExponentUnsigned => switch (c) {
@@ -1106,6 +1108,15 @@ test "tokenizer" {
});
}
+test "tokenizer - float literal" {
+ testTokenize("a = 4.94065645841246544177e-324;\n", []Token.Id {
+ Token.Id.Identifier,
+ Token.Id.Equal,
+ Token.Id.FloatLiteral,
+ Token.Id.Semicolon,
+ });
+}
+
test "tokenizer - chars" {
testTokenize("'c'", []Token.Id {Token.Id.CharLiteral});
}
diff --git a/test/behavior.zig b/test/behavior.zig
index cb484b39a5..5239d61941 100644
--- a/test/behavior.zig
+++ b/test/behavior.zig
@@ -37,6 +37,7 @@ comptime {
_ = @import("cases/pub_enum/index.zig");
_ = @import("cases/ref_var_in_if_after_if_2nd_switch_prong.zig");
_ = @import("cases/reflection.zig");
+ _ = @import("cases/type_info.zig");
_ = @import("cases/sizeof_and_typeof.zig");
_ = @import("cases/slice.zig");
_ = @import("cases/struct.zig");
@@ -53,4 +54,5 @@ comptime {
_ = @import("cases/var_args.zig");
_ = @import("cases/void.zig");
_ = @import("cases/while.zig");
+ _ = @import("cases/fn_in_struct_in_comptime.zig");
}
diff --git a/test/cases/coroutines.zig b/test/cases/coroutines.zig
index d00617eb7c..4aa97861ac 100644
--- a/test/cases/coroutines.zig
+++ b/test/cases/coroutines.zig
@@ -219,8 +219,9 @@ async fn printTrace(p: promise->error!void) void {
std.debug.assert(e == error.Fail);
if (@errorReturnTrace()) |trace| {
assert(trace.index == 1);
- } else if (builtin.mode != builtin.Mode.ReleaseFast) {
- @panic("expected return trace");
+ } else switch (builtin.mode) {
+ builtin.Mode.Debug, builtin.Mode.ReleaseSafe => @panic("expected return trace"),
+ builtin.Mode.ReleaseFast, builtin.Mode.ReleaseSmall => {},
}
};
}
diff --git a/test/cases/enum.zig b/test/cases/enum.zig
index 872e753f20..1c46a3d9e0 100644
--- a/test/cases/enum.zig
+++ b/test/cases/enum.zig
@@ -882,3 +882,12 @@ test "enum with 1 field but explicit tag type should still have the tag type" {
};
comptime @import("std").debug.assert(@sizeOf(Enum) == @sizeOf(u8));
}
+
+test "empty extern enum with members" {
+ const E = extern enum {
+ A,
+ B,
+ C,
+ };
+ assert(@sizeOf(E) == @sizeOf(c_int));
+}
diff --git a/test/cases/fn_in_struct_in_comptime.zig b/test/cases/fn_in_struct_in_comptime.zig
new file mode 100644
index 0000000000..4f181d7ffb
--- /dev/null
+++ b/test/cases/fn_in_struct_in_comptime.zig
@@ -0,0 +1,17 @@
+const assert = @import("std").debug.assert;
+
+fn get_foo() fn(&u8)usize {
+ comptime {
+ return struct {
+ fn func(ptr: &u8) usize {
+ var u = @ptrToInt(ptr);
+ return u;
+ }
+ }.func;
+ }
+}
+
+test "define a function in an anonymous struct in comptime" {
+ const foo = get_foo();
+ assert(foo(@intToPtr(&u8, 12345)) == 12345);
+}
diff --git a/test/cases/math.zig b/test/cases/math.zig
index dfc5946fdb..406ffec8d4 100644
--- a/test/cases/math.zig
+++ b/test/cases/math.zig
@@ -335,6 +335,23 @@ test "big number shifting" {
}
}
+test "big number multi-limb shift and mask" {
+ comptime {
+ var a = 0xefffffffa0000001eeeeeeefaaaaaaab;
+
+ assert(u32(a & 0xffffffff) == 0xaaaaaaab);
+ a >>= 32;
+ assert(u32(a & 0xffffffff) == 0xeeeeeeef);
+ a >>= 32;
+ assert(u32(a & 0xffffffff) == 0xa0000001);
+ a >>= 32;
+ assert(u32(a & 0xffffffff) == 0xefffffff);
+ a >>= 32;
+
+ assert(a == 0);
+ }
+}
+
test "xor" {
test_xor();
comptime test_xor();
diff --git a/test/cases/type_info.zig b/test/cases/type_info.zig
new file mode 100644
index 0000000000..f10703e3ee
--- /dev/null
+++ b/test/cases/type_info.zig
@@ -0,0 +1,200 @@
+const assert = @import("std").debug.assert;
+const mem = @import("std").mem;
+const TypeInfo = @import("builtin").TypeInfo;
+const TypeId = @import("builtin").TypeId;
+
+test "type info: tag type, void info" {
+ comptime {
+ assert(@TagType(TypeInfo) == TypeId);
+ const void_info = @typeInfo(void);
+ assert(TypeId(void_info) == TypeId.Void);
+ assert(void_info.Void == {});
+ }
+}
+
+test "type info: integer, floating point type info" {
+ comptime {
+ const u8_info = @typeInfo(u8);
+ assert(TypeId(u8_info) == TypeId.Int);
+ assert(!u8_info.Int.is_signed);
+ assert(u8_info.Int.bits == 8);
+
+ const f64_info = @typeInfo(f64);
+ assert(TypeId(f64_info) == TypeId.Float);
+ assert(f64_info.Float.bits == 64);
+ }
+}
+
+test "type info: pointer type info" {
+ comptime {
+ const u32_ptr_info = @typeInfo(&u32);
+ assert(TypeId(u32_ptr_info) == TypeId.Pointer);
+ assert(u32_ptr_info.Pointer.is_const == false);
+ assert(u32_ptr_info.Pointer.is_volatile == false);
+ assert(u32_ptr_info.Pointer.alignment == 4);
+ assert(u32_ptr_info.Pointer.child == u32);
+ }
+}
+
+test "type info: slice type info" {
+ comptime {
+ const u32_slice_info = @typeInfo([]u32);
+ assert(TypeId(u32_slice_info) == TypeId.Slice);
+ assert(u32_slice_info.Slice.is_const == false);
+ assert(u32_slice_info.Slice.is_volatile == false);
+ assert(u32_slice_info.Slice.alignment == 4);
+ assert(u32_slice_info.Slice.child == u32);
+ }
+}
+
+test "type info: array type info" {
+ comptime {
+ const arr_info = @typeInfo([42]bool);
+ assert(TypeId(arr_info) == TypeId.Array);
+ assert(arr_info.Array.len == 42);
+ assert(arr_info.Array.child == bool);
+ }
+}
+
+test "type info: nullable type info" {
+ comptime {
+ const null_info = @typeInfo(?void);
+ assert(TypeId(null_info) == TypeId.Nullable);
+ assert(null_info.Nullable.child == void);
+ }
+}
+
+test "type info: promise info" {
+ comptime {
+ const null_promise_info = @typeInfo(promise);
+ assert(TypeId(null_promise_info) == TypeId.Promise);
+ assert(null_promise_info.Promise.child == @typeOf(undefined));
+
+ const promise_info = @typeInfo(promise->usize);
+ assert(TypeId(promise_info) == TypeId.Promise);
+ assert(promise_info.Promise.child == usize);
+ }
+
+}
+
+test "type info: error set, error union info" {
+ comptime {
+ const TestErrorSet = error {
+ First,
+ Second,
+ Third,
+ };
+
+ const error_set_info = @typeInfo(TestErrorSet);
+ assert(TypeId(error_set_info) == TypeId.ErrorSet);
+ assert(error_set_info.ErrorSet.errors.len == 3);
+ assert(mem.eql(u8, error_set_info.ErrorSet.errors[0].name, "First"));
+ assert(error_set_info.ErrorSet.errors[2].value == usize(TestErrorSet.Third));
+
+ const error_union_info = @typeInfo(TestErrorSet!usize);
+ assert(TypeId(error_union_info) == TypeId.ErrorUnion);
+ assert(error_union_info.ErrorUnion.error_set == TestErrorSet);
+ assert(error_union_info.ErrorUnion.payload == usize);
+ }
+}
+
+test "type info: enum info" {
+ comptime {
+ const Os = @import("builtin").Os;
+
+ const os_info = @typeInfo(Os);
+ assert(TypeId(os_info) == TypeId.Enum);
+ assert(os_info.Enum.layout == TypeInfo.ContainerLayout.Auto);
+ assert(os_info.Enum.fields.len == 32);
+ assert(mem.eql(u8, os_info.Enum.fields[1].name, "ananas"));
+ assert(os_info.Enum.fields[10].value == 10);
+ assert(os_info.Enum.tag_type == u5);
+ assert(os_info.Enum.defs.len == 0);
+ }
+}
+
+test "type info: union info" {
+ comptime {
+ const typeinfo_info = @typeInfo(TypeInfo);
+ assert(TypeId(typeinfo_info) == TypeId.Union);
+ assert(typeinfo_info.Union.layout == TypeInfo.ContainerLayout.Auto);
+ assert(typeinfo_info.Union.tag_type == TypeId);
+ assert(typeinfo_info.Union.fields.len == 26);
+ assert(typeinfo_info.Union.fields[4].enum_field != null);
+ assert((??typeinfo_info.Union.fields[4].enum_field).value == 4);
+ assert(typeinfo_info.Union.fields[4].field_type == @typeOf(@typeInfo(u8).Int));
+ assert(typeinfo_info.Union.defs.len == 21);
+
+ const TestNoTagUnion = union {
+ Foo: void,
+ Bar: u32,
+ };
+
+ const notag_union_info = @typeInfo(TestNoTagUnion);
+ assert(TypeId(notag_union_info) == TypeId.Union);
+ assert(notag_union_info.Union.tag_type == @typeOf(undefined));
+ assert(notag_union_info.Union.layout == TypeInfo.ContainerLayout.Auto);
+ assert(notag_union_info.Union.fields.len == 2);
+ assert(notag_union_info.Union.fields[0].enum_field == null);
+ assert(notag_union_info.Union.fields[1].field_type == u32);
+
+ const TestExternUnion = extern union {
+ foo: &c_void,
+ };
+
+ const extern_union_info = @typeInfo(TestExternUnion);
+ assert(extern_union_info.Union.layout == TypeInfo.ContainerLayout.Extern);
+ assert(extern_union_info.Union.tag_type == @typeOf(undefined));
+ assert(extern_union_info.Union.fields[0].enum_field == null);
+ assert(extern_union_info.Union.fields[0].field_type == &c_void);
+ }
+}
+
+test "type info: struct info" {
+ comptime {
+ const struct_info = @typeInfo(TestStruct);
+ assert(TypeId(struct_info) == TypeId.Struct);
+ assert(struct_info.Struct.layout == TypeInfo.ContainerLayout.Packed);
+ assert(struct_info.Struct.fields.len == 3);
+ assert(struct_info.Struct.fields[1].offset == null);
+ assert(struct_info.Struct.fields[2].field_type == &TestStruct);
+ assert(struct_info.Struct.defs.len == 2);
+ assert(struct_info.Struct.defs[0].is_pub);
+ assert(!struct_info.Struct.defs[0].data.Fn.is_extern);
+ assert(struct_info.Struct.defs[0].data.Fn.lib_name == null);
+ assert(struct_info.Struct.defs[0].data.Fn.return_type == void);
+ assert(struct_info.Struct.defs[0].data.Fn.fn_type == fn(&const TestStruct)void);
+ }
+}
+
+const TestStruct = packed struct {
+ const Self = this;
+
+ fieldA: usize,
+ fieldB: void,
+ fieldC: &Self,
+
+ pub fn foo(self: &const Self) void {}
+};
+
+test "type info: function type info" {
+ comptime {
+ const fn_info = @typeInfo(@typeOf(foo));
+ assert(TypeId(fn_info) == TypeId.Fn);
+ assert(fn_info.Fn.calling_convention == TypeInfo.CallingConvention.Unspecified);
+ assert(fn_info.Fn.is_generic);
+ assert(fn_info.Fn.args.len == 2);
+ assert(fn_info.Fn.is_var_args);
+ assert(fn_info.Fn.return_type == @typeOf(undefined));
+ assert(fn_info.Fn.async_allocator_type == @typeOf(undefined));
+
+ const test_instance: TestStruct = undefined;
+ const bound_fn_info = @typeInfo(@typeOf(test_instance.foo));
+ assert(TypeId(bound_fn_info) == TypeId.BoundFn);
+ assert(bound_fn_info.BoundFn.args[0].arg_type == &const TestStruct);
+ }
+}
+
+fn foo(comptime a: usize, b: bool, args: ...) usize {
+ return 0;
+}
diff --git a/test/cases/union.zig b/test/cases/union.zig
index 50cf8004b9..93b5f740be 100644
--- a/test/cases/union.zig
+++ b/test/cases/union.zig
@@ -48,6 +48,16 @@ test "basic unions" {
assert(foo.float == 12.34);
}
+test "comptime union field access" {
+ comptime {
+ var foo = Foo { .int = 0 };
+ assert(foo.int == 0);
+
+ foo = Foo { .float = 42.42 };
+ assert(foo.float == 42.42);
+ }
+}
+
test "init union with runtime value" {
var foo: Foo = undefined;
@@ -275,3 +285,16 @@ const PartialInst = union(enum) {
const PartialInstWithPayload = union(enum) {
Compiled: i32,
};
+
+
+test "access a member of tagged union with conflicting enum tag name" {
+ const Bar = union(enum) {
+ A: A,
+ B: B,
+
+ const A = u8;
+ const B = void;
+ };
+
+ comptime assert(Bar.A == u8);
+}
diff --git a/test/compile_errors.zig b/test/compile_errors.zig
index 52e063eb39..300f27cb6a 100644
--- a/test/compile_errors.zig
+++ b/test/compile_errors.zig
@@ -3210,6 +3210,18 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
,
".tmp_source.zig:5:42: error: zero-bit field 'val' in struct 'Empty' has no offset");
+ cases.add("invalid union field access in comptime",
+ \\const Foo = union {
+ \\ Bar: u8,
+ \\ Baz: void,
+ \\};
+ \\comptime {
+ \\ var foo = Foo {.Baz = {}};
+ \\ const bar_val = foo.Bar;
+ \\}
+ ,
+ ".tmp_source.zig:7:24: error: accessing union field 'Bar' while field 'Baz' is set");
+
cases.add("getting return type of generic function",
\\fn generic(a: var) void {}
\\comptime {
@@ -3225,5 +3237,4 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
\\}
,
".tmp_source.zig:3:36: error: @ArgType could not resolve the type of arg 0 because 'fn(var)var' is generic");
-
}
diff --git a/test/tests.zig b/test/tests.zig
index c3c7bf9d4b..5fbb56b736 100644
--- a/test/tests.zig
+++ b/test/tests.zig
@@ -152,7 +152,7 @@ pub fn addPkgTests(b: &build.Builder, test_filter: ?[]const u8, root_src: []cons
const step = b.step(b.fmt("test-{}", name), desc);
for (test_targets) |test_target| {
const is_native = (test_target.os == builtin.os and test_target.arch == builtin.arch);
- for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
+ for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast, Mode.ReleaseSmall}) |mode| {
for ([]bool{false, true}) |link_libc| {
if (link_libc and !is_native) {
// don't assume we have a cross-compiling libc set up
@@ -451,7 +451,7 @@ pub const CompareOutputContext = struct {
self.step.dependOn(&run_and_cmp_output.step);
},
Special.None => {
- for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
+ for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast, Mode.ReleaseSmall}) |mode| {
const annotated_case_name = fmt.allocPrint(self.b.allocator, "{} {} ({})",
"compare-output", case.name, @tagName(mode)) catch unreachable;
if (self.test_filter) |filter| {
@@ -705,7 +705,7 @@ pub const CompileErrorContext = struct {
pub fn addCase(self: &CompileErrorContext, case: &const TestCase) void {
const b = self.b;
- for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
+ for ([]Mode{Mode.Debug, Mode.ReleaseFast}) |mode| {
const annotated_case_name = fmt.allocPrint(self.b.allocator, "compile-error {} ({})",
case.name, @tagName(mode)) catch unreachable;
if (self.test_filter) |filter| {
@@ -773,7 +773,7 @@ pub const BuildExamplesContext = struct {
pub fn addAllArgs(self: &BuildExamplesContext, root_src: []const u8, link_libc: bool) void {
const b = self.b;
- for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
+ for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast, Mode.ReleaseSmall}) |mode| {
const annotated_case_name = fmt.allocPrint(self.b.allocator, "build {} ({})",
root_src, @tagName(mode)) catch unreachable;
if (self.test_filter) |filter| {
diff --git a/test/translate_c.zig b/test/translate_c.zig
index 9a69c2b03e..2cd59f6f75 100644
--- a/test/translate_c.zig
+++ b/test/translate_c.zig
@@ -1,6 +1,27 @@
const tests = @import("tests.zig");
pub fn addCases(cases: &tests.TranslateCContext) void {
+ cases.add("double define struct",
+ \\typedef struct Bar Bar;
+ \\typedef struct Foo Foo;
+ \\
+ \\struct Foo {
+ \\ Foo *a;
+ \\};
+ \\
+ \\struct Bar {
+ \\ Foo *a;
+ \\};
+ ,
+ \\pub const struct_Foo = extern struct {
+ \\ a: ?&Foo,
+ \\};
+ \\pub const Foo = struct_Foo;
+ \\pub const struct_Bar = extern struct {
+ \\ a: ?&Foo,
+ \\};
+ );
+
cases.addAllowWarnings("simple data types",
\\#include
\\int foo(char a, unsigned char b, signed char c);
@@ -53,6 +74,28 @@ pub fn addCases(cases: &tests.TranslateCContext) void {
\\pub const Foo = enum_Foo;
);
+ cases.add("enums",
+ \\enum Foo {
+ \\ FooA = 2,
+ \\ FooB = 5,
+ \\ Foo1,
+ \\};
+ ,
+ \\pub const enum_Foo = extern enum {
+ \\ A = 2,
+ \\ B = 5,
+ \\ @"1" = 6,
+ \\};
+ ,
+ \\pub const FooA = enum_Foo.A;
+ ,
+ \\pub const FooB = enum_Foo.B;
+ ,
+ \\pub const Foo1 = enum_Foo.@"1";
+ ,
+ \\pub const Foo = enum_Foo;
+ );
+
cases.add("restrict -> noalias",
\\void foo(void *restrict bar, void *restrict);
,