stage2: zig test now works with the LLVM backend

Frontend improvements:

 * When compiling in `zig test` mode, put a task on the work queue to
   analyze the main package root file. Normally, start code does
   `_ = @import("root");` to make Zig analyze the user's code; however, in
   the case of `zig test`, the root source file is the test runner.
   Without this change, no tests are picked up.
 * In the main pipeline, once semantic analysis is finished, if there
   are no compile errors, populate the `test_functions` Decl with the
   set of test functions picked up from semantic analysis (a simplified
   sketch of this flow follows this list).
 * Value: add `array` and `slice` Tags.
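
A rough standalone sketch of this flow, with made-up stand-ins (`WorkItem`,
`analyzePkg`, `update`) rather than the real `Compilation` API:

```zig
const std = @import("std");

// Simplified stand-ins for the work queue machinery; only the control flow
// mirrors the change: in `zig test` mode the main package is queued for
// analysis explicitly (the root file is the test runner, so nothing else
// imports the user's code), and test_functions is only populated once
// analysis finishes with zero compile errors.
const WorkItem = union(enum) {
    analyze_pkg: []const u8,
};

fn analyzePkg(name: []const u8) usize {
    // Stand-in for semantic analysis; returns the number of compile errors.
    std.debug.print("analyzing package: {s}\n", .{name});
    return 0;
}

fn update(use_stage1: bool, is_test: bool) void {
    var work_queue: [2]WorkItem = undefined;
    var queue_len: usize = 0;

    // std is always analyzed; the main package is queued here only when
    // building tests, because the test runner (not the user's file) is root.
    work_queue[queue_len] = .{ .analyze_pkg = "std" };
    queue_len += 1;
    if (!use_stage1 and is_test) {
        work_queue[queue_len] = .{ .analyze_pkg = "main" };
        queue_len += 1;
    }

    var error_count: usize = 0;
    for (work_queue[0..queue_len]) |item| {
        switch (item) {
            .analyze_pkg => |name| {
                error_count += analyzePkg(name);
            },
        }
    }

    if (is_test and error_count == 0) {
        // Corresponds to Module.populateTestFunctions in the diff below.
        std.debug.print("populating test_functions with discovered tests\n", .{});
    }
}

pub fn main() void {
    update(false, true);
}
```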

LLVM backend improvements:

 * Fix incremental updates of globals. Previously, the value of a global
   would not get replaced with a new value.
 * Fix the LLVM type of arrays. The ABI size was incorrectly being passed
   as the element count (see the sketch after this list).
 * Remove the FuncGen parameter from genTypedValue. This function is for
   generating global constants, and there is no function available when
   it is being called.
   - The `ref_val` case is now commented out. I'd like to eliminate
     `ref_val` as one of the possible Value Tags; instead, it should
     always be done via `decl_ref`.
 * Implement constant value generation for slices, arrays, and structs.
 * Constant value generation for functions supports the `decl_ref` tag.
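
The array fix in the second bullet, illustrated with a standalone sketch
(`ArrayInfo` is a made-up stand-in for the compiler's `Type`, not real API):
for a sentinel-terminated array such as `[4:0]u16`, LLVM needs the element
count including the sentinel slot (5), while the ABI size that was
previously passed is a byte count (10).

```zig
const std = @import("std");

// Hypothetical stand-in for the compiler's Type, reduced to what the array
// fix needs: length, element size, and whether a sentinel slot exists.
const ArrayInfo = struct {
    len: u64,
    elem_size: u64,
    has_sentinel: bool,

    fn abiSize(self: ArrayInfo) u64 {
        return self.elementCount() * self.elem_size;
    }

    // This is the quantity now passed to elem_type.arrayType(...): the
    // number of elements, counting the sentinel slot if present.
    fn elementCount(self: ArrayInfo) u64 {
        const sentinel_slot: u64 = if (self.has_sentinel) 1 else 0;
        return self.len + sentinel_slot;
    }
};

pub fn main() void {
    // [4:0]u16: four u16 elements plus a zero sentinel.
    const t = ArrayInfo{ .len = 4, .elem_size = 2, .has_sentinel = true };
    std.debug.print(
        "abi size = {d} bytes, llvm element count = {d}\n",
        .{ t.abiSize(), t.elementCount() },
    );
    // Passing the ABI size (10) as the count produced [10 x i16] instead
    // of the correct [5 x i16].
}
```
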
Andrew Kelley 2021-07-27 14:06:42 -07:00
parent ba71b96fe6
commit a8e964eadd
6 changed files with 332 additions and 82 deletions

View File

@ -1709,7 +1709,9 @@ pub fn update(self: *Compilation) !void {
// in the start code, but when using the stage1 backend that won't happen,
// so in order to run AstGen on the root source file we put it into the
// import_table here.
if (use_stage1) {
// Likewise, in the case of `zig test`, the test runner is the root source file,
// and so there is nothing to import the main file.
if (use_stage1 or self.bin_file.options.is_test) {
_ = try module.importPkg(module.main_pkg);
}
@ -1725,6 +1727,9 @@ pub fn update(self: *Compilation) !void {
if (!use_stage1) {
try self.work_queue.writeItem(.{ .analyze_pkg = std_pkg });
if (self.bin_file.options.is_test) {
try self.work_queue.writeItem(.{ .analyze_pkg = module.main_pkg });
}
}
}
@ -2053,24 +2058,7 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
assert(decl.has_tv);
assert(decl.ty.hasCodeGenBits());
self.bin_file.updateDecl(module, decl) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {
decl.analysis = .codegen_failure;
continue;
},
else => {
try module.failed_decls.ensureUnusedCapacity(gpa, 1);
module.failed_decls.putAssumeCapacityNoClobber(decl, try Module.ErrorMsg.create(
gpa,
decl.srcLoc(),
"unable to codegen: {s}",
.{@errorName(err)},
));
decl.analysis = .codegen_failure_retryable;
continue;
},
};
try module.linkerUpdateDecl(decl);
},
},
.codegen_func => |func| switch (func.owner_decl.analysis) {
@ -2396,6 +2384,14 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
};
},
};
if (self.bin_file.options.is_test and self.totalErrorCount() == 0) {
// The `test_functions` decl has been intentionally postponed until now,
// at which point we must populate it with the list of test functions that
// have been discovered and not filtered out.
const mod = self.bin_file.options.module.?;
try mod.populateTestFunctions();
}
}
const AstGenSrc = union(enum) {

View File

@ -112,6 +112,8 @@ compile_log_text: ArrayListUnmanaged(u8) = .{},
emit_h: ?*GlobalEmitH,
test_functions: std.AutoArrayHashMapUnmanaged(*Decl, void) = .{},
/// A `Module` has zero or one of these depending on whether `-femit-h` is enabled.
pub const GlobalEmitH = struct {
/// Where to put the output.
@ -282,6 +284,7 @@ pub const Decl = struct {
pub fn destroy(decl: *Decl, module: *Module) void {
const gpa = module.gpa;
log.debug("destroy {*} ({s})", .{ decl, decl.name });
_ = module.test_functions.swapRemove(decl);
if (decl.deletion_flag) {
assert(module.deletion_set.swapRemove(decl));
}
@ -3319,6 +3322,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) SemaError!voi
// the test name filter.
if (!mod.comp.bin_file.options.is_test) break :blk false;
if (decl_pkg != mod.main_pkg) break :blk false;
try mod.test_functions.put(gpa, new_decl, {});
break :blk true;
},
else => blk: {
@ -3326,6 +3330,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) SemaError!voi
if (!mod.comp.bin_file.options.is_test) break :blk false;
if (decl_pkg != mod.main_pkg) break :blk false;
// TODO check the name against --test-filter
try mod.test_functions.put(gpa, new_decl, {});
break :blk true;
},
};
@ -3765,17 +3770,38 @@ pub fn createAnonymousDeclNamed(
scope: *Scope,
typed_value: TypedValue,
name: [:0]u8,
) !*Decl {
return mod.createAnonymousDeclFromDeclNamed(scope.ownerDecl().?, typed_value, name);
}
pub fn createAnonymousDecl(mod: *Module, scope: *Scope, typed_value: TypedValue) !*Decl {
return mod.createAnonymousDeclFromDecl(scope.ownerDecl().?, typed_value);
}
pub fn createAnonymousDeclFromDecl(mod: *Module, owner_decl: *Decl, tv: TypedValue) !*Decl {
const name_index = mod.getNextAnonNameIndex();
const name = try std.fmt.allocPrintZ(mod.gpa, "{s}__anon_{d}", .{
owner_decl.name, name_index,
});
return mod.createAnonymousDeclFromDeclNamed(owner_decl, tv, name);
}
/// Takes ownership of `name` even if it returns an error.
pub fn createAnonymousDeclFromDeclNamed(
mod: *Module,
owner_decl: *Decl,
typed_value: TypedValue,
name: [:0]u8,
) !*Decl {
errdefer mod.gpa.free(name);
const scope_decl = scope.ownerDecl().?;
const namespace = scope_decl.namespace;
const namespace = owner_decl.namespace;
try namespace.anon_decls.ensureUnusedCapacity(mod.gpa, 1);
const new_decl = try mod.allocateNewDecl(namespace, scope_decl.src_node);
const new_decl = try mod.allocateNewDecl(namespace, owner_decl.src_node);
new_decl.name = name;
new_decl.src_line = scope_decl.src_line;
new_decl.src_line = owner_decl.src_line;
new_decl.ty = typed_value.ty;
new_decl.val = typed_value.val;
new_decl.has_tv = true;
@ -3796,15 +3822,6 @@ pub fn createAnonymousDeclNamed(
return new_decl;
}
pub fn createAnonymousDecl(mod: *Module, scope: *Scope, typed_value: TypedValue) !*Decl {
const scope_decl = scope.ownerDecl().?;
const name_index = mod.getNextAnonNameIndex();
const name = try std.fmt.allocPrintZ(mod.gpa, "{s}__anon_{d}", .{
scope_decl.name, name_index,
});
return mod.createAnonymousDeclNamed(scope, typed_value, name);
}
pub fn getNextAnonNameIndex(mod: *Module) usize {
return @atomicRmw(usize, &mod.next_anon_name_index, .Add, 1, .Monotonic);
}
@ -4801,3 +4818,95 @@ pub fn processExports(mod: *Module) !void {
};
}
}
pub fn populateTestFunctions(mod: *Module) !void {
const gpa = mod.gpa;
const builtin_pkg = mod.main_pkg.table.get("builtin").?;
const builtin_file = (mod.importPkg(builtin_pkg) catch unreachable).file;
const builtin_namespace = builtin_file.root_decl.?.namespace;
const decl = builtin_namespace.decls.get("test_functions").?;
var buf: Type.Payload.ElemType = undefined;
const tmp_test_fn_ty = decl.ty.slicePtrFieldType(&buf).elemType();
const array_decl = d: {
// Add mod.test_functions to an array decl then make the test_functions
// decl reference it as a slice.
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
const arena = &new_decl_arena.allocator;
const test_fn_vals = try arena.alloc(Value, mod.test_functions.count());
const array_decl = try mod.createAnonymousDeclFromDecl(decl, .{
.ty = try Type.Tag.array.create(arena, .{
.len = test_fn_vals.len,
.elem_type = try tmp_test_fn_ty.copy(arena),
}),
.val = try Value.Tag.array.create(arena, test_fn_vals),
});
for (mod.test_functions.keys()) |test_decl, i| {
const test_name_slice = mem.sliceTo(test_decl.name, 0);
const test_name_decl = n: {
var name_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer name_decl_arena.deinit();
const bytes = try name_decl_arena.allocator.dupe(u8, test_name_slice);
const test_name_decl = try mod.createAnonymousDeclFromDecl(array_decl, .{
.ty = try Type.Tag.array_u8.create(&name_decl_arena.allocator, bytes.len),
.val = try Value.Tag.bytes.create(&name_decl_arena.allocator, bytes),
});
try test_name_decl.finalizeNewArena(&name_decl_arena);
break :n test_name_decl;
};
try mod.linkerUpdateDecl(test_name_decl);
const field_vals = try arena.create([3]Value);
field_vals.* = .{
try Value.Tag.slice.create(arena, .{
.ptr = try Value.Tag.decl_ref.create(arena, test_name_decl),
.len = try Value.Tag.int_u64.create(arena, test_name_slice.len),
}), // name
try Value.Tag.decl_ref.create(arena, test_decl), // func
Value.initTag(.null_value), // async_frame_size
};
test_fn_vals[i] = try Value.Tag.@"struct".create(arena, field_vals);
}
try array_decl.finalizeNewArena(&new_decl_arena);
break :d array_decl;
};
try mod.linkerUpdateDecl(array_decl);
{
var arena_instance = decl.value_arena.?.promote(gpa);
defer decl.value_arena.?.* = arena_instance.state;
const arena = &arena_instance.allocator;
decl.ty = try Type.Tag.const_slice.create(arena, try tmp_test_fn_ty.copy(arena));
decl.val = try Value.Tag.slice.create(arena, .{
.ptr = try Value.Tag.decl_ref.create(arena, array_decl),
.len = try Value.Tag.int_u64.create(arena, mod.test_functions.count()),
});
}
try mod.linkerUpdateDecl(decl);
}
pub fn linkerUpdateDecl(mod: *Module, decl: *Decl) !void {
mod.comp.bin_file.updateDecl(mod, decl) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {
decl.analysis = .codegen_failure;
return;
},
else => {
const gpa = mod.gpa;
try mod.failed_decls.ensureUnusedCapacity(gpa, 1);
mod.failed_decls.putAssumeCapacityNoClobber(decl, try ErrorMsg.create(
gpa,
decl.srcLoc(),
"unable to codegen: {s}",
.{@errorName(err)},
));
decl.analysis = .codegen_failure_retryable;
return;
},
};
}

View File

@ -500,7 +500,18 @@ pub const DeclGen = struct {
} else if (decl.val.castTag(.extern_fn)) |extern_fn| {
_ = try self.resolveLlvmFunction(extern_fn.data);
} else {
_ = try self.resolveGlobalDecl(decl);
const global = try self.resolveGlobalDecl(decl);
assert(decl.has_tv);
const init_val = if (decl.val.castTag(.variable)) |payload| init_val: {
const variable = payload.data;
break :init_val variable.init;
} else init_val: {
global.setGlobalConstant(.True);
break :init_val decl.val;
};
const llvm_init = try self.genTypedValue(.{ .ty = decl.ty, .val = init_val });
llvm.setInitializer(global, llvm_init);
}
}
@ -548,25 +559,11 @@ pub const DeclGen = struct {
}
fn resolveGlobalDecl(self: *DeclGen, decl: *Module.Decl) error{ OutOfMemory, CodegenFail }!*const llvm.Value {
if (self.llvmModule().getNamedGlobal(decl.name)) |val| return val;
assert(decl.has_tv);
const llvm_module = self.object.llvm_module;
if (llvm_module.getNamedGlobal(decl.name)) |val| return val;
// TODO: remove this redundant `llvmType`, it is also called in `genTypedValue`.
const llvm_type = try self.llvmType(decl.ty);
const global = self.llvmModule().addGlobal(llvm_type, decl.name);
const init_val = if (decl.val.castTag(.variable)) |payload| init_val: {
const variable = payload.data;
break :init_val variable.init;
} else init_val: {
global.setGlobalConstant(.True);
break :init_val decl.val;
};
const llvm_init = try self.genTypedValue(.{ .ty = decl.ty, .val = init_val }, null);
llvm.setInitializer(global, llvm_init);
return global;
return llvm_module.addGlobal(llvm_type, decl.name);
}
fn llvmType(self: *DeclGen, t: Type) error{ OutOfMemory, CodegenFail }!*const llvm.Type {
@ -596,7 +593,8 @@ pub const DeclGen = struct {
},
.Array => {
const elem_type = try self.llvmType(t.elemType());
return elem_type.arrayType(@intCast(c_uint, t.abiSize(self.module.getTarget())));
const total_len = t.arrayLen() + @boolToInt(t.sentinel() != null);
return elem_type.arrayType(@intCast(c_uint, total_len));
},
.Optional => {
if (!t.isPtrLikeOptional()) {
@ -674,8 +672,7 @@ pub const DeclGen = struct {
}
}
// TODO: figure out a way to remove the FuncGen argument
fn genTypedValue(self: *DeclGen, tv: TypedValue, fg: ?*FuncGen) error{ OutOfMemory, CodegenFail }!*const llvm.Value {
fn genTypedValue(self: *DeclGen, tv: TypedValue) error{ OutOfMemory, CodegenFail }!*const llvm.Value {
const llvm_type = try self.llvmType(tv.ty);
if (tv.val.isUndef())
@ -711,20 +708,36 @@ pub const DeclGen = struct {
usize_type.constNull(),
};
// TODO: consider using buildInBoundsGEP2 for opaque pointers
return fg.?.builder.buildInBoundsGEP(val, &indices, 2, "");
return val.constInBoundsGEP(&indices, indices.len);
},
.ref_val => {
const elem_value = tv.val.castTag(.ref_val).?.data;
const elem_type = tv.ty.castPointer().?.data;
const alloca = fg.?.buildAlloca(try self.llvmType(elem_type));
_ = fg.?.builder.buildStore(try self.genTypedValue(.{ .ty = elem_type, .val = elem_value }, fg), alloca);
return alloca;
//const elem_value = tv.val.castTag(.ref_val).?.data;
//const elem_type = tv.ty.castPointer().?.data;
//const alloca = fg.?.buildAlloca(try self.llvmType(elem_type));
//_ = fg.?.builder.buildStore(try self.genTypedValue(.{ .ty = elem_type, .val = elem_value }, fg), alloca);
//return alloca;
// TODO eliminate the ref_val Value Tag
return self.todo("implement const of pointer tag ref_val", .{});
},
.variable => {
const variable = tv.val.castTag(.variable).?.data;
return self.resolveGlobalDecl(variable.owner_decl);
},
.slice => {
const slice = tv.val.castTag(.slice).?.data;
var buf: Type.Payload.ElemType = undefined;
const fields: [2]*const llvm.Value = .{
try self.genTypedValue(.{
.ty = tv.ty.slicePtrFieldType(&buf),
.val = slice.ptr,
}),
try self.genTypedValue(.{
.ty = Type.initTag(.usize),
.val = slice.len,
}),
};
return self.context.constStruct(&fields, fields.len, .False);
},
else => |tag| return self.todo("implement const of pointer type '{}' ({})", .{ tv.ty, tag }),
},
.Array => {
@ -734,10 +747,28 @@ pub const DeclGen = struct {
return self.todo("handle other sentinel values", .{});
} else false;
return self.context.constString(payload.data.ptr, @intCast(c_uint, payload.data.len), llvm.Bool.fromBool(!zero_sentinel));
} else {
return self.todo("handle more array values", .{});
return self.context.constString(
payload.data.ptr,
@intCast(c_uint, payload.data.len),
llvm.Bool.fromBool(!zero_sentinel),
);
}
if (tv.val.castTag(.array)) |payload| {
const gpa = self.gpa;
const elem_ty = tv.ty.elemType();
const elem_vals = payload.data;
const llvm_elems = try gpa.alloc(*const llvm.Value, elem_vals.len);
defer gpa.free(llvm_elems);
for (elem_vals) |elem_val, i| {
llvm_elems[i] = try self.genTypedValue(.{ .ty = elem_ty, .val = elem_val });
}
const llvm_elem_ty = try self.llvmType(elem_ty);
return llvm_elem_ty.constArray(
llvm_elems.ptr,
@intCast(c_uint, llvm_elems.len),
);
}
return self.todo("handle more array values", .{});
},
.Optional => {
if (!tv.ty.isPtrLikeOptional()) {
@ -750,26 +781,25 @@ pub const DeclGen = struct {
llvm_child_type.constNull(),
self.context.intType(1).constNull(),
};
return self.context.constStruct(&optional_values, 2, .False);
return self.context.constStruct(&optional_values, optional_values.len, .False);
} else {
var optional_values: [2]*const llvm.Value = .{
try self.genTypedValue(.{ .ty = child_type, .val = tv.val }, fg),
try self.genTypedValue(.{ .ty = child_type, .val = tv.val }),
self.context.intType(1).constAllOnes(),
};
return self.context.constStruct(&optional_values, 2, .False);
return self.context.constStruct(&optional_values, optional_values.len, .False);
}
} else {
return self.todo("implement const of optional pointer", .{});
}
},
.Fn => {
const fn_decl = if (tv.val.castTag(.extern_fn)) |extern_fn|
extern_fn.data
else if (tv.val.castTag(.function)) |func_payload|
func_payload.data.owner_decl
else
unreachable;
const fn_decl = switch (tv.val.tag()) {
.extern_fn => tv.val.castTag(.extern_fn).?.data,
.function => tv.val.castTag(.function).?.data.owner_decl,
.decl_ref => tv.val.castTag(.decl_ref).?.data,
else => unreachable,
};
return self.resolveLlvmFunction(fn_decl);
},
.ErrorSet => {
@ -793,11 +823,29 @@ pub const DeclGen = struct {
if (!payload_type.hasCodeGenBits()) {
// We use the error type directly as the type.
return self.genTypedValue(.{ .ty = error_type, .val = sub_val }, fg);
return self.genTypedValue(.{ .ty = error_type, .val = sub_val });
}
return self.todo("implement error union const of type '{}'", .{tv.ty});
},
.Struct => {
const fields_len = tv.ty.structFieldCount();
const field_vals = tv.val.castTag(.@"struct").?.data;
const gpa = self.gpa;
const llvm_fields = try gpa.alloc(*const llvm.Value, fields_len);
defer gpa.free(llvm_fields);
for (llvm_fields) |*llvm_field, i| {
llvm_field.* = try self.genTypedValue(.{
.ty = tv.ty.structFieldType(i),
.val = field_vals[i],
});
}
return self.context.constStruct(
llvm_fields.ptr,
@intCast(c_uint, llvm_fields.len),
.False,
);
},
else => return self.todo("implement const of type '{}'", .{tv.ty}),
}
}
@ -869,7 +917,7 @@ pub const FuncGen = struct {
fn resolveInst(self: *FuncGen, inst: Air.Inst.Ref) !*const llvm.Value {
if (self.air.value(inst)) |val| {
return self.dg.genTypedValue(.{ .ty = self.air.typeOf(inst), .val = val }, self);
return self.dg.genTypedValue(.{ .ty = self.air.typeOf(inst), .val = val });
}
const inst_index = Air.refToIndex(inst).?;
if (self.func_inst_table.get(inst_index)) |value| return value;

View File

@ -49,7 +49,12 @@ pub const Context = opaque {
extern fn LLVMConstStringInContext(C: *const Context, Str: [*]const u8, Length: c_uint, DontNullTerminate: Bool) *const Value;
pub const constStruct = LLVMConstStructInContext;
extern fn LLVMConstStructInContext(C: *const Context, ConstantVals: [*]*const Value, Count: c_uint, Packed: Bool) *const Value;
extern fn LLVMConstStructInContext(
C: *const Context,
ConstantVals: [*]const *const Value,
Count: c_uint,
Packed: Bool,
) *const Value;
pub const createBasicBlock = LLVMCreateBasicBlockInContext;
extern fn LLVMCreateBasicBlockInContext(C: *const Context, Name: [*:0]const u8) *const BasicBlock;
@ -100,6 +105,13 @@ pub const Value = opaque {
pub const setAliasee = LLVMAliasSetAliasee;
extern fn LLVMAliasSetAliasee(Alias: *const Value, Aliasee: *const Value) void;
pub const constInBoundsGEP = LLVMConstInBoundsGEP;
extern fn LLVMConstInBoundsGEP(
ConstantVal: *const Value,
ConstantIndices: [*]const *const Value,
NumIndices: c_uint,
) *const Value;
};
pub const Type = opaque {
@ -113,7 +125,7 @@ pub const Type = opaque {
extern fn LLVMConstInt(IntTy: *const Type, N: c_ulonglong, SignExtend: Bool) *const Value;
pub const constArray = LLVMConstArray;
extern fn LLVMConstArray(ElementTy: *const Type, ConstantVals: ?[*]*const Value, Length: c_uint) *const Value;
extern fn LLVMConstArray(ElementTy: *const Type, ConstantVals: [*]*const Value, Length: c_uint) *const Value;
pub const getUndef = LLVMGetUndef;
extern fn LLVMGetUndef(Ty: *const Type) *const Value;

View File

@ -1526,6 +1526,8 @@ pub const Type = extern union {
.var_args_param => unreachable,
.@"struct" => {
const s = self.castTag(.@"struct").?.data;
assert(s.status == .have_layout);
@panic("TODO abiSize struct");
},
.enum_simple, .enum_full, .enum_nonexhaustive => {
@ -2768,6 +2770,26 @@ pub const Type = extern union {
}
}
pub fn structFieldCount(ty: Type) usize {
switch (ty.tag()) {
.@"struct" => {
const struct_obj = ty.castTag(.@"struct").?.data;
return struct_obj.fields.count();
},
else => unreachable,
}
}
pub fn structFieldType(ty: Type, index: usize) Type {
switch (ty.tag()) {
.@"struct" => {
const struct_obj = ty.castTag(.@"struct").?.data;
return struct_obj.fields.values()[index].ty;
},
else => unreachable,
}
}
pub fn declSrcLoc(ty: Type) Module.SrcLoc {
switch (ty.tag()) {
.enum_full, .enum_nonexhaustive => {

View File

@ -112,6 +112,10 @@ pub const Value = extern union {
/// This value is repeated some number of times. The amount of times to repeat
/// is stored externally.
repeated,
/// Each element stored as a `Value`.
array,
/// Pointer and length as sub `Value` objects.
slice,
float_16,
float_32,
float_64,
@ -217,6 +221,9 @@ pub const Value = extern union {
.enum_literal,
=> Payload.Bytes,
.array => Payload.Array,
.slice => Payload.Slice,
.enum_field_index => Payload.U32,
.ty => Payload.Ty,
@ -442,6 +449,28 @@ pub const Value = extern union {
};
return Value{ .ptr_otherwise = &new_payload.base };
},
.array => {
const payload = self.castTag(.array).?;
const new_payload = try allocator.create(Payload.Array);
new_payload.* = .{
.base = payload.base,
.data = try allocator.alloc(Value, payload.data.len),
};
std.mem.copy(Value, new_payload.data, payload.data);
return Value{ .ptr_otherwise = &new_payload.base };
},
.slice => {
const payload = self.castTag(.slice).?;
const new_payload = try allocator.create(Payload.Slice);
new_payload.* = .{
.base = payload.base,
.data = .{
.ptr = try payload.data.ptr.copy(allocator),
.len = try payload.data.len.copy(allocator),
},
};
return Value{ .ptr_otherwise = &new_payload.base };
},
.float_16 => return self.copyPayloadShallow(allocator, Payload.Float_16),
.float_32 => return self.copyPayloadShallow(allocator, Payload.Float_32),
.float_64 => return self.copyPayloadShallow(allocator, Payload.Float_64),
@ -605,6 +634,8 @@ pub const Value = extern union {
try out_stream.writeAll("(repeated) ");
val = val.castTag(.repeated).?.data;
},
.array => return out_stream.writeAll("(array)"),
.slice => return out_stream.writeAll("(slice)"),
.float_16 => return out_stream.print("{}", .{val.castTag(.float_16).?.data}),
.float_32 => return out_stream.print("{}", .{val.castTag(.float_32).?.data}),
.float_64 => return out_stream.print("{}", .{val.castTag(.float_64).?.data}),
@ -729,6 +760,8 @@ pub const Value = extern union {
.field_ptr,
.bytes,
.repeated,
.array,
.slice,
.float_16,
.float_32,
.float_64,
@ -1075,6 +1108,8 @@ pub const Value = extern union {
return orderAgainstZero(lhs).compare(op);
}
/// TODO we can't compare value equality without also knowing the type to treat
/// the values as
pub fn eql(a: Value, b: Value) bool {
const a_tag = a.tag();
const b_tag = b.tag();
@ -1109,6 +1144,8 @@ pub const Value = extern union {
return @truncate(u32, self.hash());
}
/// TODO we can't hash without also knowing the type of the value.
/// we have to hash as if there were a canonical value memory layout.
pub fn hash(self: Value) u64 {
var hasher = std.hash.Wyhash.init(0);
@ -1203,6 +1240,15 @@ pub const Value = extern union {
const payload = self.castTag(.bytes).?;
hasher.update(payload.data);
},
.repeated => {
@panic("TODO Value.hash for repeated");
},
.array => {
@panic("TODO Value.hash for array");
},
.slice => {
@panic("TODO Value.hash for slice");
},
.int_u64 => {
const payload = self.castTag(.int_u64).?;
std.hash.autoHash(&hasher, payload.data);
@ -1211,10 +1257,6 @@ pub const Value = extern union {
const payload = self.castTag(.int_i64).?;
std.hash.autoHash(&hasher, payload.data);
},
.repeated => {
const payload = self.castTag(.repeated).?;
std.hash.autoHash(&hasher, payload.data.hash());
},
.ref_val => {
const payload = self.castTag(.ref_val).?;
std.hash.autoHash(&hasher, payload.data.hash());
@ -1340,6 +1382,8 @@ pub const Value = extern union {
return switch (val.tag()) {
.empty_array => 0,
.bytes => val.castTag(.bytes).?.data.len,
.array => val.castTag(.array).?.data.len,
.slice => val.castTag(.slice).?.data.len.toUnsignedInt(),
.ref_val => sliceLen(val.castTag(.ref_val).?.data),
.decl_ref => {
const decl = val.castTag(.decl_ref).?.data;
@ -1364,6 +1408,9 @@ pub const Value = extern union {
// No matter the index; all the elements are the same!
.repeated => return self.castTag(.repeated).?.data,
.array => return self.castTag(.array).?.data[index],
.slice => return self.castTag(.slice).?.data.ptr.elemValue(allocator, index),
else => unreachable,
}
}
@ -1450,7 +1497,8 @@ pub const Value = extern union {
}
/// Valid for all types. Asserts the value is not undefined.
pub fn isType(self: Value) bool {
/// TODO this function is a code smell and should be deleted
fn isType(self: Value) bool {
return switch (self.tag()) {
.ty,
.int_type,
@ -1528,6 +1576,8 @@ pub const Value = extern union {
.field_ptr,
.bytes,
.repeated,
.array,
.slice,
.float_16,
.float_32,
.float_64,
@ -1638,6 +1688,19 @@ pub const Value = extern union {
data: []const u8,
};
pub const Array = struct {
base: Payload,
data: []Value,
};
pub const Slice = struct {
base: Payload,
data: struct {
ptr: Value,
len: Value,
},
};
pub const Ty = struct {
base: Payload,
data: Type,