wasm: All union/tuple/array tests passing

This implements improvements/fixes to get all the union, tuple, and array behavior tests passing.
Previously, we lowered parent pointers for field_ptr and element_ptr incompletely. This has now
been improved to recursively lower such pointers.

Also a fix was done to `generateSymbol` when checking a container's layout.
Previously it was assumed to always be a struct. However, the type can also be a tuple, which caused a panic.
Asking for the type's container layout instead allows us to keep a single branch for both cases.
This commit is contained in:
Luuk de Gram 2022-03-19 20:28:28 +01:00
parent 2041176c5e
commit 56590218c5
No known key found for this signature in database
GPG Key ID: A8CFE58E4DC7D664
2 changed files with 119 additions and 52 deletions

View File

@ -1816,18 +1816,103 @@ fn airWrapBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue {
return bin_local;
}
/// Recursively lowers a pointer `Value` that (possibly transitively) points
/// into a declaration: a plain decl reference, a field of a container, or an
/// element of an array. Returns either a `.memory` WValue (direct symbol) or
/// a `.memory_offset` WValue (symbol plus accumulated byte offset).
/// `ptr_child_ty` is the pointee type of `ptr_val`, used to size elements.
fn lowerParentPtr(self: *Self, ptr_val: Value, ptr_child_ty: Type) InnerError!WValue {
    switch (ptr_val.tag()) {
        .decl_ref_mut => {
            const decl = ptr_val.castTag(.decl_ref_mut).?.data.decl;
            return self.lowerParentPtrDecl(ptr_val, decl);
        },
        .decl_ref => {
            const decl = ptr_val.castTag(.decl_ref).?.data;
            return self.lowerParentPtrDecl(ptr_val, decl);
        },
        .variable => {
            const decl = ptr_val.castTag(.variable).?.data.owner_decl;
            return self.lowerParentPtrDecl(ptr_val, decl);
        },
        .field_ptr => {
            const field_ptr = ptr_val.castTag(.field_ptr).?.data;
            const parent_ty = field_ptr.container_ty;
            const parent_ptr = try self.lowerParentPtr(field_ptr.container_ptr, parent_ty);
            // Byte offset of the field within its container.
            const offset = switch (parent_ty.zigTypeTag()) {
                .Struct => blk: {
                    const offset = parent_ty.structFieldOffset(field_ptr.field_index, self.target);
                    break :blk offset;
                },
                .Union => blk: {
                    const layout: Module.Union.Layout = parent_ty.unionGetLayout(self.target);
                    if (layout.payload_size == 0) break :blk 0;
                    if (layout.payload_align > layout.tag_align) break :blk 0;
                    // tag is stored first so calculate offset from where payload starts
                    const offset = @intCast(u32, std.mem.alignForwardGeneric(u64, layout.tag_size, layout.tag_align));
                    break :blk offset;
                },
                else => unreachable,
            };
            return switch (parent_ptr) {
                .memory => |ptr| WValue{
                    .memory_offset = .{
                        .pointer = ptr,
                        .offset = @intCast(u32, offset),
                    },
                },
                .memory_offset => |mem_off| WValue{
                    .memory_offset = .{
                        .pointer = mem_off.pointer,
                        .offset = @intCast(u32, offset) + mem_off.offset,
                    },
                },
                else => unreachable,
            };
        },
        .elem_ptr => {
            const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
            const index = elem_ptr.index;
            const offset = index * ptr_child_ty.abiSize(self.target);
            const array_ptr = try self.lowerParentPtr(elem_ptr.array_ptr, elem_ptr.elem_ty);
            // The recursive call may itself yield a `.memory_offset` (e.g. an
            // element of a field of a decl), so fold offsets instead of
            // assuming the base is always a plain `.memory` symbol.
            return switch (array_ptr) {
                .memory => |ptr| WValue{
                    .memory_offset = .{
                        .pointer = ptr,
                        .offset = @intCast(u32, offset),
                    },
                },
                .memory_offset => |mem_off| WValue{
                    .memory_offset = .{
                        .pointer = mem_off.pointer,
                        .offset = @intCast(u32, offset) + mem_off.offset,
                    },
                },
                else => unreachable,
            };
        },
        else => |tag| return self.fail("TODO: Implement lowerParentPtr for tag: {}", .{tag}),
    }
}
/// Lowers a pointer value that refers directly to `decl`.
/// Marks the declaration alive so the linker keeps its symbol, then lowers
/// it as a decl reference with a synthesized single-item pointer type.
fn lowerParentPtrDecl(self: *Self, ptr_val: Value, decl: *Module.Decl) InnerError!WValue {
    decl.markAlive();
    // Wrap the decl's type in a `*` pointer; the payload lives on the stack
    // only for the duration of the lowerDeclRefValue call below.
    var pointee_payload: Type.Payload.ElemType = .{
        .base = .{ .tag = .single_mut_pointer },
        .data = decl.ty,
    };
    return self.lowerDeclRefValue(
        .{ .ty = Type.initPayload(&pointee_payload.base), .val = ptr_val },
        decl,
    );
}
/// Lowers a typed value that references `decl`.
/// Slices are emitted as unnamed constants; zero-bit non-function decls get a
/// debug filler immediate; functions are registered in the indirect function
/// table; everything else resolves to the decl's memory symbol.
fn lowerDeclRefValue(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!WValue {
    if (tv.ty.isSlice()) {
        return WValue{ .memory = try self.bin_file.lowerUnnamedConst(self.decl, tv) };
    }
    const is_fn_decl = decl.ty.zigTypeTag() == .Fn;
    if (!is_fn_decl and !decl.ty.hasRuntimeBitsIgnoreComptime()) {
        // No runtime representation; emit a recognizable filler value.
        return WValue{ .imm32 = 0xaaaaaaaa };
    }
    decl.markAlive();
    const sym_index = decl.link.wasm.sym_index;
    if (is_fn_decl) {
        try self.bin_file.addTableFunction(sym_index);
        return WValue{ .function_index = sym_index };
    }
    return WValue{ .memory = sym_index };
}
fn lowerConstant(self: *Self, val: Value, ty: Type) InnerError!WValue {
if (val.isUndefDeep()) return self.emitUndefined(ty);
if (val.castTag(.decl_ref)) |decl_ref| {
const decl = decl_ref.data;
decl.markAlive();
const target_sym_index = decl.link.wasm.sym_index;
if (ty.isSlice()) {
return WValue{ .memory = try self.bin_file.lowerUnnamedConst(self.decl, .{ .ty = ty, .val = val }) };
} else if (decl.ty.zigTypeTag() == .Fn) {
try self.bin_file.addTableFunction(target_sym_index);
return WValue{ .function_index = target_sym_index };
} else return WValue{ .memory = target_sym_index };
return self.lowerDeclRefValue(.{ .ty = ty, .val = val }, decl);
}
switch (ty.zigTypeTag()) {
@ -1854,37 +1939,8 @@ fn lowerConstant(self: *Self, val: Value, ty: Type) InnerError!WValue {
else => unreachable,
},
.Pointer => switch (val.tag()) {
.elem_ptr => {
const elem_ptr = val.castTag(.elem_ptr).?.data;
const index = elem_ptr.index;
const offset = index * ty.childType().abiSize(self.target);
const array_ptr = try self.lowerConstant(elem_ptr.array_ptr, ty);
return WValue{ .memory_offset = .{
.pointer = array_ptr.memory,
.offset = @intCast(u32, offset),
} };
},
.field_ptr => {
const field_ptr = val.castTag(.field_ptr).?.data;
const container = field_ptr.container_ptr;
const parent_ptr = try self.lowerConstant(container, ty);
const offset = switch (container.tag()) {
.decl_ref => blk: {
const decl_ref = container.castTag(.decl_ref).?.data;
if (decl_ref.ty.castTag(.@"struct")) |_| {
const offset = decl_ref.ty.structFieldOffset(field_ptr.field_index, self.target);
break :blk offset;
}
return self.fail("Wasm TODO: field_ptr decl_ref for type '{}'", .{decl_ref.ty});
},
else => |tag| return self.fail("Wasm TODO: Implement field_ptr for value tag: '{s}'", .{tag}),
};
return WValue{ .memory_offset = .{
.pointer = parent_ptr.memory,
.offset = @intCast(u32, offset),
} };
.field_ptr, .elem_ptr => {
return self.lowerParentPtr(val, ty.childType());
},
.int_u64, .one => return WValue{ .imm32 = @intCast(u32, val.toUnsignedInt()) },
.zero, .null_value => return WValue{ .imm32 = 0 },
@ -1997,6 +2053,11 @@ fn valueAsI32(self: Self, val: Value, ty: Type) i32 {
return self.valueAsI32(tag_val, enum_full.tag_ty);
} else return @bitCast(i32, field_index.data);
},
.enum_numbered => {
const index = field_index.data;
const enum_data = ty.castTag(.enum_numbered).?.data;
return self.valueAsI32(enum_data.values.keys()[index], enum_data.tag_ty);
},
else => unreachable,
}
} else {
@ -2122,8 +2183,16 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) Inner
return self.cmpBigInt(lhs, rhs, operand_ty, op);
}
try self.emitWValue(lhs);
try self.emitWValue(rhs);
// ensure that when we compare pointers, we emit
// the true pointer of a stack value, rather than the stack pointer.
switch (lhs) {
.stack_offset => try self.emitWValue(try self.buildPointerOffset(lhs, 0, .new)),
else => try self.emitWValue(lhs),
}
switch (rhs) {
.stack_offset => try self.emitWValue(try self.buildPointerOffset(rhs, 0, .new)),
else => try self.emitWValue(rhs),
}
const signedness: std.builtin.Signedness = blk: {
// by default we tell the operand type is unsigned (i.e. bools and enum values)
@ -3159,16 +3228,16 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
if (self.liveness.isUnused(inst)) return WValue{ .none = {} };
const vector_ty = self.air.typeOfIndex(inst);
const len = vector_ty.vectorLen();
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const result_ty = self.air.typeOfIndex(inst);
const len = @intCast(usize, result_ty.arrayLen());
const elements = @bitCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
switch (vector_ty.zigTypeTag()) {
switch (result_ty.zigTypeTag()) {
.Vector => return self.fail("TODO: Wasm backend: implement airAggregateInit for vectors", .{}),
.Array => {
const result = try self.allocStack(vector_ty);
const elem_ty = vector_ty.childType();
const result = try self.allocStack(result_ty);
const elem_ty = result_ty.childType();
const elem_size = @intCast(u32, elem_ty.abiSize(self.target));
// When the element type is by reference, we must copy the entire
@ -3197,13 +3266,12 @@ fn airAggregateInit(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
return result;
},
.Struct => {
const tuple = vector_ty.castTag(.tuple).?.data;
const result = try self.allocStack(vector_ty);
const result = try self.allocStack(result_ty);
const offset = try self.buildPointerOffset(result, 0, .new); // pointer to offset
for (elements) |elem, elem_index| {
if (tuple.values[elem_index].tag() != .unreachable_value) continue;
if (result_ty.structFieldValueComptime(elem_index) != null) continue;
const elem_ty = tuple.types[elem_index];
const elem_ty = result_ty.structFieldType(elem_index);
const elem_size = @intCast(u32, elem_ty.abiSize(self.target));
const value = try self.resolveInst(elem);
try self.store(offset, value, elem_ty, 0);

View File

@ -530,8 +530,7 @@ pub fn generateSymbol(
return Result{ .appended = {} };
},
.Struct => {
const struct_obj = typed_value.ty.castTag(.@"struct").?.data;
if (struct_obj.layout == .Packed) {
if (typed_value.ty.containerLayout() == .Packed) {
return Result{
.fail = try ErrorMsg.create(
bin_file.allocator,