From c18bc08e3c658f50faf7668f8940a11326f3947a Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Wed, 17 Nov 2021 08:58:08 +0100 Subject: [PATCH 1/6] wasm: Linker - emit stack pointer The self-hosted wasm linker now emits a mutable global. This entry represents the stack pointer, which has an initial value of offset table size + data size + stack size. Stack size can either be set by the user, or has the default of a single wasm page (64KiB). --- src/link/Wasm.zig | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig index a19d014dbe..cf05fcd94a 100644 --- a/src/link/Wasm.zig +++ b/src/link/Wasm.zig @@ -404,6 +404,8 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void { // The table contains all decl's with its corresponding offset into // the 'data' section const offset_table_size = @intCast(u32, self.offset_table.items.len * ptr_width); + // The size of the emulated stack + const stack_size = @intCast(u32, self.base.options.stack_size_override orelse std.wasm.page_size); // The size of the data, this together with `offset_table_size` amounts to the // total size of the 'data' section @@ -487,7 +489,7 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void { } // Memory section - if (data_size != 0) { + { const header_offset = try reserveVecSectionHeader(file); const writer = file.writer(); @@ -498,7 +500,7 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void { writer, try std.math.divCeil( u32, - offset_table_size + data_size, + offset_table_size + data_size + stack_size, std.wasm.page_size, ), ); @@ -511,6 +513,34 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void { ); } + // Global section (used to emit stack pointer) + { + // We emit the emulated stack at the end of the data section, + // 'growing' downwards towards the program memory. + // TODO: Have linker resolve the offset table, so we can emit the stack + // at the start so we can't overwrite program memory with the stack. + const sp_value = offset_table_size + data_size + std.wasm.page_size; + const mutable = true; // stack pointer MUST be mutable + const header_offset = try reserveVecSectionHeader(file); + const writer = file.writer(); + + try writer.writeByte(wasm.valtype(.i32)); + try writer.writeByte(@boolToInt(mutable)); + + // set the initial value of the stack pointer to the data size + stack size + try writer.writeByte(wasm.opcode(.i32_const)); + try leb.writeILEB128(writer, @bitCast(i32, sp_value)); + try writer.writeByte(wasm.opcode(.end)); + + try writeVecSectionHeader( + file, + header_offset, + .global, + @intCast(u32, (try file.getPos()) - header_offset - header_size), + @as(u32, 1), + ); + } + // Export section if (self.base.options.module) |module| { const header_offset = try reserveVecSectionHeader(file); From 261f13414b6bafbe075ce5066964b36a3a5b5e16 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Wed, 17 Nov 2021 09:08:32 +0100 Subject: [PATCH 2/6] wasm: Implement emulated stack All non-temporary locals will now use stack memory. When `airAlloc` is called, we create a new local, move the stack pointer, and write its offset into the local. Arguments act as a register and do not use any stack space. We no longer use offsets for binary operations, but instead write the result into a local. In this case, the local is simply used as a register, and does not require stack space. 
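As an illustration of the new lowering (the local names below are placeholders, not identifiers taken from the generated code), a binary operation now reads its operands from locals and immediately spills its result into a fresh local:

    local.get $lhs
    local.get $rhs
    i32.add
    local.set $result   ;; result lives in a local acting as a register

and an `airAlloc` of an N-byte value bumps the emulated stack pointer downwards, keeping the slot's address in a local:

    global.get 0        ;; stack pointer global (hardcoded to index 0 for now)
    i32.const N         ;; N = ABI size of the allocated type
    i32.sub
    local.tee $slot     ;; $slot now holds the stack slot's address
    global.set 0
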
This allows us to ensure the order of instructions is correct, and we no longer require any patching/inserting at a specific offset. print_air was missing the logic to print the type of a `ty_str`. --- src/arch/wasm/CodeGen.zig | 169 ++++++++++++++++++++++++-------------- src/print_air.zig | 2 +- 2 files changed, 110 insertions(+), 61 deletions(-) diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index adfec99d49..ea8611fbe3 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -210,7 +210,12 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode { }, 32 => switch (args.valtype1.?) { .i64 => return .i64_store32, - .i32, .f32, .f64 => unreachable, + .i32 => return .i32_store, + .f32, .f64 => unreachable, + }, + 64 => switch (args.valtype1.?) { + .i64 => return .i64_store, + else => unreachable, }, else => unreachable, } @@ -529,6 +534,9 @@ global_error_set: std.StringHashMapUnmanaged(Module.ErrorInt), mir_instructions: std.MultiArrayList(Mir.Inst) = .{}, /// Contains extra data for MIR mir_extra: std.ArrayListUnmanaged(u32) = .{}, +/// When a function is executing, we store the the current stack pointer's value within this local. +/// This value is then used to restore the stack pointer to the original value at the return of the function. +initial_stack_value: WValue = .none, const InnerError = error{ OutOfMemory, @@ -686,9 +694,7 @@ fn emitWValue(self: *Self, val: WValue) InnerError!void { switch (val) { .multi_value => unreachable, // multi_value can never be written directly, and must be accessed individually .none, .mir_offset => {}, // no-op - .local => |idx| { - try self.addLabel(.local_get, idx); - }, + .local => |idx| try self.addLabel(.local_get, idx), .constant => |tv| try self.emitConstant(tv.val, tv.ty), // Creates a new constant on the stack } } @@ -884,6 +890,59 @@ pub fn gen(self: *Self, ty: Type, val: Value) InnerError!Result { } } +/// Retrieves the stack pointer's value from the global variable and stores +/// it in a local +fn initializeStack(self: *Self) !void { + // reserve space for immediate value + // get stack pointer global + // TODO: For now, we hardcode the stack pointer to index '0', + // once the linker is further implemented, we can replace this by inserting + // a relocation and have the linker resolve the correct index to the stack pointer global. + // NOTE: relocations of the type GLOBAL_INDEX_LEB are 5-bytes big + try self.addLabel(.global_get, 0); + + // Reserve a local to store the current stack pointer + // We can later use this local to set the stack pointer back to the value + // we have stored here. + self.initial_stack_value = try self.allocLocal(Type.initTag(.i32)); + + // save the value to the local + try self.addLabel(.local_set, self.initial_stack_value.local); +} + +/// Reads the stack pointer from `Context.initial_stack_value` and writes it +/// to the global stack pointer variable +fn restoreStackPointer(self: *Self) !void { + // only restore the pointer if it was initialized + if (self.initial_stack_value == .none) return; + // Get the original stack pointer's value + try self.emitWValue(self.initial_stack_value); + + // save its value in the global stack pointer + try self.addLabel(.global_set, 0); +} + +/// Moves the stack pointer by given `offset` +/// It does this by retrieving the stack pointer, subtracting `offset` and storing +/// the result back into the stack pointer. 
+fn moveStack(self: *Self, offset: u32, local: u32) !void { + if (offset == 0) return; + // Generates the following code: + // + // global.get 0 + // i32.const [offset] + // i32.sub + // global.set 0 + + // TODO: Rather than hardcode the stack pointer to position 0, + // have the linker resolve it. + try self.addLabel(.global_get, 0); + try self.addImm32(@bitCast(i32, offset)); + try self.addTag(.i32_sub); + try self.addLabel(.local_tee, local); + try self.addLabel(.global_set, 0); +} + fn genInst(self: *Self, inst: Air.Inst.Index) !WValue { const air_tags = self.air.instructions.items(.tag); return switch (air_tags[inst]) { @@ -963,6 +1022,7 @@ fn airRet(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = self.resolveInst(un_op); try self.emitWValue(operand); + try self.restoreStackPointer(); try self.addTag(.@"return"); return .none; } @@ -989,13 +1049,24 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } try self.addLabel(.call, target.link.wasm.symbol_index); - return .none; } fn airAlloc(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const elem_type = self.air.typeOfIndex(inst).elemType(); - return self.allocLocal(elem_type); + + // Initialize the stack + if (self.initial_stack_value == .none) { + try self.initializeStack(); + } + + const abi_size = elem_type.abiSize(self.target); + if (abi_size == 0) return WValue{ .none = {} }; + + const local = try self.allocLocal(elem_type); + try self.moveStack(@intCast(u32, abi_size), local.local); + + return local; } fn airStore(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1004,48 +1075,35 @@ fn airStore(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const lhs = self.resolveInst(bin_op.lhs); const rhs = self.resolveInst(bin_op.rhs); - switch (lhs) { - .multi_value => |multi_value| switch (rhs) { - // When assigning a value to a multi_value such as a struct, - // we simply assign the local_index to the rhs one. - // This allows us to update struct fields without having to individually - // set each local as each field's index will be calculated off the struct's base index - .multi_value => self.values.put(self.gpa, Air.refToIndex(bin_op.lhs).?, rhs) catch unreachable, // Instruction does not dominate all uses! - .constant, .none => { - // emit all values onto the stack if constant - try self.emitWValue(rhs); + // get lhs stack position + try self.emitWValue(lhs); + // get rhs value + try self.emitWValue(rhs); + + const ty = self.air.typeOf(bin_op.lhs); + const valtype = try self.typeToValtype(ty); + + const opcode = buildOpcode(.{ + .valtype1 = valtype, + .width = @intCast(u8, Type.abiSize(ty, self.target) * 8), // use bitsize instead of byte size + .op = .store, + }); + // store rhs value at stack pointer's location in memory + const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 0 }); + try self.addInst(.{ .tag = Mir.Inst.Tag.fromOpcode(opcode), .data = .{ .payload = mem_arg_index } }); - // for each local, pop the stack value into the local - // As the last element is on top of the stack, we must populate the locals - // in reverse. - var i: u32 = multi_value.count; - while (i > 0) : (i -= 1) { - try self.addLabel(.local_set, multi_value.index + i - 1); - } - }, - .local => { - // This can occur when we wrap a single value into a multi-value, - // such as wrapping a non-optional value into an optional. - // This means we must zero the null-tag, and set the payload. 
- assert(multi_value.count == 2); - // set payload - try self.emitWValue(rhs); - try self.addLabel(.local_set, multi_value.index + 1); - }, - else => unreachable, - }, - .local => |local| { - try self.emitWValue(rhs); - try self.addLabel(.local_set, local); - }, - else => unreachable, - } return .none; } fn airLoad(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; - return self.resolveInst(ty_op.operand); + const lhs = self.resolveInst(ty_op.operand); + + // load local's value from memory by its stack position + try self.emitWValue(lhs); + const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 0 }); + try self.addInst(.{ .tag = .i32_load, .data = .{ .payload = mem_arg_index } }); + return .none; } fn airArg(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1060,14 +1118,6 @@ fn airBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue { const lhs = self.resolveInst(bin_op.lhs); const rhs = self.resolveInst(bin_op.rhs); - // it's possible for both lhs and/or rhs to return an offset as well, - // in which case we return the first offset occurrence we find. - const offset = blk: { - if (lhs == .mir_offset) break :blk lhs.mir_offset; - if (rhs == .mir_offset) break :blk rhs.mir_offset; - break :blk self.mir_instructions.len; - }; - try self.emitWValue(lhs); try self.emitWValue(rhs); @@ -1078,7 +1128,11 @@ fn airBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue { .signedness = if (bin_ty.isSignedInt()) .signed else .unsigned, }); try self.addTag(Mir.Inst.Tag.fromOpcode(opcode)); - return WValue{ .mir_offset = offset }; + + // save the result in a temporary + const bin_local = try self.allocLocal(bin_ty); + try self.addLabel(.local_set, bin_local.local); + return bin_local; } fn airWrapBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue { @@ -1086,14 +1140,6 @@ fn airWrapBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue { const lhs = self.resolveInst(bin_op.lhs); const rhs = self.resolveInst(bin_op.rhs); - // it's possible for both lhs and/or rhs to return an offset as well, - // in which case we return the first offset occurrence we find. 
- const offset = blk: { - if (lhs == .mir_offset) break :blk lhs.mir_offset; - if (rhs == .mir_offset) break :blk rhs.mir_offset; - break :blk self.mir_instructions.len; - }; - try self.emitWValue(lhs); try self.emitWValue(rhs); @@ -1132,7 +1178,10 @@ fn airWrapBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue { return self.fail("TODO wasm: Integer wrapping for bitsizes larger than 64", .{}); } - return WValue{ .mir_offset = offset }; + // save the result in a temporary + const bin_local = try self.allocLocal(bin_ty); + try self.addLabel(.local_set, bin_local.local); + return bin_local; } fn emitConstant(self: *Self, val: Value, ty: Type) InnerError!void { diff --git a/src/print_air.zig b/src/print_air.zig index fb123b2ac6..dc6a1773e7 100644 --- a/src/print_air.zig +++ b/src/print_air.zig @@ -234,7 +234,7 @@ const Writer = struct { fn writeTyStr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { const ty_str = w.air.instructions.items(.data)[inst].ty_str; const name = w.zir.nullTerminatedString(ty_str.str); - try s.print("\"{}\", {}", .{ std.zig.fmtEscapes(name), ty_str.ty }); + try s.print("\"{}\", {}", .{ std.zig.fmtEscapes(name), w.air.getRefType(ty_str.ty) }); } fn writeBinOp(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { From b2221e564490421265f9e3b2e398a89bbdfb0516 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 18 Nov 2021 21:33:10 +0100 Subject: [PATCH 3/6] wasm: Implement structs stored on the stack By calculating the abi size of the struct, we move the stack pointer and store each field depending on its size (i.e. a 1-byte field will use i32.store8). This commit adds all required opcodes to perform those stores and loads. This also gets rid of `mir_offset` as we now save results of binary operations into locals and emit its result onto the stack within condbr instead. This makes everything a lot simpler but also more robust. In the future, we could look into an algorithm to re-use such locals. For struct fields we use the new `local_with_offset` tag. This stores the struct's stack pointer as well as the field's offset from that stack pointer. `allocLocal` will now always allocate a single local, using a given type. --- src/arch/wasm/CodeGen.zig | 298 ++++++++++++++++++++------------------ src/arch/wasm/Emit.zig | 22 +++ src/arch/wasm/Mir.zig | 116 ++++++++++++++- 3 files changed, 296 insertions(+), 140 deletions(-) diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index ea8611fbe3..175a0b6b71 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -28,16 +28,15 @@ const WValue = union(enum) { local: u32, /// Holds a memoized typed value constant: TypedValue, - /// Offset position in the list of MIR instructions - mir_offset: usize, - /// Used for variables that create multiple locals on the stack when allocated - /// such as structs and optionals. - multi_value: struct { - /// The index of the first local variable - index: u32, - /// The count of local variables this `WValue` consists of. - /// i.e. an ErrorUnion has a 'count' of 2. - count: u32, + /// Used for types that contains of multiple areas within + /// a memory region in the stack. + /// The local represents the position in the stack, + /// whereas the offset represents the offset from that position. 
+ local_with_offset: struct { + /// Index of the local variable + local: u32, + /// The offset from the local's stack position + offset: u32, }, }; @@ -187,7 +186,8 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode { }, 32 => switch (args.valtype1.?) { .i64 => if (args.signedness.? == .signed) return .i64_load32_s else return .i64_load32_u, - .i32, .f32, .f64 => unreachable, + .i32 => return .i32_load, + .f32, .f64 => unreachable, }, else => unreachable, } else switch (args.valtype1.?) { @@ -668,9 +668,10 @@ fn typeToValtype(self: *Self, ty: Type) InnerError!wasm.Valtype { .Bool, .Pointer, .ErrorSet, + .Struct, + .ErrorUnion, => wasm.Valtype.i32, - .Struct, .ErrorUnion, .Optional => unreachable, // Multi typed, must be handled individually. - else => |tag| self.fail("TODO - Wasm valtype for type '{s}'", .{tag}), + else => self.fail("TODO - Wasm valtype for type '{}'", .{ty}), }; } @@ -692,76 +693,21 @@ fn genBlockType(self: *Self, ty: Type) InnerError!u8 { /// Writes the bytecode depending on the given `WValue` in `val` fn emitWValue(self: *Self, val: WValue) InnerError!void { switch (val) { - .multi_value => unreachable, // multi_value can never be written directly, and must be accessed individually - .none, .mir_offset => {}, // no-op + .none => {}, // no-op + .local_with_offset => |with_off| try self.addLabel(.local_get, with_off.local), .local => |idx| try self.addLabel(.local_get, idx), .constant => |tv| try self.emitConstant(tv.val, tv.ty), // Creates a new constant on the stack } } -/// Creates one or multiple locals for a given `Type`. -/// Returns a corresponding `Wvalue` that can either be of tag -/// local or multi_value +/// Creates one locals for a given `Type`. +/// Returns a corresponding `Wvalue` with `local` as active tag fn allocLocal(self: *Self, ty: Type) InnerError!WValue { const initial_index = self.local_index; - switch (ty.zigTypeTag()) { - .Struct => { - // for each struct field, generate a local - const struct_data: *Module.Struct = ty.castTag(.@"struct").?.data; - const fields_len = @intCast(u32, struct_data.fields.count()); - try self.locals.ensureUnusedCapacity(self.gpa, fields_len); - for (struct_data.fields.values()) |*value| { - const val_type = try self.genValtype(value.ty); - self.locals.appendAssumeCapacity(val_type); - self.local_index += 1; - } - return WValue{ .multi_value = .{ - .index = initial_index, - .count = fields_len, - } }; - }, - .ErrorUnion => { - const payload_type = ty.errorUnionPayload(); - const val_type = try self.genValtype(payload_type); - - // we emit the error value as the first local, and the payload as the following. - // The first local is also used to find the index of the error and payload. - // - // TODO: Add support where the payload is a type that contains multiple locals such as a struct. 
- try self.locals.ensureUnusedCapacity(self.gpa, 2); - self.locals.appendAssumeCapacity(wasm.valtype(.i32)); // error values are always i32 - self.locals.appendAssumeCapacity(val_type); - self.local_index += 2; - - return WValue{ .multi_value = .{ - .index = initial_index, - .count = 2, - } }; - }, - .Optional => { - var opt_buf: Type.Payload.ElemType = undefined; - const child_type = ty.optionalChild(&opt_buf); - if (ty.isPtrLikeOptional()) { - return self.fail("TODO: wasm optional pointer", .{}); - } - - try self.locals.ensureUnusedCapacity(self.gpa, 2); - self.locals.appendAssumeCapacity(wasm.valtype(.i32)); // optional 'tag' for null-checking is always i32 - self.locals.appendAssumeCapacity(try self.genValtype(child_type)); - self.local_index += 2; - - return WValue{ .multi_value = .{ - .index = initial_index, - .count = 2, - } }; - }, - else => { - const valtype = try self.genValtype(ty); - try self.locals.append(self.gpa, valtype); - self.local_index += 1; - return WValue{ .local = initial_index }; - }, - } + const valtype = try self.genValtype(ty); + try self.locals.append(self.gpa, valtype); + self.local_index += 1; + return WValue{ .local = initial_index }; } fn genFunctype(self: *Self) InnerError!void { @@ -857,7 +803,7 @@ pub fn gen(self: *Self, ty: Type, val: Value) InnerError!Result { if (ty.sentinel()) |sentinel| { try self.code.appendSlice(payload.data); - switch (try self.gen(ty.elemType(), sentinel)) { + switch (try self.gen(ty.childType(), sentinel)) { .appended => return Result.appended, .externally_managed => |data| { try self.code.appendSlice(data); @@ -927,15 +873,8 @@ fn restoreStackPointer(self: *Self) !void { /// the result back into the stack pointer. fn moveStack(self: *Self, offset: u32, local: u32) !void { if (offset == 0) return; - // Generates the following code: - // - // global.get 0 - // i32.const [offset] - // i32.sub - // global.set 0 - // TODO: Rather than hardcode the stack pointer to position 0, - // have the linker resolve it. 
+ // have the linker resolve its relocation try self.addLabel(.global_get, 0); try self.addImm32(@bitCast(i32, offset)); try self.addTag(.i32_sub); @@ -1053,17 +992,18 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } fn airAlloc(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - const elem_type = self.air.typeOfIndex(inst).elemType(); + const child_type = self.air.typeOfIndex(inst).childType(); // Initialize the stack if (self.initial_stack_value == .none) { try self.initializeStack(); } - const abi_size = elem_type.abiSize(self.target); + const abi_size = child_type.abiSize(self.target); if (abi_size == 0) return WValue{ .none = {} }; - const local = try self.allocLocal(elem_type); + // local, containing the offset to the stack position + const local = try self.allocLocal(child_type); try self.moveStack(@intCast(u32, abi_size), local.local); return local; @@ -1074,43 +1014,100 @@ fn airStore(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const lhs = self.resolveInst(bin_op.lhs); const rhs = self.resolveInst(bin_op.rhs); + const ty = self.air.typeOf(bin_op.lhs).childType(); - // get lhs stack position + const offset: u32 = switch (lhs) { + .local_with_offset => |with_off| with_off.offset, + else => 0, + }; + + switch (ty.zigTypeTag()) { + .ErrorUnion, .Optional => { + var buf: Type.Payload.ElemType = undefined; + const payload_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionPayload() else ty.optionalChild(&buf); + const tag_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionSet() else Type.initTag(.i8); + const payload_offset = @intCast(u32, tag_ty.abiSize(self.target) / 8); + if (rhs == .constant) { + // constant will contain both tag and payload, + // so save those in 2 temporary locals before storing them + // in memory + try self.emitWValue(rhs); + const tag_local = try self.allocLocal(Type.initTag(.i32)); + const payload_local = try self.allocLocal(payload_ty); + + try self.addLabel(.local_set, payload_local.local); + try self.addLabel(.local_set, tag_local.local); + + try self.store(lhs, tag_local, tag_ty, 0); + try self.store(lhs, payload_local, payload_ty, payload_offset); + } else if (offset == 0) { + // tag is being set + try self.store(lhs, rhs, tag_ty, 0); + } else { + // payload is being set + try self.store(lhs, rhs, payload_ty, payload_offset); + } + }, + else => try self.store(lhs, rhs, ty, offset), + } + + return .none; +} + +fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void { try self.emitWValue(lhs); - // get rhs value try self.emitWValue(rhs); - - const ty = self.air.typeOf(bin_op.lhs); const valtype = try self.typeToValtype(ty); - const opcode = buildOpcode(.{ .valtype1 = valtype, .width = @intCast(u8, Type.abiSize(ty, self.target) * 8), // use bitsize instead of byte size .op = .store, }); - // store rhs value at stack pointer's location in memory - const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 0 }); - try self.addInst(.{ .tag = Mir.Inst.Tag.fromOpcode(opcode), .data = .{ .payload = mem_arg_index } }); - return .none; + // store rhs value at stack pointer's location in memory + const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = offset, .alignment = 0 }); + try self.addInst(.{ .tag = Mir.Inst.Tag.fromOpcode(opcode), .data = .{ .payload = mem_arg_index } }); } fn airLoad(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; - const lhs = self.resolveInst(ty_op.operand); + const operand 
= self.resolveInst(ty_op.operand); + const ty = self.air.getRefType(ty_op.ty); + return switch (ty.zigTypeTag()) { + .Struct, .ErrorUnion => operand, + else => self.load(operand, ty, 0), + }; +} + +fn load(self: *Self, operand: WValue, ty: Type, offset: u32) InnerError!WValue { // load local's value from memory by its stack position - try self.emitWValue(lhs); - const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 0 }); - try self.addInst(.{ .tag = .i32_load, .data = .{ .payload = mem_arg_index } }); - return .none; + try self.emitWValue(operand); + // Build the opcode with the right bitsize + const signedness: std.builtin.Signedness = if (ty.isUnsignedInt()) .unsigned else .signed; + const opcode = buildOpcode(.{ + .valtype1 = try self.typeToValtype(ty), + .width = @intCast(u8, Type.abiSize(ty, self.target) * 8), // use bitsize instead of byte size + .op = .load, + .signedness = signedness, + }); + + const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = offset, .alignment = 0 }); + try self.addInst(.{ + .tag = Mir.Inst.Tag.fromOpcode(opcode), + .data = .{ .payload = mem_arg_index }, + }); + + // store the result in a local + const result = try self.allocLocal(ty); + try self.addLabel(.local_set, result.local); + return result; } fn airArg(self: *Self, inst: Air.Inst.Index) InnerError!WValue { _ = inst; - // arguments share the index with locals - defer self.local_index += 1; - return WValue{ .local = self.local_index }; + defer self.arg_index += 1; + return self.args[self.arg_index]; } fn airBinOp(self: *Self, inst: Air.Inst.Index, op: Op) InnerError!WValue { @@ -1388,14 +1385,8 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { // insert blocks at the position of `offset` so // the condition can jump to it - const offset = switch (condition) { - .mir_offset => |offset| offset, - else => blk: { - const offset = self.mir_instructions.len; - try self.emitWValue(condition); - break :blk offset; - }, - }; + const offset = self.mir_instructions.len; + try self.emitWValue(condition); // result type is always noreturn, so use `block_empty` as type. 
try self.startBlock(.block, wasm.block_empty, offset); @@ -1415,10 +1406,6 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) InnerError!WValue { - // save offset, so potential conditions can insert blocks in front of - // the comparison that we can later jump back to - const offset = self.mir_instructions.len; - const data: Air.Inst.Data = self.air.instructions.items(.data)[inst]; const lhs = self.resolveInst(data.bin_op.lhs); const rhs = self.resolveInst(data.bin_op.rhs); @@ -1447,7 +1434,10 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) Inner .signedness = signedness, }); try self.addTag(Mir.Inst.Tag.fromOpcode(opcode)); - return WValue{ .mir_offset = offset }; + + const cmp_tmp = try self.allocLocal(lhs_ty); + try self.addLabel(.local_set, cmp_tmp.local); + return cmp_tmp; } fn airBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1468,7 +1458,6 @@ fn airBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { fn airNot(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; - const offset = self.mir_instructions.len; const operand = self.resolveInst(ty_op.operand); try self.emitWValue(operand); @@ -1478,7 +1467,10 @@ fn airNot(self: *Self, inst: Air.Inst.Index) InnerError!WValue { try self.addImm32(0); try self.addTag(.i32_eq); - return WValue{ .mir_offset = offset }; + // save the result in the local + const not_tmp = try self.allocLocal(self.air.getRefType(ty_op.ty)); + try self.addLabel(.local_set, not_tmp.local); + return not_tmp; } fn airBreakpoint(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1504,24 +1496,45 @@ fn airStructFieldPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const extra = self.air.extraData(Air.StructField, ty_pl.payload); const struct_ptr = self.resolveInst(extra.data.struct_operand); - return structFieldPtr(struct_ptr, extra.data.field_index); + const struct_ty = self.air.typeOf(extra.data.struct_operand).childType(); + const offset = std.math.cast(u32, struct_ty.structFieldOffset(extra.data.field_index, self.target)) catch { + return self.fail("Field type '{}' too big to fit into stack frame", .{ + struct_ty.structFieldType(extra.data.field_index), + }); + }; + return structFieldPtr(struct_ptr, offset); } + fn airStructFieldPtrIndex(self: *Self, inst: Air.Inst.Index, index: u32) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; const struct_ptr = self.resolveInst(ty_op.operand); - return structFieldPtr(struct_ptr, index); + const struct_ty = self.air.typeOf(ty_op.operand).childType(); + const offset = std.math.cast(u32, struct_ty.structFieldOffset(index, self.target)) catch { + return self.fail("Field type '{}' too big to fit into stack frame", .{ + struct_ty.structFieldType(index), + }); + }; + return structFieldPtr(struct_ptr, offset); } -fn structFieldPtr(struct_ptr: WValue, index: u32) InnerError!WValue { - return WValue{ .local = struct_ptr.multi_value.index + index }; + +fn structFieldPtr(struct_ptr: WValue, offset: u32) InnerError!WValue { + return WValue{ .local_with_offset = .{ .local = struct_ptr.local, .offset = offset } }; } fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue { if (self.liveness.isUnused(inst)) return WValue.none; const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; - const extra = 
self.air.extraData(Air.StructField, ty_pl.payload).data; - const struct_multivalue = self.resolveInst(extra.struct_operand).multi_value; - return WValue{ .local = struct_multivalue.index + extra.field_index }; + const struct_field = self.air.extraData(Air.StructField, ty_pl.payload).data; + const struct_ty = self.air.typeOf(struct_field.struct_operand); + const operand = self.resolveInst(struct_field.struct_operand); + const field_index = struct_field.field_index; + const field_ty = struct_ty.structFieldType(field_index); + if (!field_ty.hasCodeGenBits()) return WValue.none; + const offset = std.math.cast(u32, struct_ty.structFieldOffset(field_index, self.target)) catch { + return self.fail("Field type '{}' too big to fit into stack frame", .{field_ty}); + }; + return try self.load(operand, field_ty, offset); } fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1675,25 +1688,32 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { fn airIsErr(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!WValue { const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = self.resolveInst(un_op); - const offset = self.mir_instructions.len; + const err_ty = self.air.typeOf(un_op).errorUnionSet(); + + // load the error tag value + try self.emitWValue(operand); + const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 0 }); + try self.addInst(.{ + .tag = .i32_load, + .data = .{ .payload = mem_arg_index }, + }); - // load the error value which is positioned at multi_value's index - try self.emitWValue(.{ .local = operand.multi_value.index }); // Compare the error value with '0' try self.addImm32(0); try self.addTag(Mir.Inst.Tag.fromOpcode(opcode)); - return WValue{ .mir_offset = offset }; + const is_err_tmp = try self.allocLocal(err_ty); + try self.addLabel(.local_set, is_err_tmp.local); + return is_err_tmp; } fn airUnwrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = self.resolveInst(ty_op.operand); - // The index of multi_value contains the error code. To get the initial index of the payload we get - // the following index. Next, convert it to a `WValue.local` - // - // TODO: Check if payload is a type that requires a multi_value as well and emit that instead. i.e. a struct. - return WValue{ .local = operand.multi_value.index + 1 }; + const err_ty = self.air.typeOf(ty_op.operand); + const offset = @intCast(u32, err_ty.errorUnionSet().abiSize(self.target) / 8); + + return self.load(operand, self.air.getRefType(ty_op.ty), offset); } fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1728,8 +1748,8 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError! const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = self.resolveInst(un_op); - // load the null value which is positioned at multi_value's index - try self.emitWValue(.{ .local = operand.multi_value.index }); + // load the null value which is positioned at local_with_offset's index + try self.emitWValue(.{ .local = operand.local_with_offset.local }); try self.addImm32(0); try self.addTag(Mir.Inst.Tag.fromOpcode(opcode)); @@ -1743,7 +1763,7 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError! 
fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = self.resolveInst(ty_op.operand); - return WValue{ .local = operand.multi_value.index + 1 }; + return WValue{ .local = operand.local_with_offset.local + 1 }; } fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue { diff --git a/src/arch/wasm/Emit.zig b/src/arch/wasm/Emit.zig index 0b09a889f3..d70f0c5ee5 100644 --- a/src/arch/wasm/Emit.zig +++ b/src/arch/wasm/Emit.zig @@ -60,8 +60,30 @@ pub fn emitMir(emit: *Emit) InnerError!void { // memory instructions .i32_load => try emit.emitMemArg(tag, inst), + .i64_load => try emit.emitMemArg(tag, inst), + .f32_load => try emit.emitMemArg(tag, inst), + .f64_load => try emit.emitMemArg(tag, inst), + .i32_load8_s => try emit.emitMemArg(tag, inst), + .i32_load8_u => try emit.emitMemArg(tag, inst), + .i32_load16_s => try emit.emitMemArg(tag, inst), + .i32_load16_u => try emit.emitMemArg(tag, inst), + .i64_load8_s => try emit.emitMemArg(tag, inst), + .i64_load8_u => try emit.emitMemArg(tag, inst), + .i64_load16_s => try emit.emitMemArg(tag, inst), + .i64_load16_u => try emit.emitMemArg(tag, inst), + .i64_load32_s => try emit.emitMemArg(tag, inst), + .i64_load32_u => try emit.emitMemArg(tag, inst), .i32_store => try emit.emitMemArg(tag, inst), + .i64_store => try emit.emitMemArg(tag, inst), + .f32_store => try emit.emitMemArg(tag, inst), + .f64_store => try emit.emitMemArg(tag, inst), + .i32_store8 => try emit.emitMemArg(tag, inst), + .i32_store16 => try emit.emitMemArg(tag, inst), + .i64_store8 => try emit.emitMemArg(tag, inst), + .i64_store16 => try emit.emitMemArg(tag, inst), + .i64_store32 => try emit.emitMemArg(tag, inst), + // Instructions with an index that do not require relocations .local_get => try emit.emitLabel(tag, inst), .local_set => try emit.emitLabel(tag, inst), .local_tee => try emit.emitLabel(tag, inst), diff --git a/src/arch/wasm/Mir.zig b/src/arch/wasm/Mir.zig index 5e20b64d39..dbe894f212 100644 --- a/src/arch/wasm/Mir.zig +++ b/src/arch/wasm/Mir.zig @@ -97,11 +97,125 @@ pub const Inst = struct { /// /// Uses `payload` of type `MemArg`. i32_load = 0x28, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i64_load = 0x29, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + f32_load = 0x2A, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + f64_load = 0x2B, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i32_load8_s = 0x2C, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i32_load8_u = 0x2D, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i32_load16_s = 0x2E, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i32_load16_u = 0x2F, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. 
+ /// + /// Uses `payload` with type `MemArg` + i64_load8_s = 0x30, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i64_load8_u = 0x31, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i64_load16_s = 0x32, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i64_load16_u = 0x33, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i64_load32_s = 0x34, + /// Loads a value from memory onto the stack, based on the signedness + /// and bitsize of the type. + /// + /// Uses `payload` with type `MemArg` + i64_load32_u = 0x35, /// Pops 2 values from the stack, where the first value represents the value to write into memory /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. /// /// Uses `payload` of type `MemArg`. i32_store = 0x36, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. + /// + /// Uses `Payload` with type `MemArg` + i64_store = 0x37, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. + /// + /// Uses `Payload` with type `MemArg` + f32_store = 0x38, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. + /// + /// Uses `Payload` with type `MemArg` + f64_store = 0x39, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. + /// + /// Uses `Payload` with type `MemArg` + i32_store8 = 0x3A, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. + /// + /// Uses `Payload` with type `MemArg` + i32_store16 = 0x3B, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. 
+ /// + /// Uses `Payload` with type `MemArg` + i64_store8 = 0x3C, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. + /// + /// Uses `Payload` with type `MemArg` + i64_store16 = 0x3D, + /// Pops 2 values from the stack, where the first value represents the value to write into memory + /// and the second value represents the offset into memory where the value must be written to. + /// This opcode is typed and expects the stack value's type to be equal to this opcode's type. + /// + /// Uses `Payload` with type `MemArg` + i64_store32 = 0x3E, /// Returns the memory size in amount of pages. /// /// Uses `nop` @@ -247,7 +361,7 @@ pub const Inst = struct { /// From a given wasm opcode, returns a MIR tag. pub fn fromOpcode(opcode: std.wasm.Opcode) Tag { - return @intToEnum(Tag, @enumToInt(opcode)); + return @intToEnum(Tag, @enumToInt(opcode)); // Given `Opcode` is not present as a tag for MIR yet } /// Returns a wasm opcode from a given MIR tag. From 460b3d39eae8d294efbe2e5762ea38c93c352d63 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Thu, 18 Nov 2021 22:24:19 +0100 Subject: [PATCH 4/6] wasm: Implement error unions as well as returning them This implements basic calling convention resolving. This means that for types such as an error union, we will now allocate space on the stack to store the result. This result will then be saved in a temporary local at the callsite. --- src/arch/wasm/CodeGen.zig | 132 ++++++++++++++++++++++++++++++-------- 1 file changed, 107 insertions(+), 25 deletions(-) diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index 175a0b6b71..50085bc378 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -514,6 +514,9 @@ func_type_data: ArrayList(u8), /// NOTE: arguments share the index with locals therefore the first variable /// will have the index that comes after the last argument's index local_index: u32 = 0, +/// The index of the current argument. +/// Used to track which argument is being referenced in `airArg`. +arg_index: u32 = 0, /// If codegen fails, an error messages will be allocated and saved in `err_msg` err_msg: *Module.ErrorMsg, /// Current block depth. Used to calculate the relative difference between a break @@ -537,6 +540,13 @@ mir_extra: std.ArrayListUnmanaged(u32) = .{}, /// When a function is executing, we store the the current stack pointer's value within this local. /// This value is then used to restore the stack pointer to the original value at the return of the function. initial_stack_value: WValue = .none, +/// Arguments of this function declaration +/// This will be set after `resolveCallingConventionValues` +args: []WValue = undefined, +/// This will only be `.none` if the function returns void, or returns an immediate. +/// When it returns a pointer to the stack, the `.local` tag will be active and must be populated +/// before this function returns its execution to the caller. 
+return_value: WValue = .none, const InnerError = error{ OutOfMemory, @@ -736,17 +746,8 @@ fn genFunctype(self: *Self) InnerError!void { .Void, .NoReturn => try leb.writeULEB128(writer, @as(u32, 0)), .Struct => return self.fail("TODO: Implement struct as return type for wasm", .{}), .Optional => return self.fail("TODO: Implement optionals as return type for wasm", .{}), - .ErrorUnion => { - const val_type = try self.genValtype(return_type.errorUnionPayload()); - - // write down the amount of return values - try leb.writeULEB128(writer, @as(u32, 2)); - try writer.writeByte(wasm.valtype(.i32)); // error code is always an i32 integer. - try writer.writeByte(val_type); - }, else => { try leb.writeULEB128(writer, @as(u32, 1)); - // Can we maybe get the source index of the return type? const val_type = try self.genValtype(return_type); try writer.writeByte(val_type); }, @@ -757,6 +758,12 @@ pub fn genFunc(self: *Self) InnerError!Result { try self.genFunctype(); // TODO: check for and handle death of instructions + var cc_result = try self.resolveCallingConventionValues(self.decl.ty); + defer cc_result.deinit(self.gpa); + + self.args = cc_result.args; + self.return_value = cc_result.return_value; + // Generate MIR for function body try self.genBody(self.air.getMainBody()); // End of function body @@ -836,9 +843,67 @@ pub fn gen(self: *Self, ty: Type, val: Value) InnerError!Result { } } +const CallWValues = struct { + args: []WValue, + return_value: WValue, + + fn deinit(self: *CallWValues, gpa: *Allocator) void { + gpa.free(self.args); + self.* = undefined; + } +}; + +fn resolveCallingConventionValues(self: *Self, fn_ty: Type) InnerError!CallWValues { + const cc = fn_ty.fnCallingConvention(); + const param_types = try self.gpa.alloc(Type, fn_ty.fnParamLen()); + defer self.gpa.free(param_types); + fn_ty.fnParamTypes(param_types); + var result: CallWValues = .{ + .args = try self.gpa.alloc(WValue, param_types.len), + .return_value = .none, + }; + errdefer self.gpa.free(result.args); + switch (cc) { + .Naked => return result, + .Unspecified, .C => { + for (param_types) |ty, ty_index| { + if (!ty.hasCodeGenBits()) { + result.args[ty_index] = .{ .none = {} }; + continue; + } + + result.args[ty_index] = .{ .local = self.local_index }; + self.local_index += 1; + } + + const ret_ty = fn_ty.fnReturnType(); + switch (ret_ty.zigTypeTag()) { + .ErrorUnion => result.return_value = try self.allocLocal(Type.initTag(.i32)), + .Int, .Float, .Bool, .Void, .NoReturn => {}, + else => return self.fail("TODO: Implement function return type {}", .{ret_ty}), + } + + // Check if we store the result as a pointer to the stack rather than + // by value + if (result.return_value != .none) { + if (self.initial_stack_value == .none) try self.initializeStack(); + const offset = std.math.cast(u32, ret_ty.abiSize(self.target)) catch { + return self.fail("Return type '{}' too big for stack frame", .{ret_ty}); + }; + + try self.moveStack(offset, result.return_value.local); + } + }, + else => return self.fail("TODO implement function parameters for cc '{}' on wasm", .{cc}), + } + return result; +} + /// Retrieves the stack pointer's value from the global variable and stores /// it in a local +/// Asserts `initial_stack_value` is `.none` fn initializeStack(self: *Self) !void { + assert(self.initial_stack_value == .none); // reserve space for immediate value // get stack pointer global // TODO: For now, we hardcode the stack pointer to index '0', @@ -917,8 +982,8 @@ fn genInst(self: *Self, inst: Air.Inst.Index) !WValue { .dbg_stmt => 
WValue.none, .intcast => self.airIntcast(inst), - .is_err => self.airIsErr(inst, .i32_ne), - .is_non_err => self.airIsErr(inst, .i32_eq), + .is_err => self.airIsErr(inst, .i32_eq), + .is_non_err => self.airIsErr(inst, .i32_ne), .is_null => self.airIsNull(inst, .i32_ne), .is_non_null => self.airIsNull(inst, .i32_eq), @@ -960,7 +1025,14 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { fn airRet(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = self.resolveInst(un_op); - try self.emitWValue(operand); + // result must be stored in the stack and we return a pointer + // to the stack instead + if (self.return_value != .none) { + try self.store(self.return_value, operand, self.decl.ty.fnReturnType(), 0); + try self.emitWValue(self.return_value); + } else { + try self.emitWValue(operand); + } try self.restoreStackPointer(); try self.addTag(.@"return"); return .none; @@ -988,7 +1060,16 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } try self.addLabel(.call, target.link.wasm.symbol_index); - return .none; + + const ret_ty = target.ty.fnReturnType(); + switch (ret_ty.zigTypeTag()) { + .ErrorUnion => { + const result_local = try self.allocLocal(ret_ty); + try self.addLabel(.local_set, result_local.local); + return result_local; + }, + else => return WValue.none, + } } fn airAlloc(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1021,6 +1102,11 @@ fn airStore(self: *Self, inst: Air.Inst.Index) InnerError!WValue { else => 0, }; + try self.store(lhs, rhs, ty, offset); + return .none; +} + +fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void { switch (ty.zigTypeTag()) { .ErrorUnion, .Optional => { var buf: Type.Payload.ElemType = undefined; @@ -1039,22 +1125,18 @@ fn airStore(self: *Self, inst: Air.Inst.Index) InnerError!WValue { try self.addLabel(.local_set, tag_local.local); try self.store(lhs, tag_local, tag_ty, 0); - try self.store(lhs, payload_local, payload_ty, payload_offset); - } else if (offset == 0) { - // tag is being set - try self.store(lhs, rhs, tag_ty, 0); + return try self.store(lhs, payload_local, payload_ty, payload_offset); } else { - // payload is being set - try self.store(lhs, rhs, payload_ty, payload_offset); + // Load values from `rhs` stack position and store in `lhs` instead + const tag_local = try self.load(rhs, tag_ty, 0); + const payload_local = try self.load(rhs, payload_ty, payload_offset); + + try self.store(lhs, tag_local, tag_ty, 0); + return try self.store(lhs, payload_local, payload_ty, payload_offset); } }, - else => try self.store(lhs, rhs, ty, offset), + else => {}, } - - return .none; -} - -fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerError!void { try self.emitWValue(lhs); try self.emitWValue(rhs); const valtype = try self.typeToValtype(ty); From ec5220405b0218f892f6a1636ddd04d791017309 Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Fri, 19 Nov 2021 22:15:53 +0100 Subject: [PATCH 5/6] wasm: Implement optionals and ensure correct alignment Rather than writing the alignment in its natural form, wasm binaries encode the alignment of types as the exponent of a power of 2. So rather than performing this encoding during AIR->MIR, we do this while emitting MIR->binary encoding. This allows us to keep alignment logic to its natural form while doing calculations (Which is what we need during linking as well). We also implement optionals and pointers to an optional. 
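For a concrete sense of the memarg encoding referred to above (spec behaviour, not the exact code in Emit.zig): the binary format stores the exponent, so a natural alignment of 1, 2, 4 or 8 bytes is emitted as 0, 1, 2 or 3 respectively.

    ;; an 8-byte aligned i64 load at offset 8
    ;; text form:     i64.load offset=8 align=8
    ;; binary memarg: 0x03 0x08   (alignment exponent 3, then offset 8, both LEB128)
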
--- src/arch/wasm/CodeGen.zig | 78 ++++++++++++++++++++++++++++----------- src/arch/wasm/Emit.zig | 5 ++- 2 files changed, 61 insertions(+), 22 deletions(-) diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index 50085bc378..6d25393c4f 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -211,7 +211,8 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode { 32 => switch (args.valtype1.?) { .i64 => return .i64_store32, .i32 => return .i32_store, - .f32, .f64 => unreachable, + .f32 => return .f32_store, + .f64 => unreachable, }, 64 => switch (args.valtype1.?) { .i64 => return .i64_store, @@ -680,6 +681,7 @@ fn typeToValtype(self: *Self, ty: Type) InnerError!wasm.Valtype { .ErrorSet, .Struct, .ErrorUnion, + .Optional, => wasm.Valtype.i32, else => self.fail("TODO - Wasm valtype for type '{}'", .{ty}), }; @@ -878,7 +880,7 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) InnerError!CallWValu const ret_ty = fn_ty.fnReturnType(); switch (ret_ty.zigTypeTag()) { - .ErrorUnion => result.return_value = try self.allocLocal(Type.initTag(.i32)), + .ErrorUnion, .Optional => result.return_value = try self.allocLocal(Type.initTag(.i32)), .Int, .Float, .Bool, .Void, .NoReturn => {}, else => return self.fail("TODO: Implement function return type {}", .{ret_ty}), } @@ -1063,7 +1065,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ret_ty = target.ty.fnReturnType(); switch (ret_ty.zigTypeTag()) { - .ErrorUnion => { + .ErrorUnion, .Optional => { const result_local = try self.allocLocal(ret_ty); try self.addLabel(.local_set, result_local.local); return result_local; @@ -1111,14 +1113,15 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro .ErrorUnion, .Optional => { var buf: Type.Payload.ElemType = undefined; const payload_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionPayload() else ty.optionalChild(&buf); - const tag_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionSet() else Type.initTag(.i8); - const payload_offset = @intCast(u32, tag_ty.abiSize(self.target) / 8); + const tag_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionSet() else Type.initTag(.u8); + const payload_offset = @intCast(u32, tag_ty.abiSize(self.target)); + if (rhs == .constant) { // constant will contain both tag and payload, // so save those in 2 temporary locals before storing them // in memory try self.emitWValue(rhs); - const tag_local = try self.allocLocal(Type.initTag(.i32)); + const tag_local = try self.allocLocal(tag_ty); const payload_local = try self.allocLocal(payload_ty); try self.addLabel(.local_set, payload_local.local); @@ -1147,8 +1150,14 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro }); // store rhs value at stack pointer's location in memory - const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = offset, .alignment = 0 }); - try self.addInst(.{ .tag = Mir.Inst.Tag.fromOpcode(opcode), .data = .{ .payload = mem_arg_index } }); + const mem_arg_index = try self.addExtra(Mir.MemArg{ + .offset = offset, + .alignment = ty.abiAlignment(self.target), + }); + try self.addInst(.{ + .tag = Mir.Inst.Tag.fromOpcode(opcode), + .data = .{ .payload = mem_arg_index }, + }); } fn airLoad(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1157,8 +1166,11 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty = self.air.getRefType(ty_op.ty); return switch (ty.zigTypeTag()) { - .Struct, .ErrorUnion => operand, - else => self.load(operand, ty, 
0), + .Struct, .ErrorUnion, .Optional => operand, // pass as pointer + else => switch (operand) { + .local_with_offset => |with_offset| try self.load(operand, ty, with_offset.offset), + else => try self.load(operand, ty, 0), + }, }; } @@ -1174,7 +1186,10 @@ fn load(self: *Self, operand: WValue, ty: Type, offset: u32) InnerError!WValue { .signedness = signedness, }); - const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = offset, .alignment = 0 }); + const mem_arg_index = try self.addExtra(Mir.MemArg{ + .offset = offset, + .alignment = ty.abiAlignment(self.target), + }); try self.addInst(.{ .tag = Mir.Inst.Tag.fromOpcode(opcode), .data = .{ .payload = mem_arg_index }, @@ -1301,7 +1316,7 @@ fn emitConstant(self: *Self, val: Value, ty: Type) InnerError!void { // memory instruction followed by their memarg immediate // memarg ::== x:u32, y:u32 => {align x, offset y} - const extra_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 0 }); + const extra_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 4 }); try self.addInst(.{ .tag = .i32_load, .data = .{ .payload = extra_index } }); } else return self.fail("Wasm TODO: emitConstant for other const pointer tag {s}", .{val.tag()}); }, @@ -1774,7 +1789,10 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!W // load the error tag value try self.emitWValue(operand); - const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 0 }); + const mem_arg_index = try self.addExtra(Mir.MemArg{ + .offset = 0, + .alignment = err_ty.abiAlignment(self.target), + }); try self.addInst(.{ .tag = .i32_load, .data = .{ .payload = mem_arg_index }, @@ -1830,22 +1848,40 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError! 
const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = self.resolveInst(un_op); - // load the null value which is positioned at local_with_offset's index - try self.emitWValue(.{ .local = operand.local_with_offset.local }); + // load the null tag value + try self.emitWValue(operand); + const mem_arg_index = try self.addExtra(Mir.MemArg{ .offset = 0, .alignment = 1 }); + try self.addInst(.{ + .tag = .i32_load8_u, + .data = .{ .payload = mem_arg_index }, + }); + + // Compare the error value with '0' try self.addImm32(0); try self.addTag(Mir.Inst.Tag.fromOpcode(opcode)); - // we save the result in a new local - const local = try self.allocLocal(Type.initTag(.i32)); - try self.addLabel(.local_set, local.local); - - return local; + const is_null_tmp = try self.allocLocal(Type.initTag(.u8)); + try self.addLabel(.local_set, is_null_tmp.local); + return is_null_tmp; } fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = self.resolveInst(ty_op.operand); - return WValue{ .local = operand.local_with_offset.local + 1 }; + const opt_ty = self.air.typeOf(ty_op.operand); + + // For pointers we simply return its stack address, rather than + // loading its value + if (opt_ty.zigTypeTag() == .Pointer) { + return WValue{ .local_with_offset = .{ .local = operand.local, .offset = 1 } }; + } + + if (opt_ty.isPtrLikeOptional()) return operand; + + var buf: Type.Payload.ElemType = undefined; + const child_ty = opt_ty.optionalChild(&buf); + + return self.load(operand, child_ty, @as(u32, 1)); // null tag is 1 byte } fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue { diff --git a/src/arch/wasm/Emit.zig b/src/arch/wasm/Emit.zig index d70f0c5ee5..9506cef277 100644 --- a/src/arch/wasm/Emit.zig +++ b/src/arch/wasm/Emit.zig @@ -251,7 +251,10 @@ fn emitMemArg(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) !void { const extra_index = emit.mir.instructions.items(.data)[inst].payload; const mem_arg = emit.mir.extraData(Mir.MemArg, extra_index).data; try emit.code.append(@enumToInt(tag)); - try leb128.writeULEB128(emit.code.writer(), mem_arg.alignment); + + // wasm encodes alignment as power of 2, rather than natural alignment + const encoded_alignment = mem_arg.alignment >> 1; + try leb128.writeULEB128(emit.code.writer(), encoded_alignment); try leb128.writeULEB128(emit.code.writer(), mem_arg.offset); } From deb8d0765b46f75546f4342ad9078dbe6ad7b9be Mon Sep 17 00:00:00 2001 From: Luuk de Gram Date: Sat, 20 Nov 2021 21:28:39 +0100 Subject: [PATCH 6/6] wasm: Fix text cases and add pointer test cases Ensure all previous test cases are still passing, as well as add some basic tests for now for testing pointers to the stack. This means we can start implementing wasm's C ABI found at: https://github.com/WebAssembly/tool-conventions/blob/main/BasicCABI.md We also simplified the block logic by always using 'void' block types and instead writing the value to a local, which can then be referenced by continues instructions, as done currently by AIR. Besides this, we also no longer need to insert blocks at an offset, as we simply write the saved temporary after we create the block. 
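The resulting pattern looks roughly like this (illustrative only; the local name is a placeholder):

    block                  ;; block type is always 'void' (block_empty)
      i32.const 42         ;; compute the block's result value
      local.set $result    ;; write it to the block's local
      br 0                 ;; then branch out of the block
    end
    local.get $result      ;; later instructions read the value from that local
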
--- src/arch/wasm/CodeGen.zig | 165 ++++++++++++++++++++++++++------------ test/stage2/wasm.zig | 35 ++++++++ 2 files changed, 150 insertions(+), 50 deletions(-) diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index 6d25393c4f..831f7be34a 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -187,7 +187,13 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode { 32 => switch (args.valtype1.?) { .i64 => if (args.signedness.? == .signed) return .i64_load32_s else return .i64_load32_u, .i32 => return .i32_load, - .f32, .f64 => unreachable, + .f32 => return .f32_load, + .f64 => unreachable, + }, + 64 => switch (args.valtype1.?) { + .i64 => return .i64_load, + .f64 => return .f64_load, + else => unreachable, }, else => unreachable, } else switch (args.valtype1.?) { @@ -216,6 +222,7 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode { }, 64 => switch (args.valtype1.?) { .i64 => return .i64_store, + .f64 => return .f64_store, else => unreachable, }, else => unreachable, @@ -505,7 +512,10 @@ gpa: *mem.Allocator, /// Table to save `WValue`'s generated by an `Air.Inst` values: ValueTable, /// Mapping from Air.Inst.Index to block ids -blocks: std.AutoArrayHashMapUnmanaged(Air.Inst.Index, u32) = .{}, +blocks: std.AutoArrayHashMapUnmanaged(Air.Inst.Index, struct { + label: u32, + value: WValue, +}) = .{}, /// `bytes` contains the wasm bytecode belonging to the 'code' section. code: ArrayList(u8), /// Contains the generated function type bytecode for the current function @@ -984,8 +994,8 @@ fn genInst(self: *Self, inst: Air.Inst.Index) !WValue { .dbg_stmt => WValue.none, .intcast => self.airIntcast(inst), - .is_err => self.airIsErr(inst, .i32_eq), - .is_non_err => self.airIsErr(inst, .i32_ne), + .is_err => self.airIsErr(inst, .i32_ne), + .is_non_err => self.airIsErr(inst, .i32_eq), .is_null => self.airIsNull(inst, .i32_ne), .is_non_null => self.airIsNull(inst, .i32_eq), @@ -1065,12 +1075,12 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ret_ty = target.ty.fnReturnType(); switch (ret_ty.zigTypeTag()) { - .ErrorUnion, .Optional => { + .Void, .NoReturn => return WValue.none, + else => { const result_local = try self.allocLocal(ret_ty); try self.addLabel(.local_set, result_local.local); return result_local; }, - else => return WValue.none, } } @@ -1086,7 +1096,7 @@ fn airAlloc(self: *Self, inst: Air.Inst.Index) InnerError!WValue { if (abi_size == 0) return WValue{ .none = {} }; // local, containing the offset to the stack position - const local = try self.allocLocal(child_type); + const local = try self.allocLocal(Type.initTag(.i32)); // always pointer therefore i32 try self.moveStack(@intCast(u32, abi_size), local.local); return local; @@ -1114,30 +1124,65 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro var buf: Type.Payload.ElemType = undefined; const payload_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionPayload() else ty.optionalChild(&buf); const tag_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionSet() else Type.initTag(.u8); - const payload_offset = @intCast(u32, tag_ty.abiSize(self.target)); + const payload_offset = if (ty.zigTypeTag() == .ErrorUnion) + @intCast(u32, tag_ty.abiSize(self.target)) + else + @intCast(u32, ty.abiSize(self.target) - payload_ty.abiSize(self.target)); - if (rhs == .constant) { - // constant will contain both tag and payload, - // so save those in 2 temporary locals before storing them - // in memory - try self.emitWValue(rhs); - const tag_local = try 
self.allocLocal(tag_ty); - const payload_local = try self.allocLocal(payload_ty); + switch (rhs) { + .constant => { + // constant will contain both tag and payload, + // so save those in 2 temporary locals before storing them + // in memory + try self.emitWValue(rhs); + const tag_local = try self.allocLocal(tag_ty); + const payload_local = try self.allocLocal(payload_ty); - try self.addLabel(.local_set, payload_local.local); - try self.addLabel(.local_set, tag_local.local); + try self.addLabel(.local_set, payload_local.local); + try self.addLabel(.local_set, tag_local.local); - try self.store(lhs, tag_local, tag_ty, 0); - return try self.store(lhs, payload_local, payload_ty, payload_offset); - } else { - // Load values from `rhs` stack position and store in `lhs` instead - const tag_local = try self.load(rhs, tag_ty, 0); - const payload_local = try self.load(rhs, payload_ty, payload_offset); + try self.store(lhs, tag_local, tag_ty, 0); + return try self.store(lhs, payload_local, payload_ty, payload_offset); + }, + .local => { + // Load values from `rhs` stack position and store in `lhs` instead + const tag_local = try self.load(rhs, tag_ty, 0); + const payload_local = try self.load(rhs, payload_ty, payload_offset); - try self.store(lhs, tag_local, tag_ty, 0); - return try self.store(lhs, payload_local, payload_ty, payload_offset); + try self.store(lhs, tag_local, tag_ty, 0); + return try self.store(lhs, payload_local, payload_ty, payload_offset); + }, + .local_with_offset => |with_offset| { + const tag_local = try self.allocLocal(tag_ty); + try self.addImm32(0); + try self.store(lhs, tag_local, tag_ty, 0); + + return try self.store( + lhs, + .{ .local = with_offset.local }, + payload_ty, + with_offset.offset, + ); + }, + else => unreachable, } }, + .Struct => { + // we are copying a struct with its fields. + // Replace this with a wasm memcpy instruction once we support that feature. + const fields_len = ty.structFieldCount(); + var index: usize = 0; + while (index < fields_len) : (index += 1) { + const field_ty = ty.structFieldType(index); + if (!field_ty.hasCodeGenBits()) continue; + const field_offset = std.math.cast(u32, ty.structFieldOffset(index, self.target)) catch { + return self.fail("Field type '{}' too big to fit into stack frame", .{field_ty}); + }; + const field_local = try self.load(rhs, field_ty, field_offset); + try self.store(lhs, field_local, field_ty, field_offset); + } + return; + }, else => {}, } try self.emitWValue(lhs); @@ -1429,21 +1474,29 @@ fn airBlock(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const extra = self.air.extraData(Air.Block, ty_pl.payload); const body = self.air.extra[extra.end..][0..extra.data.body_len]; - try self.startBlock(.block, block_ty, null); + // if block_ty is non-empty, we create a register to store the temporary value + const block_result: WValue = if (block_ty != wasm.block_empty) + try self.allocLocal(self.air.getRefType(ty_pl.ty)) + else + WValue.none; + + try self.startBlock(.block, wasm.block_empty); // Here we set the current block idx, so breaks know the depth to jump // to when breaking out. 
- try self.blocks.putNoClobber(self.gpa, inst, self.block_depth); + try self.blocks.putNoClobber(self.gpa, inst, .{ + .label = self.block_depth, + .value = block_result, + }); try self.genBody(body); try self.endBlock(); - return .none; + return block_result; } /// appends a new wasm block to the code section and increases the `block_depth` by 1 -fn startBlock(self: *Self, block_tag: wasm.Opcode, valtype: u8, with_offset: ?usize) !void { +fn startBlock(self: *Self, block_tag: wasm.Opcode, valtype: u8) !void { self.block_depth += 1; - const offset = with_offset orelse self.mir_instructions.len; - try self.addInstAt(offset, .{ + try self.addInst(.{ .tag = Mir.Inst.Tag.fromOpcode(block_tag), .data = .{ .block_type = valtype }, }); @@ -1462,7 +1515,7 @@ fn airLoop(self: *Self, inst: Air.Inst.Index) InnerError!WValue { // result type of loop is always 'noreturn', meaning we can always // emit the wasm type 'block_empty'. - try self.startBlock(.loop, wasm.block_empty, null); + try self.startBlock(.loop, wasm.block_empty); try self.genBody(body); // breaking to the index of a loop block will continue the loop instead @@ -1480,13 +1533,10 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len]; // TODO: Handle death instructions for then and else body - // insert blocks at the position of `offset` so - // the condition can jump to it - const offset = self.mir_instructions.len; - try self.emitWValue(condition); - // result type is always noreturn, so use `block_empty` as type. - try self.startBlock(.block, wasm.block_empty, offset); + try self.startBlock(.block, wasm.block_empty); + // emit the conditional value + try self.emitWValue(condition); // we inserted the block in front of the condition // so now check if condition matches. If not, break outside this block @@ -1539,15 +1589,20 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) Inner fn airBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const br = self.air.instructions.items(.data)[inst].br; + const block = self.blocks.get(br.block_inst).?; // if operand has codegen bits we should break with a value if (self.air.typeOf(br.operand).hasCodeGenBits()) { try self.emitWValue(self.resolveInst(br.operand)); + + if (block.value != .none) { + try self.addLabel(.local_set, block.value.local); + } } // We map every block to its block index. // We then determine how far we have to jump to it by subtracting it from current block depth - const idx: u32 = self.block_depth - self.blocks.get(br.block_inst).?; + const idx: u32 = self.block_depth - block.label; try self.addLabel(.br, idx); return .none; @@ -1677,7 +1732,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } case_list.appendAssumeCapacity(.{ .values = values, .body = case_body }); - try self.startBlock(.block, blocktype, null); + try self.startBlock(.block, blocktype); } // When the highest and lowest values are seperated by '50', @@ -1690,7 +1745,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const else_body = self.air.extra[extra_index..][0..switch_br.data.else_body_len]; const has_else_body = else_body.len != 0; if (has_else_body) { - try self.startBlock(.block, blocktype, null); + try self.startBlock(.block, blocktype); } if (!is_sparse) { @@ -1698,7 +1753,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { // The value 'target' represents the index into the table. 
// Each index in the table represents a label to the branch // to jump to. - try self.startBlock(.block, blocktype, null); + try self.startBlock(.block, blocktype); try self.emitWValue(target); if (lowest < 0) { // since br_table works using indexes, starting from '0', we must ensure all values @@ -1754,7 +1809,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { try self.addLabel(.br_if, 0); } else { // in multi-value prongs we must check if any prongs match the target value. - try self.startBlock(.block, blocktype, null); + try self.startBlock(.block, blocktype); for (case.values) |value| { try self.emitWValue(target); try self.emitConstant(value.value, target_ty); @@ -1794,7 +1849,7 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!W .alignment = err_ty.abiAlignment(self.target), }); try self.addInst(.{ - .tag = .i32_load, + .tag = .i32_load16_u, .data = .{ .payload = mem_arg_index }, }); @@ -1811,14 +1866,14 @@ fn airUnwrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = self.resolveInst(ty_op.operand); const err_ty = self.air.typeOf(ty_op.operand); - const offset = @intCast(u32, err_ty.errorUnionSet().abiSize(self.target) / 8); - - return self.load(operand, self.air.getRefType(ty_op.ty), offset); + const offset = @intCast(u32, err_ty.errorUnionSet().abiSize(self.target)); + return self.load(operand, err_ty.errorUnionPayload(), offset); } fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; - return self.resolveInst(ty_op.operand); + _ = ty_op; + return self.fail("TODO: wasm airWrapErrUnionPayload", .{}); } fn airIntcast(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1880,8 +1935,9 @@ fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { var buf: Type.Payload.ElemType = undefined; const child_ty = opt_ty.optionalChild(&buf); + const offset = opt_ty.abiSize(self.target) - child_ty.abiSize(self.target); - return self.load(operand, child_ty, @as(u32, 1)); // null tag is 1 byte + return self.load(operand, child_ty, @intCast(u32, offset)); } fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1893,5 +1949,14 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue fn airWrapOptional(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_op = self.air.instructions.items(.data)[inst].ty_op; - return self.resolveInst(ty_op.operand); + const operand = self.resolveInst(ty_op.operand); + + const op_ty = self.air.typeOf(ty_op.operand); + const optional_ty = self.air.getRefType(ty_op.ty); + const offset = optional_ty.abiSize(self.target) - op_ty.abiSize(self.target); + + return WValue{ .local_with_offset = .{ + .local = operand.local, + .offset = @intCast(u32, offset), + } }; } diff --git a/test/stage2/wasm.zig b/test/stage2/wasm.zig index ab400f0a53..b28834e9b7 100644 --- a/test/stage2/wasm.zig +++ b/test/stage2/wasm.zig @@ -740,4 +740,39 @@ pub fn addCases(ctx: *TestContext) !void { \\} , "0\n"); } + + { + var case = ctx.exe("wasm pointers", wasi); + + case.addCompareOutput( + \\pub export fn _start() u32 { + \\ var x: u32 = 0; + \\ + \\ foo(&x); + \\ return x; + \\} + \\ + \\fn foo(x: *u32)void { + \\ x.* = 2; + \\} + , "2\n"); + + case.addCompareOutput( + \\pub export fn _start() u32 { + \\ var x: u32 = 0; + \\ + \\ foo(&x); + \\ bar(&x); + \\ return x; 
+ \\} + \\ + \\fn foo(x: *u32)void { + \\ x.* = 2; + \\} + \\ + \\fn bar(x: *u32) void { + \\ x.* += 2; + \\} + , "4\n"); + } }
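To illustrate where the C ABI work referenced above is heading, here is a sketch of a case one might add next, in the same style as the tests in this patch. It is not part of the patch and is not expected to pass until struct field access through pointers is fully covered by the backend:

    {
        var case = ctx.exe("wasm struct pointers", wasi);

        case.addCompareOutput(
            \\const Point = struct { x: u32, y: u32 };
            \\
            \\pub export fn _start() u32 {
            \\    var p: Point = .{ .x = 0, .y = 0 };
            \\    set(&p);
            \\    return p.x + p.y;
            \\}
            \\
            \\fn set(p: *Point) void {
            \\    p.x = 1;
            \\    p.y = 2;
            \\}
        , "3\n");
    }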