diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index 4af9b01257..78e71730df 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -2437,7 +2437,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { try cg.airArg(inst); - try cg.resetTemps(); + try cg.resetTemps(@enumFromInt(0)); cg.checkInvariantsAfterAirInst(); }, else => break, @@ -2477,7 +2477,6 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .shuffle => try cg.airShuffle(inst), .reduce => try cg.airReduce(inst), .reduce_optimized => try cg.airReduce(inst), - .aggregate_init => try cg.airAggregateInit(inst), // zig fmt: on .arg => if (cg.debug_output != .none) { @@ -80843,6 +80842,74 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { for (ops[1..]) |op| try op.die(cg); try res[0].finish(inst, &.{ty_op.operand}, ops[0..1], cg); }, + .aggregate_init => |air_tag| if (use_old) try cg.airAggregateInit(inst) else fallback: { + const ty_pl = air_datas[@intFromEnum(inst)].ty_pl; + const agg_ty = ty_pl.ty.toType(); + if ((agg_ty.isVector(zcu) and agg_ty.childType(zcu).toIntern() == .bool_type) or + (agg_ty.zigTypeTag(zcu) == .@"struct" and agg_ty.containerLayout(zcu) == .@"packed")) break :fallback try cg.airAggregateInit(inst); + var res = try cg.tempAllocMem(agg_ty); + const reset_index = cg.next_temp_index; + var bt = cg.liveness.iterateBigTomb(inst); + switch (ip.indexToKey(agg_ty.toIntern())) { + inline .array_type, .vector_type => |sequence_type| { + const elems: []const Air.Inst.Ref = @ptrCast(cg.air.extra[ty_pl.payload..][0..@intCast(sequence_type.len)]); + const elem_size = Type.fromInterned(sequence_type.child).abiSize(zcu); + var elem_disp: u31 = 0; + for (elems) |elem_ref| { + var elem = try cg.tempFromOperand(elem_ref, bt.feed()); + try res.write(&elem, .{ .disp = elem_disp }, cg); + try elem.die(cg); + try cg.resetTemps(reset_index); + elem_disp += @intCast(elem_size); + } + if (@hasField(@TypeOf(sequence_type), "sentinel") and sequence_type.sentinel != .none) { + var sentinel = try cg.tempFromValue(.fromInterned(sequence_type.sentinel)); + try res.write(&sentinel, .{ .disp = elem_disp }, cg); + try sentinel.die(cg); + } + }, + .struct_type => { + const loaded_struct = ip.loadStructType(agg_ty.toIntern()); + const elems: []const Air.Inst.Ref = @ptrCast(cg.air.extra[ty_pl.payload..][0..loaded_struct.field_types.len]); + switch (loaded_struct.layout) { + .auto, .@"extern" => { + for (elems, 0..) |elem_ref, field_index| { + const elem_dies = bt.feed(); + if (loaded_struct.fieldIsComptime(ip, field_index)) continue; + var elem = try cg.tempFromOperand(elem_ref, elem_dies); + try res.write(&elem, .{ .disp = @intCast(loaded_struct.offsets.get(ip)[field_index]) }, cg); + try elem.die(cg); + try cg.resetTemps(reset_index); + } + }, + .@"packed" => return cg.fail("failed to select {s} {}", .{ + @tagName(air_tag), + agg_ty.fmt(pt), + }), + } + }, + .tuple_type => |tuple_type| { + const elems: []const Air.Inst.Ref = @ptrCast(cg.air.extra[ty_pl.payload..][0..tuple_type.types.len]); + var elem_disp: u31 = 0; + for (elems, 0..) 
|elem_ref, field_index| { + const elem_dies = bt.feed(); + if (tuple_type.values.get(ip)[field_index] != .none) continue; + const field_type = Type.fromInterned(tuple_type.types.get(ip)[field_index]); + elem_disp = @intCast(field_type.abiAlignment(zcu).forward(elem_disp)); + var elem = try cg.tempFromOperand(elem_ref, elem_dies); + try res.write(&elem, .{ .disp = elem_disp }, cg); + try elem.die(cg); + try cg.resetTemps(reset_index); + elem_disp += @intCast(field_type.abiSize(zcu)); + } + }, + else => return cg.fail("failed to select {s} {}", .{ + @tagName(air_tag), + agg_ty.fmt(pt), + }), + } + try res.finish(inst, &.{}, &.{}, cg); + }, .union_init => if (use_old) try cg.airUnionInit(inst) else { const ty_pl = air_datas[@intFromEnum(inst)].ty_pl; const extra = cg.air.extraData(Air.UnionInit, ty_pl.payload).data; @@ -82199,14 +82266,14 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .c_va_start => try cg.airVaStart(inst), .work_item_id, .work_group_size, .work_group_id => unreachable, } - try cg.resetTemps(); + try cg.resetTemps(@enumFromInt(0)); cg.checkInvariantsAfterAirInst(); } verbose_tracking_log.debug("{}", .{cg.fmtTracking()}); } -fn genLazy(self: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void { - const pt = self.pt; +fn genLazy(cg: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void { + const pt = cg.pt; const zcu = pt.zcu; const ip = &zcu.intern_pool; switch (ip.indexToKey(lazy_sym.ty)) { @@ -82215,97 +82282,98 @@ fn genLazy(self: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void { wip_mir_log.debug("{}.@tagName:", .{enum_ty.fmt(pt)}); const param_regs = abi.getCAbiIntParamRegs(.auto); - const param_locks = self.register_manager.lockRegsAssumeUnused(2, param_regs[0..2].*); - defer for (param_locks) |lock| self.register_manager.unlockReg(lock); + const param_locks = cg.register_manager.lockRegsAssumeUnused(2, param_regs[0..2].*); + defer for (param_locks) |lock| cg.register_manager.unlockReg(lock); const ret_mcv: MCValue = .{ .register_pair = param_regs[0..2].* }; - const enum_mcv: MCValue = .{ .register = param_regs[0] }; + var enum_temp = try cg.tempInit(enum_ty, .{ .register = param_regs[0] }); - const data_reg = try self.register_manager.allocReg(null, abi.RegisterClass.gp); - const data_lock = self.register_manager.lockRegAssumeUnused(data_reg); - defer self.register_manager.unlockReg(data_lock); - try self.genLazySymbolRef(.lea, data_reg, .{ .kind = .const_data, .ty = lazy_sym.ty }); + const data_reg = try cg.register_manager.allocReg(null, abi.RegisterClass.gp); + const data_lock = cg.register_manager.lockRegAssumeUnused(data_reg); + defer cg.register_manager.unlockReg(data_lock); + try cg.genLazySymbolRef(.lea, data_reg, .{ .kind = .const_data, .ty = lazy_sym.ty }); var data_off: i32 = 0; + const reset_index = cg.next_temp_index; const tag_names = ip.loadEnumType(lazy_sym.ty).names; for (0..tag_names.len) |tag_index| { - var enum_temp = try self.tempInit(enum_ty, enum_mcv); - const tag_name_len = tag_names.get(ip)[tag_index].length(ip); - var tag_temp = try self.tempFromValue(try pt.enumValueFieldIndex(enum_ty, @intCast(tag_index))); - const cc_temp = enum_temp.cmpInts(.neq, &tag_temp, self) catch |err| switch (err) { + var tag_temp = try cg.tempFromValue(try pt.enumValueFieldIndex(enum_ty, @intCast(tag_index))); + const cc_temp = enum_temp.cmpInts(.neq, &tag_temp, cg) catch |err| switch (err) { error.SelectFailed => unreachable, else => |e| return e, }; - try enum_temp.die(self); - try tag_temp.die(self); - const 
skip_reloc = try self.asmJccReloc(cc_temp.tracking(self).short.eflags, undefined); - try cc_temp.die(self); - try self.resetTemps(); + try tag_temp.die(cg); + const skip_reloc = try cg.asmJccReloc(cc_temp.tracking(cg).short.eflags, undefined); + try cc_temp.die(cg); + try cg.resetTemps(reset_index); - try self.genSetReg( + try cg.genSetReg( ret_mcv.register_pair[0], .usize, .{ .register_offset = .{ .reg = data_reg, .off = data_off } }, .{}, ); - try self.genSetReg(ret_mcv.register_pair[1], .usize, .{ .immediate = tag_name_len }, .{}); - try self.asmOpOnly(.{ ._, .ret }); + try cg.genSetReg(ret_mcv.register_pair[1], .usize, .{ .immediate = tag_name_len }, .{}); + try cg.asmOpOnly(.{ ._, .ret }); - self.performReloc(skip_reloc); + cg.performReloc(skip_reloc); data_off += @intCast(tag_name_len + 1); } + try enum_temp.die(cg); - try self.genSetReg(ret_mcv.register_pair[0], .usize, .{ .immediate = 0 }, .{}); - try self.asmOpOnly(.{ ._, .ret }); + try cg.genSetReg(ret_mcv.register_pair[0], .usize, .{ .immediate = 0 }, .{}); + try cg.asmOpOnly(.{ ._, .ret }); }, .error_set_type => |error_set_type| { const err_ty: Type = .fromInterned(lazy_sym.ty); wip_mir_log.debug("{}.@errorCast:", .{err_ty.fmt(pt)}); const param_regs = abi.getCAbiIntParamRegs(.auto); - const param_locks = self.register_manager.lockRegsAssumeUnused(2, param_regs[0..2].*); - defer for (param_locks) |lock| self.register_manager.unlockReg(lock); + const param_locks = cg.register_manager.lockRegsAssumeUnused(2, param_regs[0..2].*); + defer for (param_locks) |lock| cg.register_manager.unlockReg(lock); const ret_mcv: MCValue = .{ .register = param_regs[0] }; const err_mcv: MCValue = .{ .register = param_regs[0] }; + var err_temp = try cg.tempInit(err_ty, err_mcv); const ExpectedContents = [32]Mir.Inst.Index; var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) = - std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa); + std.heap.stackFallback(@sizeOf(ExpectedContents), cg.gpa); const allocator = stack.get(); const relocs = try allocator.alloc(Mir.Inst.Index, error_set_type.names.len); defer allocator.free(relocs); + const reset_index = cg.next_temp_index; for (0.., relocs) |tag_index, *reloc| { - var err_temp = try self.tempInit(err_ty, err_mcv); - - var tag_temp = try self.tempInit(.anyerror, .{ + var tag_temp = try cg.tempInit(.anyerror, .{ .immediate = ip.getErrorValueIfExists(error_set_type.names.get(ip)[tag_index]).?, }); - const cc_temp = err_temp.cmpInts(.eq, &tag_temp, self) catch |err| switch (err) { + const cc_temp = err_temp.cmpInts(.eq, &tag_temp, cg) catch |err| switch (err) { error.SelectFailed => unreachable, else => |e| return e, }; - try err_temp.die(self); - try tag_temp.die(self); - reloc.* = try self.asmJccReloc(cc_temp.tracking(self).short.eflags, undefined); - try cc_temp.die(self); - try self.resetTemps(); + try tag_temp.die(cg); + reloc.* = try cg.asmJccReloc(cc_temp.tracking(cg).short.eflags, undefined); + try cc_temp.die(cg); + try cg.resetTemps(reset_index); } + try err_temp.die(cg); - try self.genCopy(.usize, ret_mcv, .{ .immediate = 0 }, .{}); - for (relocs) |reloc| self.performReloc(reloc); + try cg.genCopy(.usize, ret_mcv, .{ .immediate = 0 }, .{}); + for (relocs) |reloc| cg.performReloc(reloc); assert(ret_mcv.register == err_mcv.register); - try self.asmOpOnly(.{ ._, .ret }); + try cg.asmOpOnly(.{ ._, .ret }); }, - else => return self.fail( + else => return cg.fail( "TODO implement {s} for {}", .{ @tagName(lazy_sym.kind), 
Type.fromInterned(lazy_sym.ty).fmt(pt) }, ), } + try cg.resetTemps(@enumFromInt(0)); + cg.checkInvariantsAfterAirInst(); } fn getValue(self: *CodeGen, value: MCValue, inst: ?Air.Inst.Index) !void { @@ -93621,17 +93689,17 @@ fn lowerBlock(self: *CodeGen, inst: Air.Inst.Index, body: []const Air.Inst.Index } fn lowerSwitchBr( - self: *CodeGen, + cg: *CodeGen, inst: Air.Inst.Index, switch_br: Air.UnwrappedSwitch, condition: MCValue, condition_dies: bool, is_loop: bool, ) !void { - const zcu = self.pt.zcu; - const condition_ty = self.typeOf(switch_br.operand); - const condition_int_info = self.intInfo(condition_ty).?; - const condition_int_ty = try self.pt.intType(condition_int_info.signedness, condition_int_info.bits); + const zcu = cg.pt.zcu; + const condition_ty = cg.typeOf(switch_br.operand); + const condition_int_info = cg.intInfo(condition_ty).?; + const condition_int_ty = try cg.pt.intType(condition_int_info.signedness, condition_int_info.bits); const ExpectedContents = extern struct { liveness_deaths: [1 << 8 | 1]Air.Inst.Index, @@ -93639,15 +93707,15 @@ fn lowerSwitchBr( relocs: [1 << 6]Mir.Inst.Index, }; var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) = - std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa); + std.heap.stackFallback(@sizeOf(ExpectedContents), cg.gpa); const allocator = stack.get(); - const state = try self.saveState(); + const state = try cg.saveState(); - const liveness = try self.liveness.getSwitchBr(allocator, inst, switch_br.cases_len + 1); + const liveness = try cg.liveness.getSwitchBr(allocator, inst, switch_br.cases_len + 1); defer allocator.free(liveness.deaths); - if (!self.mod.pic and self.target.ofmt == .elf) table: { + if (!cg.mod.pic and cg.target.ofmt == .elf) table: { var prong_items: u32 = 0; var min: ?Value = null; var max: ?Value = null; @@ -93690,41 +93758,41 @@ fn lowerSwitchBr( if (prong_items < table_len >> 2) break :table; // no more than 75% waste const condition_index = if (condition_dies and condition.isModifiable()) condition else condition_index: { - const condition_index = try self.allocTempRegOrMem(condition_ty, true); - try self.genCopy(condition_ty, condition_index, condition, .{}); + const condition_index = try cg.allocTempRegOrMem(condition_ty, true); + try cg.genCopy(condition_ty, condition_index, condition, .{}); break :condition_index condition_index; }; - try self.spillEflagsIfOccupied(); - if (min.?.orderAgainstZero(zcu).compare(.neq)) try self.genBinOpMir( + try cg.spillEflagsIfOccupied(); + if (min.?.orderAgainstZero(zcu).compare(.neq)) try cg.genBinOpMir( .{ ._, .sub }, condition_ty, condition_index, .{ .air_ref = Air.internedToRef(min.?.toIntern()) }, ); const else_reloc = if (switch_br.else_body_len > 0) else_reloc: { - var cond_temp = try self.tempInit(condition_ty, condition_index); - var table_max_temp = try self.tempFromValue(try self.pt.intValue(condition_int_ty, table_len - 1)); - const cc_temp = cond_temp.cmpInts(.gt, &table_max_temp, self) catch |err| switch (err) { + var cond_temp = try cg.tempInit(condition_ty, condition_index); + var table_max_temp = try cg.tempFromValue(try cg.pt.intValue(condition_int_ty, table_len - 1)); + const cc_temp = cond_temp.cmpInts(.gt, &table_max_temp, cg) catch |err| switch (err) { error.SelectFailed => unreachable, else => |e| return e, }; - try cond_temp.die(self); - try table_max_temp.die(self); - const else_reloc = try self.asmJccReloc(cc_temp.tracking(self).short.eflags, undefined); - try cc_temp.die(self); + try 
cond_temp.die(cg); + try table_max_temp.die(cg); + const else_reloc = try cg.asmJccReloc(cc_temp.tracking(cg).short.eflags, undefined); + try cc_temp.die(cg); break :else_reloc else_reloc; } else undefined; - const table_start: u31 = @intCast(self.mir_table.items.len); + const table_start: u31 = @intCast(cg.mir_table.items.len); { const condition_index_reg = if (condition_index.isRegister()) condition_index.getReg().? else - try self.copyToTmpRegister(.usize, condition_index); - const condition_index_lock = self.register_manager.lockReg(condition_index_reg); - defer if (condition_index_lock) |lock| self.register_manager.unlockReg(lock); - try self.truncateRegister(condition_ty, condition_index_reg); - const ptr_size = @divExact(self.target.ptrBitWidth(), 8); - try self.asmMemory(.{ ._mp, .j }, .{ + try cg.copyToTmpRegister(.usize, condition_index); + const condition_index_lock = cg.register_manager.lockReg(condition_index_reg); + defer if (condition_index_lock) |lock| cg.register_manager.unlockReg(lock); + try cg.truncateRegister(condition_ty, condition_index_reg); + const ptr_size = @divExact(cg.target.ptrBitWidth(), 8); + try cg.asmMemory(.{ ._mp, .j }, .{ .base = .table, .mod = .{ .rm = .{ .size = .ptr, @@ -93735,32 +93803,32 @@ fn lowerSwitchBr( }); } const else_reloc_marker: u32 = 0; - assert(self.mir_instructions.len > else_reloc_marker); - try self.mir_table.appendNTimes(self.gpa, else_reloc_marker, table_len); - if (is_loop) try self.loop_switches.putNoClobber(self.gpa, inst, .{ + assert(cg.mir_instructions.len > else_reloc_marker); + try cg.mir_table.appendNTimes(cg.gpa, else_reloc_marker, table_len); + if (is_loop) try cg.loop_switches.putNoClobber(cg.gpa, inst, .{ .start = table_start, .len = table_len, .min = min.?, .else_relocs = if (switch_br.else_body_len > 0) .{ .forward = .empty } else .@"unreachable", }); defer if (is_loop) { - var loop_switch_data = self.loop_switches.fetchRemove(inst).?.value; + var loop_switch_data = cg.loop_switches.fetchRemove(inst).?.value; switch (loop_switch_data.else_relocs) { .@"unreachable", .backward => {}, - .forward => |*else_relocs| else_relocs.deinit(self.gpa), + .forward => |*else_relocs| else_relocs.deinit(cg.gpa), } }; var cases_it = switch_br.iterateCases(); while (cases_it.next()) |case| { { - const table = self.mir_table.items[table_start..][0..table_len]; + const table = cg.mir_table.items[table_start..][0..table_len]; for (case.items) |item| { const val = Value.fromInterned(item.toInterned().?); var val_space: Value.BigIntSpace = undefined; const val_bigint = val.toBigInt(&val_space, zcu); var index_bigint: std.math.big.int.Mutable = .{ .limbs = limbs, .positive = undefined, .len = undefined }; index_bigint.sub(val_bigint, min_bigint); - table[index_bigint.toConst().to(u10) catch unreachable] = @intCast(self.mir_instructions.len); + table[index_bigint.toConst().to(u10) catch unreachable] = @intCast(cg.mir_instructions.len); } for (case.ranges) |range| { var low_space: Value.BigIntSpace = undefined; @@ -93772,14 +93840,14 @@ fn lowerSwitchBr( const start = index_bigint.toConst().to(u10) catch unreachable; index_bigint.sub(high_bigint, min_bigint); const end = @as(u11, index_bigint.toConst().to(u10) catch unreachable) + 1; - @memset(table[start..end], @intCast(self.mir_instructions.len)); + @memset(table[start..end], @intCast(cg.mir_instructions.len)); } } - for (liveness.deaths[case.idx]) |operand| try self.processDeath(operand); + for (liveness.deaths[case.idx]) |operand| try cg.processDeath(operand); - try 
self.genBodyBlock(case.body); - try self.restoreState(state, &.{}, .{ + try cg.genBodyBlock(case.body); + try cg.restoreState(state, &.{}, .{ .emit_instructions = false, .update_tracking = true, .resurrect = true, @@ -93790,21 +93858,21 @@ fn lowerSwitchBr( const else_body = cases_it.elseBody(); const else_deaths = liveness.deaths.len - 1; - for (liveness.deaths[else_deaths]) |operand| try self.processDeath(operand); + for (liveness.deaths[else_deaths]) |operand| try cg.processDeath(operand); - self.performReloc(else_reloc); + cg.performReloc(else_reloc); if (is_loop) { - const loop_switch_data = self.loop_switches.getPtr(inst).?; - for (loop_switch_data.else_relocs.forward.items) |reloc| self.performReloc(reloc); - loop_switch_data.else_relocs.forward.deinit(self.gpa); - loop_switch_data.else_relocs = .{ .backward = @intCast(self.mir_instructions.len) }; + const loop_switch_data = cg.loop_switches.getPtr(inst).?; + for (loop_switch_data.else_relocs.forward.items) |reloc| cg.performReloc(reloc); + loop_switch_data.else_relocs.forward.deinit(cg.gpa); + loop_switch_data.else_relocs = .{ .backward = @intCast(cg.mir_instructions.len) }; } - for (self.mir_table.items[table_start..][0..table_len]) |*entry| if (entry.* == else_reloc_marker) { - entry.* = @intCast(self.mir_instructions.len); + for (cg.mir_table.items[table_start..][0..table_len]) |*entry| if (entry.* == else_reloc_marker) { + entry.* = @intCast(cg.mir_instructions.len); }; - try self.genBodyBlock(else_body); - try self.restoreState(state, &.{}, .{ + try cg.genBodyBlock(else_body); + try cg.restoreState(state, &.{}, .{ .emit_instructions = false, .update_tracking = true, .resurrect = true, @@ -93819,9 +93887,12 @@ fn lowerSwitchBr( const relocs = try allocator.alloc(Mir.Inst.Index, case.items.len + case.ranges.len); defer allocator.free(relocs); - try self.spillEflagsIfOccupied(); + var cond_temp = try cg.tempInit(condition_ty, condition); + const reset_index = cg.next_temp_index; + + try cg.spillEflagsIfOccupied(); for (case.items, relocs[0..case.items.len]) |item, *reloc| { - const item_mcv = try self.resolveInst(item); + const item_mcv = try cg.resolveInst(item); const cc: Condition = switch (condition) { .eflags => |cc| switch (item_mcv.immediate) { 0 => cc.negate(), @@ -93829,27 +93900,24 @@ fn lowerSwitchBr( else => unreachable, }, else => cc: { - var cond_temp = try self.tempInit(condition_ty, condition); - var item_temp = try self.tempInit(condition_ty, item_mcv); - const cc_temp = cond_temp.cmpInts(.eq, &item_temp, self) catch |err| switch (err) { + var item_temp = try cg.tempInit(condition_ty, item_mcv); + const cc_temp = cond_temp.cmpInts(.eq, &item_temp, cg) catch |err| switch (err) { error.SelectFailed => unreachable, else => |e| return e, }; - try cond_temp.die(self); - try item_temp.die(self); - const cc = cc_temp.tracking(self).short.eflags; - try cc_temp.die(self); - try self.resetTemps(); + try item_temp.die(cg); + const cc = cc_temp.tracking(cg).short.eflags; + try cc_temp.die(cg); + try cg.resetTemps(reset_index); break :cc cc; }, }; - reloc.* = try self.asmJccReloc(cc, undefined); + reloc.* = try cg.asmJccReloc(cc, undefined); } for (case.ranges, relocs[case.items.len..]) |range, *reloc| { - var cond_temp = try self.tempInit(condition_ty, condition); - const min_mcv = try self.resolveInst(range[0]); - const max_mcv = try self.resolveInst(range[1]); + const min_mcv = try cg.resolveInst(range[0]); + const max_mcv = try cg.resolveInst(range[1]); // `null` means always false. 
const lt_min = cc: switch (condition) { .eflags => |cc| switch (min_mcv.immediate) { @@ -93858,19 +93926,19 @@ fn lowerSwitchBr( else => unreachable, }, else => { - var min_temp = try self.tempInit(condition_ty, min_mcv); - const cc_temp = cond_temp.cmpInts(.lt, &min_temp, self) catch |err| switch (err) { + var min_temp = try cg.tempInit(condition_ty, min_mcv); + const cc_temp = cond_temp.cmpInts(.lt, &min_temp, cg) catch |err| switch (err) { error.SelectFailed => unreachable, else => |e| return e, }; - try min_temp.die(self); - const cc = cc_temp.tracking(self).short.eflags; - try cc_temp.die(self); + try min_temp.die(cg); + const cc = cc_temp.tracking(cg).short.eflags; + try cc_temp.die(cg); break :cc cc; }, }; const lt_min_reloc = if (lt_min) |cc| r: { - break :r try self.asmJccReloc(cc, undefined); + break :r try cg.asmJccReloc(cc, undefined); } else null; // `null` means always true. const lte_max = switch (condition) { @@ -93880,38 +93948,41 @@ fn lowerSwitchBr( else => unreachable, }, else => cc: { - var max_temp = try self.tempInit(condition_ty, max_mcv); - const cc_temp = cond_temp.cmpInts(.lte, &max_temp, self) catch |err| switch (err) { + var max_temp = try cg.tempInit(condition_ty, max_mcv); + const cc_temp = cond_temp.cmpInts(.lte, &max_temp, cg) catch |err| switch (err) { error.SelectFailed => unreachable, else => |e| return e, }; - try max_temp.die(self); - const cc = cc_temp.tracking(self).short.eflags; - try cc_temp.die(self); + try max_temp.die(cg); + const cc = cc_temp.tracking(cg).short.eflags; + try cc_temp.die(cg); break :cc cc; }, }; - try cond_temp.die(self); - try self.resetTemps(); + try cg.resetTemps(reset_index); // "Success" case is in `reloc`.... if (lte_max) |cc| { - reloc.* = try self.asmJccReloc(cc, undefined); + reloc.* = try cg.asmJccReloc(cc, undefined); } else { - reloc.* = try self.asmJmpReloc(undefined); + reloc.* = try cg.asmJmpReloc(undefined); } // ...and "fail" case falls through to next checks. - if (lt_min_reloc) |r| self.performReloc(r); + if (lt_min_reloc) |r| cg.performReloc(r); } - // The jump to skip this case if the conditions all failed. - const skip_case_reloc = try self.asmJmpReloc(undefined); + try cond_temp.die(cg); + try cg.resetTemps(@enumFromInt(0)); + cg.checkInvariantsAfterAirInst(); - for (liveness.deaths[case.idx]) |operand| try self.processDeath(operand); + // The jump to skip this case if the conditions all failed. + const skip_case_reloc = try cg.asmJmpReloc(undefined); + + for (liveness.deaths[case.idx]) |operand| try cg.processDeath(operand); // Relocate all success cases to the body we're about to generate. - for (relocs) |reloc| self.performReloc(reloc); - try self.genBodyBlock(case.body); - try self.restoreState(state, &.{}, .{ + for (relocs) |reloc| cg.performReloc(reloc); + try cg.genBodyBlock(case.body); + try cg.restoreState(state, &.{}, .{ .emit_instructions = false, .update_tracking = true, .resurrect = true, @@ -93919,16 +93990,16 @@ fn lowerSwitchBr( }); // Relocate the "skip" branch to fall through to the next case. 
- self.performReloc(skip_case_reloc); + cg.performReloc(skip_case_reloc); } if (switch_br.else_body_len > 0) { const else_body = cases_it.elseBody(); const else_deaths = liveness.deaths.len - 1; - for (liveness.deaths[else_deaths]) |operand| try self.processDeath(operand); + for (liveness.deaths[else_deaths]) |operand| try cg.processDeath(operand); - try self.genBodyBlock(else_body); - try self.restoreState(state, &.{}, .{ + try cg.genBodyBlock(else_body); + try cg.restoreState(state, &.{}, .{ .emit_instructions = false, .update_tracking = true, .resurrect = true, @@ -101339,6 +101410,7 @@ const Temp = struct { const val_mcv = val.tracking(cg).short; switch (val_mcv) { else => |mcv| std.debug.panic("{s}: {}\n", .{ @src().fn_name, mcv }), + .none => {}, .undef => if (opts.safe) { var dst_ptr = try cg.tempInit(.usize, dst.tracking(cg).short.address().offset(opts.disp)); var pat = try cg.tempInit(.u8, .{ .immediate = 0xaa }); @@ -105786,9 +105858,9 @@ const Temp = struct { }; }; -fn resetTemps(cg: *CodeGen) InnerError!void { +fn resetTemps(cg: *CodeGen, from_index: Temp.Index) InnerError!void { var any_valid = false; - for (0..@intFromEnum(cg.next_temp_index)) |temp_index| { + for (@intFromEnum(from_index)..@intFromEnum(cg.next_temp_index)) |temp_index| { const temp: Temp.Index = @enumFromInt(temp_index); if (temp.isValid(cg)) { any_valid = true; @@ -105800,7 +105872,7 @@ fn resetTemps(cg: *CodeGen) InnerError!void { cg.temp_type[temp_index] = undefined; } if (any_valid) return cg.fail("failed to kill all temps", .{}); - cg.next_temp_index = @enumFromInt(0); + cg.next_temp_index = from_index; } fn reuseTemp( @@ -105889,70 +105961,75 @@ fn tempMemFromValue(cg: *CodeGen, value: Value) InnerError!Temp { return cg.tempInit(value.typeOf(cg.pt.zcu), try cg.lowerUav(value)); } -fn tempFromOperand( - cg: *CodeGen, - inst: Air.Inst.Index, - op_index: Liveness.OperandInt, - op_ref: Air.Inst.Ref, - ignore_death: bool, -) InnerError!Temp { +fn tempFromOperand(cg: *CodeGen, op_ref: Air.Inst.Ref, op_dies: bool) InnerError!Temp { const zcu = cg.pt.zcu; const ip = &zcu.intern_pool; - if (ignore_death or !cg.liveness.operandDies(inst, op_index)) { - if (op_ref.toIndex()) |op_inst| return .{ .index = op_inst }; - const val = op_ref.toInterned().?; - const gop = try cg.const_tracking.getOrPut(cg.gpa, val); - if (!gop.found_existing) gop.value_ptr.* = .init(init: { - const const_mcv = try cg.genTypedValue(.fromInterned(val)); - switch (const_mcv) { - .lea_tlv => |tlv_sym| switch (cg.bin_file.tag) { - .elf, .macho => { - if (cg.mod.pic) { - try cg.spillRegisters(&.{ .rdi, .rax }); - } else { - try cg.spillRegisters(&.{.rax}); - } - const frame_index = try cg.allocFrameIndex(.init(.{ - .size = 8, - .alignment = .@"8", - })); - try cg.genSetMem( - .{ .frame = frame_index }, - 0, - .usize, - .{ .lea_symbol = .{ .sym_index = tlv_sym } }, - .{}, - ); - break :init .{ .load_frame = .{ .index = frame_index } }; - }, - else => break :init const_mcv, + if (op_dies) { + const temp_index = cg.next_temp_index; + const temp: Temp = .{ .index = temp_index.toIndex() }; + const op_inst = op_ref.toIndex().?; + const tracking = cg.getResolvedInstValue(op_inst); + temp_index.tracking(cg).* = tracking.*; + if (!cg.reuseTemp(temp.index, op_inst, tracking)) return .{ .index = op_ref.toIndex().? 
}; + cg.temp_type[@intFromEnum(temp_index)] = cg.typeOf(op_ref); + cg.next_temp_index = @enumFromInt(@intFromEnum(temp_index) + 1); + return temp; + } + + if (op_ref.toIndex()) |op_inst| return .{ .index = op_inst }; + const val = op_ref.toInterned().?; + const gop = try cg.const_tracking.getOrPut(cg.gpa, val); + if (!gop.found_existing) gop.value_ptr.* = .init(init: { + const const_mcv = try cg.genTypedValue(.fromInterned(val)); + switch (const_mcv) { + .lea_tlv => |tlv_sym| switch (cg.bin_file.tag) { + .elf, .macho => { + if (cg.mod.pic) { + try cg.spillRegisters(&.{ .rdi, .rax }); + } else { + try cg.spillRegisters(&.{.rax}); + } + const frame_index = try cg.allocFrameIndex(.init(.{ + .size = 8, + .alignment = .@"8", + })); + try cg.genSetMem( + .{ .frame = frame_index }, + 0, + .usize, + .{ .lea_symbol = .{ .sym_index = tlv_sym } }, + .{}, + ); + break :init .{ .load_frame = .{ .index = frame_index } }; }, else => break :init const_mcv, - } - }); - return cg.tempInit(.fromInterned(ip.typeOf(val)), gop.value_ptr.short); - } - - const temp_index = cg.next_temp_index; - const temp: Temp = .{ .index = temp_index.toIndex() }; - const op_inst = op_ref.toIndex().?; - const tracking = cg.getResolvedInstValue(op_inst); - temp_index.tracking(cg).* = tracking.*; - if (!cg.reuseTemp(temp.index, op_inst, tracking)) return .{ .index = op_ref.toIndex().? }; - cg.temp_type[@intFromEnum(temp_index)] = cg.typeOf(op_ref); - cg.next_temp_index = @enumFromInt(@intFromEnum(temp_index) + 1); - return temp; + }, + else => break :init const_mcv, + } + }); + return cg.tempInit(.fromInterned(ip.typeOf(val)), gop.value_ptr.short); } -inline fn tempsFromOperands(cg: *CodeGen, inst: Air.Inst.Index, op_refs: anytype) InnerError![op_refs.len]Temp { - var temps: [op_refs.len]Temp = undefined; - inline for (&temps, 0.., op_refs) |*temp, op_index, op_ref| { - temp.* = try cg.tempFromOperand(inst, op_index, op_ref, inline for (0..op_index) |prev_op_index| { - if (op_ref == op_refs[prev_op_index]) break true; - } else false); - } - return temps; +fn tempsFromOperandsInner( + cg: *CodeGen, + inst: Air.Inst.Index, + op_temps: []Temp, + op_refs: []const Air.Inst.Ref, +) InnerError!void { + for (op_temps, 0.., op_refs) |*op_temp, op_index, op_ref| op_temp.* = try cg.tempFromOperand(op_ref, for (op_refs[0..op_index]) |prev_op_ref| { + if (op_ref == prev_op_ref) break false; + } else cg.liveness.operandDies(inst, @intCast(op_index))); +} + +inline fn tempsFromOperands( + cg: *CodeGen, + inst: Air.Inst.Index, + op_refs: anytype, +) InnerError![op_refs.len]Temp { + var op_temps: [op_refs.len]Temp = undefined; + try cg.tempsFromOperandsInner(inst, &op_temps, &op_refs); + return op_temps; } const Operand = union(enum) { diff --git a/src/main.zig b/src/main.zig index 1075993846..3d6e96b91e 100644 --- a/src/main.zig +++ b/src/main.zig @@ -39,7 +39,7 @@ test { _ = Package; } -const thread_stack_size = 32 << 20; +const thread_stack_size = 50 << 20; pub const std_options: std.Options = .{ .wasiCwd = wasi_cwd,
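
The recurring pattern in this change: resetTemps now takes a starting Temp.Index, so a lowering loop can record a checkpoint (const reset_index = cg.next_temp_index;), create per-element temporaries, kill them, and roll back only to that checkpoint on each iteration, while longer-lived temporaries such as the aggregate result stay tracked; the old blanket resets become resetTemps(@enumFromInt(0)) at the end of each AIR instruction. Below is a minimal, self-contained sketch of that checkpoint/rollback discipline; TempPool, Checkpoint, and the test are hypothetical illustrations of the idea, not CodeGen.zig's actual types.

const std = @import("std");

const TempPool = struct {
    const Live = std.bit_set.IntegerBitSet(32);
    const Checkpoint = u5;

    /// Which temporary slots are currently live.
    live: Live = Live.initEmpty(),
    /// Next slot to hand out; analogue of cg.next_temp_index.
    next_index: u5 = 0,

    fn checkpoint(pool: *const TempPool) Checkpoint {
        return pool.next_index;
    }

    fn alloc(pool: *TempPool) u5 {
        const index = pool.next_index;
        pool.live.set(index);
        pool.next_index += 1;
        return index;
    }

    fn die(pool: *TempPool, index: u5) void {
        pool.live.unset(index);
    }

    /// Analogue of resetTemps(from_index): every temp at or above the
    /// checkpoint must already be dead; rewind allocation to the checkpoint.
    fn reset(pool: *TempPool, from: Checkpoint) error{LiveTemp}!void {
        var index = from;
        while (index < pool.next_index) : (index += 1) {
            if (pool.live.isSet(index)) return error.LiveTemp;
        }
        pool.next_index = from;
    }
};

test "per-iteration rollback keeps earlier temps alive" {
    var pool: TempPool = .{};
    const result = pool.alloc(); // lives across the loop, like the aggregate result temp
    const mark = pool.checkpoint(); // like reset_index = cg.next_temp_index
    for (0..4) |_| {
        const elem = pool.alloc(); // per-element temp
        pool.die(elem); // like elem.die(cg)
        try pool.reset(mark); // frees only temps created after mark
    }
    pool.die(result);
    try pool.reset(0); // like resetTemps(@enumFromInt(0)) after the instruction
    try std.testing.expectEqual(@as(u5, 0), pool.next_index);
}

Rolling back to a checkpoint rather than to index zero is what lets the new aggregate_init and lowerSwitchBr paths keep their long-lived temps (res, enum_temp, err_temp, cond_temp) alive across the per-element and per-case loops.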
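
For context, the table-based path in lowerSwitchBr (unchanged here apart from the self/cg rename) only fires when the prong values are dense: it spans min..max, counts how many table slots the items and ranges actually fill, and gives up when fewer than a quarter of the slots would be used (the "no more than 75% waste" check). A rough, standalone sketch of just that density test, with hypothetical names (Range, worthJumpTable) and ignoring the real code's additional checks such as the bounded table size:

// Separate sketch from the one above; assumes case items and ranges are
// already resolved to plain integers.
const std = @import("std");

const Range = struct { low: i64, high: i64 };

/// Returns true when a case set spanning min..max is dense enough that a
/// jump table is worth emitting (at most 75% of the entries unused).
fn worthJumpTable(ranges: []const Range) bool {
    if (ranges.len == 0) return false;
    var min: i64 = std.math.maxInt(i64);
    var max: i64 = std.math.minInt(i64);
    var prong_items: u64 = 0;
    for (ranges) |r| {
        min = @min(min, r.low);
        max = @max(max, r.high);
        prong_items += @intCast(r.high - r.low + 1);
    }
    const table_len: u64 = @intCast(max - min + 1);
    return prong_items >= table_len >> 2;
}

test "dense switches get a table, sparse ones do not" {
    // 7 of 8 slots used.
    try std.testing.expect(worthJumpTable(&.{
        .{ .low = 0, .high = 3 },
        .{ .low = 5, .high = 7 },
    }));
    // 2 of 1001 slots used.
    try std.testing.expect(!worthJumpTable(&.{
        .{ .low = 0, .high = 0 },
        .{ .low = 1000, .high = 1000 },
    }));
}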