From 14bda4130a9a7f8b529b12bc74a1d6caa71b9545 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 2 Sep 2025 23:10:18 -0700 Subject: [PATCH] llvm backend: remove canElideLoad mechanism --- src/Air/Liveness.zig | 495 ------------------------------------------- src/codegen/llvm.zig | 102 ++------- 2 files changed, 21 insertions(+), 576 deletions(-) diff --git a/src/Air/Liveness.zig b/src/Air/Liveness.zig index 65e78e089e..58169730e8 100644 --- a/src/Air/Liveness.zig +++ b/src/Air/Liveness.zig @@ -207,501 +207,6 @@ pub fn operandDies(l: Liveness, inst: Air.Inst.Index, operand: OperandInt) bool return (l.tomb_bits[usize_index] & mask) != 0; } -const OperandCategory = enum { - /// The operand lives on, but this instruction cannot possibly mutate memory. - none, - /// The operand lives on and this instruction can mutate memory. - write, - /// The operand dies at this instruction. - tomb, - /// The operand lives on, and this instruction is noreturn. - noret, - /// This instruction is too complicated for analysis, no information is available. - complex, -}; - -/// Given an instruction that we are examining, and an operand that we are looking for, -/// returns a classification. -pub fn categorizeOperand( - l: Liveness, - air: Air, - zcu: *Zcu, - inst: Air.Inst.Index, - operand: Air.Inst.Index, - ip: *const InternPool, -) OperandCategory { - const air_tags = air.instructions.items(.tag); - const air_datas = air.instructions.items(.data); - const operand_ref = operand.toRef(); - switch (air_tags[@intFromEnum(inst)]) { - .add, - .add_safe, - .add_wrap, - .add_sat, - .add_optimized, - .sub, - .sub_safe, - .sub_wrap, - .sub_sat, - .sub_optimized, - .mul, - .mul_safe, - .mul_wrap, - .mul_sat, - .mul_optimized, - .div_float, - .div_trunc, - .div_floor, - .div_exact, - .rem, - .mod, - .bit_and, - .bit_or, - .xor, - .cmp_lt, - .cmp_lte, - .cmp_eq, - .cmp_gte, - .cmp_gt, - .cmp_neq, - .bool_and, - .bool_or, - .array_elem_val, - .slice_elem_val, - .ptr_elem_val, - .shl, - .shl_exact, - .shl_sat, - .shr, - .shr_exact, - .min, - .max, - .div_float_optimized, - .div_trunc_optimized, - .div_floor_optimized, - .div_exact_optimized, - .rem_optimized, - .mod_optimized, - .neg_optimized, - .cmp_lt_optimized, - .cmp_lte_optimized, - .cmp_eq_optimized, - .cmp_gte_optimized, - .cmp_gt_optimized, - .cmp_neq_optimized, - => { - const o = air_datas[@intFromEnum(inst)].bin_op; - if (o.lhs == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - if (o.rhs == operand_ref) return matchOperandSmallIndex(l, inst, 1, .none); - return .none; - }, - - .store, - .store_safe, - .atomic_store_unordered, - .atomic_store_monotonic, - .atomic_store_release, - .atomic_store_seq_cst, - .set_union_tag, - .memset, - .memset_safe, - .memcpy, - .memmove, - => { - const o = air_datas[@intFromEnum(inst)].bin_op; - if (o.lhs == operand_ref) return matchOperandSmallIndex(l, inst, 0, .write); - if (o.rhs == operand_ref) return matchOperandSmallIndex(l, inst, 1, .write); - return .write; - }, - - .vector_store_elem => { - const o = air_datas[@intFromEnum(inst)].vector_store_elem; - const extra = air.extraData(Air.Bin, o.payload).data; - if (o.vector_ptr == operand_ref) return matchOperandSmallIndex(l, inst, 0, .write); - if (extra.lhs == operand_ref) return matchOperandSmallIndex(l, inst, 1, .none); - if (extra.rhs == operand_ref) return matchOperandSmallIndex(l, inst, 2, .none); - return .write; - }, - - .arg, - .alloc, - .inferred_alloc, - .inferred_alloc_comptime, - .ret_ptr, - .trap, - .breakpoint, - .repeat, - .switch_dispatch, - 
.dbg_stmt, - .dbg_empty_stmt, - .unreach, - .ret_addr, - .frame_addr, - .wasm_memory_size, - .err_return_trace, - .save_err_return_trace_index, - .runtime_nav_ptr, - .c_va_start, - .work_item_id, - .work_group_size, - .work_group_id, - => return .none, - - .not, - .bitcast, - .load, - .fpext, - .fptrunc, - .intcast, - .intcast_safe, - .trunc, - .optional_payload, - .optional_payload_ptr, - .wrap_optional, - .unwrap_errunion_payload, - .unwrap_errunion_err, - .unwrap_errunion_payload_ptr, - .unwrap_errunion_err_ptr, - .wrap_errunion_payload, - .wrap_errunion_err, - .slice_ptr, - .slice_len, - .ptr_slice_len_ptr, - .ptr_slice_ptr_ptr, - .struct_field_ptr_index_0, - .struct_field_ptr_index_1, - .struct_field_ptr_index_2, - .struct_field_ptr_index_3, - .array_to_slice, - .int_from_float, - .int_from_float_optimized, - .int_from_float_safe, - .int_from_float_optimized_safe, - .float_from_int, - .get_union_tag, - .clz, - .ctz, - .popcount, - .byte_swap, - .bit_reverse, - .splat, - .error_set_has_value, - .addrspace_cast, - .c_va_arg, - .c_va_copy, - .abs, - => { - const o = air_datas[@intFromEnum(inst)].ty_op; - if (o.operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - - .optional_payload_ptr_set, - .errunion_payload_ptr_set, - => { - const o = air_datas[@intFromEnum(inst)].ty_op; - if (o.operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .write); - return .write; - }, - - .is_null, - .is_non_null, - .is_null_ptr, - .is_non_null_ptr, - .is_err, - .is_non_err, - .is_err_ptr, - .is_non_err_ptr, - .is_named_enum_value, - .tag_name, - .error_name, - .sqrt, - .sin, - .cos, - .tan, - .exp, - .exp2, - .log, - .log2, - .log10, - .floor, - .ceil, - .round, - .trunc_float, - .neg, - .cmp_lt_errors_len, - .c_va_end, - => { - const o = air_datas[@intFromEnum(inst)].un_op; - if (o == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - - .ret, - .ret_safe, - .ret_load, - => { - const o = air_datas[@intFromEnum(inst)].un_op; - if (o == operand_ref) return matchOperandSmallIndex(l, inst, 0, .noret); - return .noret; - }, - - .set_err_return_trace => { - const o = air_datas[@intFromEnum(inst)].un_op; - if (o == operand_ref) return matchOperandSmallIndex(l, inst, 0, .write); - return .write; - }, - - .add_with_overflow, - .sub_with_overflow, - .mul_with_overflow, - .shl_with_overflow, - .ptr_add, - .ptr_sub, - .ptr_elem_ptr, - .slice_elem_ptr, - .slice, - => { - const ty_pl = air_datas[@intFromEnum(inst)].ty_pl; - const extra = air.extraData(Air.Bin, ty_pl.payload).data; - if (extra.lhs == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - if (extra.rhs == operand_ref) return matchOperandSmallIndex(l, inst, 1, .none); - return .none; - }, - - .dbg_var_ptr, - .dbg_var_val, - .dbg_arg_inline, - => { - const o = air_datas[@intFromEnum(inst)].pl_op.operand; - if (o == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - - .prefetch => { - const prefetch = air_datas[@intFromEnum(inst)].prefetch; - if (prefetch.ptr == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - - .call, .call_always_tail, .call_never_tail, .call_never_inline => { - const inst_data = air_datas[@intFromEnum(inst)].pl_op; - const callee = inst_data.operand; - const extra = air.extraData(Air.Call, inst_data.payload); - const args = @as([]const Air.Inst.Ref, @ptrCast(air.extra.items[extra.end..][0..extra.data.args_len])); - if (args.len + 1 <= bpi - 1) { - if (callee == 
operand_ref) return matchOperandSmallIndex(l, inst, 0, .write); - for (args, 0..) |arg, i| { - if (arg == operand_ref) return matchOperandSmallIndex(l, inst, @as(OperandInt, @intCast(i + 1)), .write); - } - return .write; - } - var bt = l.iterateBigTomb(inst); - if (bt.feed()) { - if (callee == operand_ref) return .tomb; - } else { - if (callee == operand_ref) return .write; - } - for (args) |arg| { - if (bt.feed()) { - if (arg == operand_ref) return .tomb; - } else { - if (arg == operand_ref) return .write; - } - } - return .write; - }, - .select => { - const pl_op = air_datas[@intFromEnum(inst)].pl_op; - const extra = air.extraData(Air.Bin, pl_op.payload).data; - if (pl_op.operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - if (extra.lhs == operand_ref) return matchOperandSmallIndex(l, inst, 1, .none); - if (extra.rhs == operand_ref) return matchOperandSmallIndex(l, inst, 2, .none); - return .none; - }, - .shuffle_one => { - const unwrapped = air.unwrapShuffleOne(zcu, inst); - if (unwrapped.operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - .shuffle_two => { - const unwrapped = air.unwrapShuffleTwo(zcu, inst); - if (unwrapped.operand_a == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - if (unwrapped.operand_b == operand_ref) return matchOperandSmallIndex(l, inst, 1, .none); - return .none; - }, - .reduce, .reduce_optimized => { - const reduce = air_datas[@intFromEnum(inst)].reduce; - if (reduce.operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - .cmp_vector, .cmp_vector_optimized => { - const extra = air.extraData(Air.VectorCmp, air_datas[@intFromEnum(inst)].ty_pl.payload).data; - if (extra.lhs == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - if (extra.rhs == operand_ref) return matchOperandSmallIndex(l, inst, 1, .none); - return .none; - }, - .aggregate_init => { - const ty_pl = air_datas[@intFromEnum(inst)].ty_pl; - const aggregate_ty = ty_pl.ty.toType(); - const len = @as(usize, @intCast(aggregate_ty.arrayLenIp(ip))); - const elements = @as([]const Air.Inst.Ref, @ptrCast(air.extra.items[ty_pl.payload..][0..len])); - - if (elements.len <= bpi - 1) { - for (elements, 0..) 
|elem, i| { - if (elem == operand_ref) return matchOperandSmallIndex(l, inst, @as(OperandInt, @intCast(i)), .none); - } - return .none; - } - - var bt = l.iterateBigTomb(inst); - for (elements) |elem| { - if (bt.feed()) { - if (elem == operand_ref) return .tomb; - } else { - if (elem == operand_ref) return .write; - } - } - return .write; - }, - .union_init => { - const extra = air.extraData(Air.UnionInit, air_datas[@intFromEnum(inst)].ty_pl.payload).data; - if (extra.init == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - .struct_field_ptr, .struct_field_val => { - const extra = air.extraData(Air.StructField, air_datas[@intFromEnum(inst)].ty_pl.payload).data; - if (extra.struct_operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - .field_parent_ptr => { - const extra = air.extraData(Air.FieldParentPtr, air_datas[@intFromEnum(inst)].ty_pl.payload).data; - if (extra.field_ptr == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - .cmpxchg_strong, .cmpxchg_weak => { - const extra = air.extraData(Air.Cmpxchg, air_datas[@intFromEnum(inst)].ty_pl.payload).data; - if (extra.ptr == operand_ref) return matchOperandSmallIndex(l, inst, 0, .write); - if (extra.expected_value == operand_ref) return matchOperandSmallIndex(l, inst, 1, .write); - if (extra.new_value == operand_ref) return matchOperandSmallIndex(l, inst, 2, .write); - return .write; - }, - .mul_add => { - const pl_op = air_datas[@intFromEnum(inst)].pl_op; - const extra = air.extraData(Air.Bin, pl_op.payload).data; - if (extra.lhs == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - if (extra.rhs == operand_ref) return matchOperandSmallIndex(l, inst, 1, .none); - if (pl_op.operand == operand_ref) return matchOperandSmallIndex(l, inst, 2, .none); - return .none; - }, - .atomic_load => { - const ptr = air_datas[@intFromEnum(inst)].atomic_load.ptr; - if (ptr == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - .atomic_rmw => { - const pl_op = air_datas[@intFromEnum(inst)].pl_op; - const extra = air.extraData(Air.AtomicRmw, pl_op.payload).data; - if (pl_op.operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .write); - if (extra.operand == operand_ref) return matchOperandSmallIndex(l, inst, 1, .write); - return .write; - }, - - .br => { - const br = air_datas[@intFromEnum(inst)].br; - if (br.operand == operand_ref) return matchOperandSmallIndex(l, operand, 0, .noret); - return .noret; - }, - .assembly => { - return .complex; - }, - .block, .dbg_inline_block => |tag| { - const ty_pl = air_datas[@intFromEnum(inst)].ty_pl; - const body: []const Air.Inst.Index = @ptrCast(switch (tag) { - inline .block, .dbg_inline_block => |comptime_tag| body: { - const extra = air.extraData(switch (comptime_tag) { - .block => Air.Block, - .dbg_inline_block => Air.DbgInlineBlock, - else => unreachable, - }, ty_pl.payload); - break :body air.extra.items[extra.end..][0..extra.data.body_len]; - }, - else => unreachable, - }); - - if (body.len == 1 and air_tags[@intFromEnum(body[0])] == .cond_br) { - // Peephole optimization for "panic-like" conditionals, which have - // one empty branch and another which calls a `noreturn` function. - // This allows us to infer that safety checks do not modify memory, - // as far as control flow successors are concerned. 
- - const inst_data = air_datas[@intFromEnum(body[0])].pl_op; - const cond_extra = air.extraData(Air.CondBr, inst_data.payload); - if (inst_data.operand == operand_ref and operandDies(l, body[0], 0)) - return .tomb; - - if (cond_extra.data.then_body_len > 2 or cond_extra.data.else_body_len > 2) - return .complex; - - const then_body: []const Air.Inst.Index = @ptrCast(air.extra.items[cond_extra.end..][0..cond_extra.data.then_body_len]); - const else_body: []const Air.Inst.Index = @ptrCast(air.extra.items[cond_extra.end + cond_extra.data.then_body_len ..][0..cond_extra.data.else_body_len]); - if (then_body.len > 1 and air_tags[@intFromEnum(then_body[1])] != .unreach) - return .complex; - if (else_body.len > 1 and air_tags[@intFromEnum(else_body[1])] != .unreach) - return .complex; - - var operand_live: bool = true; - for (&[_]Air.Inst.Index{ then_body[0], else_body[0] }) |cond_inst| { - if (l.categorizeOperand(air, zcu, cond_inst, operand, ip) == .tomb) - operand_live = false; - - switch (air_tags[@intFromEnum(cond_inst)]) { - .br => { // Breaks immediately back to block - const br = air_datas[@intFromEnum(cond_inst)].br; - if (br.block_inst != inst) - return .complex; - }, - .call => {}, // Calls a noreturn function - else => return .complex, - } - } - return if (operand_live) .none else .tomb; - } - - return .complex; - }, - - .@"try", - .try_cold, - .try_ptr, - .try_ptr_cold, - .loop, - .cond_br, - .switch_br, - .loop_switch_br, - => return .complex, - - .wasm_memory_grow => { - const pl_op = air_datas[@intFromEnum(inst)].pl_op; - if (pl_op.operand == operand_ref) return matchOperandSmallIndex(l, inst, 0, .none); - return .none; - }, - } -} - -fn matchOperandSmallIndex( - l: Liveness, - inst: Air.Inst.Index, - operand: OperandInt, - default: OperandCategory, -) OperandCategory { - if (operandDies(l, inst, operand)) { - return .tomb; - } else { - return default; - } -} - /// Higher level API. 
pub const CondBrSlices = struct { then_deaths: []const Air.Inst.Index, diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig index ebdcdb6bcb..fd9972d867 100644 --- a/src/codegen/llvm.zig +++ b/src/codegen/llvm.zig @@ -4980,8 +4980,8 @@ pub const FuncGen = struct { .breakpoint => try self.airBreakpoint(inst), .ret_addr => try self.airRetAddr(inst), .frame_addr => try self.airFrameAddress(inst), - .@"try" => try self.airTry(body[i..], false), - .try_cold => try self.airTry(body[i..], true), + .@"try" => try self.airTry(inst, false), + .try_cold => try self.airTry(inst, true), .try_ptr => try self.airTryPtr(inst, false), .try_ptr_cold => try self.airTryPtr(inst, true), .intcast => try self.airIntCast(inst, false), @@ -4989,7 +4989,7 @@ pub const FuncGen = struct { .trunc => try self.airTrunc(inst), .fptrunc => try self.airFptrunc(inst), .fpext => try self.airFpext(inst), - .load => try self.airLoad(body[i..]), + .load => try self.airLoad(inst), .not => try self.airNot(inst), .store => try self.airStore(inst, false), .store_safe => try self.airStore(inst, true), @@ -5045,7 +5045,7 @@ pub const FuncGen = struct { .atomic_store_seq_cst => try self.airAtomicStore(inst, .seq_cst), .struct_field_ptr => try self.airStructFieldPtr(inst), - .struct_field_val => try self.airStructFieldVal(body[i..]), + .struct_field_val => try self.airStructFieldVal(inst), .struct_field_ptr_index_0 => try self.airStructFieldPtrIndex(inst, 0), .struct_field_ptr_index_1 => try self.airStructFieldPtrIndex(inst, 1), @@ -5054,18 +5054,18 @@ pub const FuncGen = struct { .field_parent_ptr => try self.airFieldParentPtr(inst), - .array_elem_val => try self.airArrayElemVal(body[i..]), - .slice_elem_val => try self.airSliceElemVal(body[i..]), + .array_elem_val => try self.airArrayElemVal(inst), + .slice_elem_val => try self.airSliceElemVal(inst), .slice_elem_ptr => try self.airSliceElemPtr(inst), - .ptr_elem_val => try self.airPtrElemVal(body[i..]), + .ptr_elem_val => try self.airPtrElemVal(inst), .ptr_elem_ptr => try self.airPtrElemPtr(inst), - .optional_payload => try self.airOptionalPayload(body[i..]), + .optional_payload => try self.airOptionalPayload(inst), .optional_payload_ptr => try self.airOptionalPayloadPtr(inst), .optional_payload_ptr_set => try self.airOptionalPayloadPtrSet(inst), - .unwrap_errunion_payload => try self.airErrUnionPayload(body[i..], false), - .unwrap_errunion_payload_ptr => try self.airErrUnionPayload(body[i..], true), + .unwrap_errunion_payload => try self.airErrUnionPayload(inst, false), + .unwrap_errunion_payload_ptr => try self.airErrUnionPayload(inst, true), .unwrap_errunion_err => try self.airErrUnionErr(inst, false), .unwrap_errunion_err_ptr => try self.airErrUnionErr(inst, true), .errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst), @@ -6266,19 +6266,14 @@ pub const FuncGen = struct { // No need to reset the insert cursor since this instruction is noreturn. 
} - fn airTry(self: *FuncGen, body_tail: []const Air.Inst.Index, err_cold: bool) !Builder.Value { - const pt = self.ng.pt; - const zcu = pt.zcu; - const inst = body_tail[0]; + fn airTry(self: *FuncGen, inst: Air.Inst.Index, err_cold: bool) !Builder.Value { const pl_op = self.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const err_union = try self.resolveInst(pl_op.operand); const extra = self.air.extraData(Air.Try, pl_op.payload); const body: []const Air.Inst.Index = @ptrCast(self.air.extra.items[extra.end..][0..extra.data.body_len]); const err_union_ty = self.typeOf(pl_op.operand); - const payload_ty = self.typeOfIndex(inst); - const can_elide_load = if (isByRef(payload_ty, zcu)) self.canElideLoad(body_tail) else false; const is_unused = self.liveness.isUnused(inst); - return lowerTry(self, err_union, body, err_union_ty, false, can_elide_load, is_unused, err_cold); + return lowerTry(self, err_union, body, err_union_ty, false, false, is_unused, err_cold); } fn airTryPtr(self: *FuncGen, inst: Air.Inst.Index, err_cold: bool) !Builder.Value { @@ -6824,11 +6819,10 @@ pub const FuncGen = struct { return self.wip.gepStruct(slice_llvm_ty, slice_ptr, index, ""); } - fn airSliceElemVal(self: *FuncGen, body_tail: []const Air.Inst.Index) !Builder.Value { + fn airSliceElemVal(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { const o = self.ng.object; const pt = self.ng.pt; const zcu = pt.zcu; - const inst = body_tail[0]; const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; const slice_ty = self.typeOf(bin_op.lhs); const slice = try self.resolveInst(bin_op.lhs); @@ -6838,9 +6832,6 @@ pub const FuncGen = struct { const base_ptr = try self.wip.extractValue(slice, &.{0}, ""); const ptr = try self.wip.gep(.inbounds, llvm_elem_ty, base_ptr, &.{index}, ""); if (isByRef(elem_ty, zcu)) { - if (self.canElideLoad(body_tail)) - return ptr; - self.maybeMarkAllowZeroAccess(slice_ty.ptrInfo(zcu)); const slice_align = (slice_ty.ptrAlignment(zcu).min(elem_ty.abiAlignment(zcu))).toLlvm(); @@ -6867,11 +6858,10 @@ pub const FuncGen = struct { return self.wip.gep(.inbounds, llvm_elem_ty, base_ptr, &.{index}, ""); } - fn airArrayElemVal(self: *FuncGen, body_tail: []const Air.Inst.Index) !Builder.Value { + fn airArrayElemVal(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { const o = self.ng.object; const pt = self.ng.pt; const zcu = pt.zcu; - const inst = body_tail[0]; const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; const array_ty = self.typeOf(bin_op.lhs); @@ -6884,9 +6874,7 @@ pub const FuncGen = struct { try o.builder.intValue(try o.lowerType(pt, Type.usize), 0), rhs, }; if (isByRef(elem_ty, zcu)) { - const elem_ptr = - try self.wip.gep(.inbounds, array_llvm_ty, array_llvm_val, &indices, ""); - if (canElideLoad(self, body_tail)) return elem_ptr; + const elem_ptr = try self.wip.gep(.inbounds, array_llvm_ty, array_llvm_val, &indices, ""); const elem_alignment = elem_ty.abiAlignment(zcu).toLlvm(); return self.loadByRef(elem_ptr, elem_ty, elem_alignment, .normal); } else { @@ -6900,11 +6888,10 @@ pub const FuncGen = struct { return self.wip.extractElement(array_llvm_val, rhs, ""); } - fn airPtrElemVal(self: *FuncGen, body_tail: []const Air.Inst.Index) !Builder.Value { + fn airPtrElemVal(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { const o = self.ng.object; const pt = self.ng.pt; const zcu = pt.zcu; - const inst = body_tail[0]; const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; const ptr_ty = 
self.typeOf(bin_op.lhs); const elem_ty = ptr_ty.childType(zcu); @@ -6918,10 +6905,7 @@ pub const FuncGen = struct { else &.{rhs}, ""); if (isByRef(elem_ty, zcu)) { - if (self.canElideLoad(body_tail)) return ptr; - self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu)); - const ptr_align = (ptr_ty.ptrAlignment(zcu).min(elem_ty.abiAlignment(zcu))).toLlvm(); return self.loadByRef(ptr, elem_ty, ptr_align, if (ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal); } @@ -6974,11 +6958,10 @@ pub const FuncGen = struct { return self.fieldPtr(inst, struct_ptr, struct_ptr_ty, field_index); } - fn airStructFieldVal(self: *FuncGen, body_tail: []const Air.Inst.Index) !Builder.Value { + fn airStructFieldVal(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { const o = self.ng.object; const pt = self.ng.pt; const zcu = pt.zcu; - const inst = body_tail[0]; const ty_pl = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const struct_field = self.air.extraData(Air.StructField, ty_pl.payload).data; const struct_ty = self.typeOf(struct_field.struct_operand); @@ -7052,9 +7035,6 @@ pub const FuncGen = struct { .flags = .{ .alignment = alignment }, }); if (isByRef(field_ty, zcu)) { - if (canElideLoad(self, body_tail)) - return field_ptr; - assert(alignment != .none); const field_alignment = alignment.toLlvm(); return self.loadByRef(field_ptr, field_ty, field_alignment, .normal); @@ -7070,7 +7050,6 @@ pub const FuncGen = struct { try self.wip.gepStruct(union_llvm_ty, struct_llvm_val, payload_index, ""); const payload_alignment = layout.payload_align.toLlvm(); if (isByRef(field_ty, zcu)) { - if (canElideLoad(self, body_tail)) return field_ptr; return self.loadByRef(field_ptr, field_ty, payload_alignment, .normal); } else { return self.loadTruncate(.normal, field_ty, field_ptr, payload_alignment); @@ -7829,11 +7808,10 @@ pub const FuncGen = struct { return self.wip.gepStruct(optional_llvm_ty, operand, 0, ""); } - fn airOptionalPayload(self: *FuncGen, body_tail: []const Air.Inst.Index) !Builder.Value { + fn airOptionalPayload(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { const o = self.ng.object; const pt = self.ng.pt; const zcu = pt.zcu; - const inst = body_tail[0]; const ty_op = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_op; const operand = try self.resolveInst(ty_op.operand); const optional_ty = self.typeOf(ty_op.operand); @@ -7846,19 +7824,13 @@ pub const FuncGen = struct { } const opt_llvm_ty = try o.lowerType(pt, optional_ty); - const can_elide_load = if (isByRef(payload_ty, zcu)) self.canElideLoad(body_tail) else false; - return self.optPayloadHandle(opt_llvm_ty, operand, optional_ty, can_elide_load); + return self.optPayloadHandle(opt_llvm_ty, operand, optional_ty, false); } - fn airErrUnionPayload( - self: *FuncGen, - body_tail: []const Air.Inst.Index, - operand_is_ptr: bool, - ) !Builder.Value { + fn airErrUnionPayload(self: *FuncGen, inst: Air.Inst.Index, operand_is_ptr: bool) !Builder.Value { const o = self.ng.object; const pt = self.ng.pt; const zcu = pt.zcu; - const inst = body_tail[0]; const ty_op = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_op; const operand = try self.resolveInst(ty_op.operand); const operand_ty = self.typeOf(ty_op.operand); @@ -7877,7 +7849,6 @@ pub const FuncGen = struct { const payload_alignment = payload_ty.abiAlignment(zcu).toLlvm(); const payload_ptr = try self.wip.gepStruct(err_union_llvm_ty, operand, offset, ""); if (isByRef(payload_ty, zcu)) { - if (self.canElideLoad(body_tail)) return payload_ptr; return 
self.loadByRef(payload_ptr, payload_ty, payload_alignment, .normal); } const payload_llvm_ty = err_union_llvm_ty.structFields(&o.builder)[offset]; @@ -9740,45 +9711,14 @@ pub const FuncGen = struct { return .none; } - /// As an optimization, we want to avoid unnecessary copies of isByRef=true - /// types. Here, we scan forward in the current block, looking to see if - /// this load dies before any side effects occur. In such case, we can - /// safely return the operand without making a copy. - /// - /// The first instruction of `body_tail` is the one whose copy we want to elide. - fn canElideLoad(fg: *FuncGen, body_tail: []const Air.Inst.Index) bool { - const zcu = fg.ng.pt.zcu; - const ip = &zcu.intern_pool; - for (body_tail[1..]) |body_inst| { - switch (fg.liveness.categorizeOperand(fg.air, zcu, body_inst, body_tail[0], ip)) { - .none => continue, - .write, .noret, .complex => return false, - .tomb => return true, - } - } - // The only way to get here is to hit the end of a loop instruction - // (implicit repeat). - return false; - } - - fn airLoad(fg: *FuncGen, body_tail: []const Air.Inst.Index) !Builder.Value { + fn airLoad(fg: *FuncGen, inst: Air.Inst.Index) !Builder.Value { const pt = fg.ng.pt; const zcu = pt.zcu; - const inst = body_tail[0]; const ty_op = fg.air.instructions.items(.data)[@intFromEnum(inst)].ty_op; const ptr_ty = fg.typeOf(ty_op.operand); const ptr_info = ptr_ty.ptrInfo(zcu); const ptr = try fg.resolveInst(ty_op.operand); - - elide: { - if (ptr_info.flags.alignment != .none) break :elide; - if (!isByRef(Type.fromInterned(ptr_info.child), zcu)) break :elide; - if (!canElideLoad(fg, body_tail)) break :elide; - return ptr; - } - fg.maybeMarkAllowZeroAccess(ptr_info); - return fg.load(ptr, ptr_ty); }
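
The deleted mechanism, per its own doc comment above, scanned forward in the current AIR block
after a by-ref load and returned the source pointer unchanged whenever Liveness.categorizeOperand
showed the loaded value dying before any instruction that could write memory; after this patch
those call sites always copy through loadByRef / fg.load. Below is a minimal, self-contained
sketch of that forward scan, assuming a simplified model in which each later instruction has
already been classified; `Effect` and `canElideCopy` are illustrative names only, not compiler
APIs, and this sketch is not part of the patch.

    const std = @import("std");

    /// Illustrative stand-in for the removed Liveness.OperandCategory:
    /// what a later instruction can do with the value produced by the load.
    const Effect = enum { none, write, tomb };

    /// Simplified model of the removed forward scan: the copy may be elided
    /// only if the value dies (.tomb) before anything that might write memory.
    fn canElideCopy(rest_of_block: []const Effect) bool {
        for (rest_of_block) |effect| switch (effect) {
            .none => continue, // cannot touch memory, keep scanning
            .write => return false, // aliased memory could be mutated first
            .tomb => return true, // value dies before any write; the copy is redundant
        };
        // Fell off the end of the block (e.g. an implicit loop repeat);
        // the removed code treated this conservatively as "no".
        return false;
    }

    test "canElideCopy" {
        try std.testing.expect(canElideCopy(&.{ .none, .tomb }));
        try std.testing.expect(!canElideCopy(&.{ .none, .write, .tomb }));
        try std.testing.expect(!canElideCopy(&.{.none}));
    }

Since the patch also deletes categorizeOperand, the Liveness API that backed this scan goes
away with it; the trade is extra copies of by-ref values in exchange for simpler lowering code.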