Merge pull request #22572 from jacobly0/new-error-trace

compiler: include error trace in all functions, implement for x86_64 backend
Matthew Lugg 2025-01-22 16:48:27 +00:00, committed by GitHub
commit 0e815c652d
12 changed files with 174 additions and 106 deletions

View File

@@ -1150,9 +1150,10 @@ pub const panicInactiveUnionField = Panic.inactiveUnionField;
 /// To be deleted after zig1.wasm is updated.
 pub const panic_messages = Panic.messages;
-pub noinline fn returnError(st: *StackTrace) void {
+pub noinline fn returnError() void {
     @branchHint(.unlikely);
     @setRuntimeSafety(false);
+    const st = @errorReturnTrace().?;
     if (st.index < st.instruction_addresses.len)
         st.instruction_addresses[st.index] = @returnAddress();
     st.index += 1;
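`returnError` no longer takes the trace as a parameter; it asks for the current function's trace via `@errorReturnTrace()`, which is now always available because every `.auto` function carries one. A minimal user-level sketch of the mechanism (illustrative, not part of the diff):

    const std = @import("std");

    fn mayFail() !void {
        // In safe builds, each `return error.X` calls `returnError`, which
        // appends @returnAddress() to the error return trace.
        return error.Oops;
    }

    pub fn main() void {
        mayFail() catch {
            // The trace recorded on the way here is visible to the handler.
            if (@errorReturnTrace()) |trace| std.debug.dumpStackTrace(trace.*);
        };
    }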

View File

@@ -2294,17 +2294,6 @@ pub const Key = union(enum) {
         return @atomicLoad(FuncAnalysis, func.analysisPtr(ip), .unordered);
     }
-    pub fn setCallsOrAwaitsErrorableFn(func: Func, ip: *InternPool, value: bool) void {
-        const extra_mutex = &ip.getLocal(func.tid).mutate.extra.mutex;
-        extra_mutex.lock();
-        defer extra_mutex.unlock();
-        const analysis_ptr = func.analysisPtr(ip);
-        var analysis = analysis_ptr.*;
-        analysis.calls_or_awaits_errorable_fn = value;
-        @atomicStore(FuncAnalysis, analysis_ptr, analysis, .release);
-    }
     pub fn setBranchHint(func: Func, ip: *InternPool, hint: std.builtin.BranchHint) void {
         const extra_mutex = &ip.getLocal(func.tid).mutate.extra.mutex;
         extra_mutex.lock();
@@ -5975,7 +5964,7 @@ pub const FuncAnalysis = packed struct(u32) {
     is_analyzed: bool,
     branch_hint: std.builtin.BranchHint,
     is_noinline: bool,
-    calls_or_awaits_errorable_fn: bool,
+    has_error_trace: bool,
     /// True if this function has an inferred error set.
     inferred_error_set: bool,
     disable_instrumentation: bool,
@@ -9007,7 +8996,7 @@ pub fn getFuncDecl(
         .is_analyzed = false,
         .branch_hint = .none,
         .is_noinline = key.is_noinline,
-        .calls_or_awaits_errorable_fn = false,
+        .has_error_trace = false,
         .inferred_error_set = false,
         .disable_instrumentation = false,
     },
@@ -9116,7 +9105,7 @@ pub fn getFuncDeclIes(
         .is_analyzed = false,
         .branch_hint = .none,
         .is_noinline = key.is_noinline,
-        .calls_or_awaits_errorable_fn = false,
+        .has_error_trace = false,
         .inferred_error_set = true,
         .disable_instrumentation = false,
     },
@@ -9312,7 +9301,7 @@ pub fn getFuncInstance(
         .is_analyzed = false,
         .branch_hint = .none,
         .is_noinline = arg.is_noinline,
-        .calls_or_awaits_errorable_fn = false,
+        .has_error_trace = false,
         .inferred_error_set = false,
         .disable_instrumentation = false,
     },
@@ -9410,7 +9399,7 @@ pub fn getFuncInstanceIes(
         .is_analyzed = false,
         .branch_hint = .none,
         .is_noinline = arg.is_noinline,
-        .calls_or_awaits_errorable_fn = false,
+        .has_error_trace = false,
         .inferred_error_set = true,
         .disable_instrumentation = false,
     },
@@ -12174,7 +12163,7 @@ pub fn funcAnalysisUnordered(ip: *const InternPool, func: Index) FuncAnalysis {
     return @atomicLoad(FuncAnalysis, ip.funcAnalysisPtr(func), .unordered);
 }
-pub fn funcSetCallsOrAwaitsErrorableFn(ip: *InternPool, func: Index) void {
+pub fn funcSetHasErrorTrace(ip: *InternPool, func: Index, has_error_trace: bool) void {
     const unwrapped_func = func.unwrap(ip);
     const extra_mutex = &ip.getLocal(unwrapped_func.tid).mutate.extra.mutex;
     extra_mutex.lock();
@@ -12182,7 +12171,7 @@ pub fn funcSetCallsOrAwaitsErrorableFn(ip: *InternPool, func: Index) void {
     const analysis_ptr = ip.funcAnalysisPtr(func);
     var analysis = analysis_ptr.*;
-    analysis.calls_or_awaits_errorable_fn = true;
+    analysis.has_error_trace = has_error_trace;
     @atomicStore(FuncAnalysis, analysis_ptr, analysis, .release);
 }
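The renamed setter keeps InternPool's existing concurrency discipline: writers serialize on the per-thread `extra` mutex, while readers do a relaxed atomic load of the whole packed word. A self-contained sketch of that pattern, with illustrative names (not InternPool's actual types):

    const std = @import("std");

    const Flags = packed struct(u32) {
        is_analyzed: bool = false,
        has_error_trace: bool = false,
        _reserved: u30 = 0,
    };

    var flags: Flags = .{};
    var flags_mutex: std.Thread.Mutex = .{};

    // Writers lock so the read-modify-write of one field cannot race another
    // writer; the release store publishes the whole word at once.
    fn setHasErrorTrace(value: bool) void {
        flags_mutex.lock();
        defer flags_mutex.unlock();
        var copy = flags;
        copy.has_error_trace = value;
        @atomicStore(Flags, &flags, copy, .release);
    }

    // Readers never lock: one atomic load always yields a consistent word.
    fn hasErrorTrace() bool {
        return @atomicLoad(Flags, &flags, .unordered).has_error_trace;
    }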

View File

@@ -7198,14 +7198,6 @@ fn zirCall(
     const call_dbg_node: Zir.Inst.Index = @enumFromInt(@intFromEnum(inst) - 1);
     const call_inst = try sema.analyzeCall(block, func, func_ty, callee_src, call_src, modifier, ensure_result_used, args_info, call_dbg_node, .call);
-    switch (sema.owner.unwrap()) {
-        .@"comptime", .type, .memoized_state, .nav_ty, .nav_val => input_is_error = false,
-        .func => |owner_func| if (!zcu.intern_pool.funcAnalysisUnordered(owner_func).calls_or_awaits_errorable_fn) {
-            // No errorable fn actually called; we have no error return trace
-            input_is_error = false;
-        },
-    }
     if (block.ownerModule().error_tracing and
         !block.isComptime() and !block.is_typeof and (input_is_error or pop_error_return_trace))
     {
@@ -7872,6 +7864,12 @@ fn analyzeCall(
         }
         break :msg msg;
     });
+    if (func_ty_info.cc == .auto) {
+        switch (sema.owner.unwrap()) {
+            .@"comptime", .nav_ty, .nav_val, .type, .memoized_state => {},
+            .func => |owner_func| ip.funcSetHasErrorTrace(owner_func, true),
+        }
+    }
     for (args, 0..) |arg, arg_idx| {
         try sema.validateRuntimeValue(block, args_info.argSrc(block, arg_idx), arg);
     }
@@ -7946,13 +7944,6 @@ fn analyzeCall(
         try zcu.ensureFuncBodyAnalysisQueued(runtime_func_val.toIntern());
     }
-    switch (sema.owner.unwrap()) {
-        .@"comptime", .nav_ty, .nav_val, .type, .memoized_state => {},
-        .func => |owner_func| if (resolved_ret_ty.isError(zcu)) {
-            ip.funcSetCallsOrAwaitsErrorableFn(owner_func);
-        },
-    }
     const call_tag: Air.Inst.Tag = switch (modifier) {
         .auto, .no_async => .call,
         .never_tail => .call_never_tail,
@@ -19706,16 +19697,16 @@ fn retWithErrTracing(
         .bool_false => false,
         else => true,
     };
-    // This means we're returning something that might be an error!
+    // This should only be possible with the `auto` cc, so we definitely have an error trace.
+    assert(pt.zcu.intern_pool.funcAnalysisUnordered(sema.owner.unwrap().func).has_error_trace);
     const gpa = sema.gpa;
     const stack_trace_ty = try sema.getBuiltinType(src, .StackTrace);
     try stack_trace_ty.resolveFields(pt);
     const ptr_stack_trace_ty = try pt.singleMutPtrType(stack_trace_ty);
     const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
     const return_err_fn = Air.internedToRef(try sema.getBuiltin(src, .returnError));
-    const args: [1]Air.Inst.Ref = .{err_return_trace};
     if (!need_check) {
-        try sema.callBuiltin(block, src, return_err_fn, .never_inline, &args, .@"error return");
+        try sema.callBuiltin(block, src, return_err_fn, .never_inline, &.{}, .@"error return");
         _ = try block.addUnOp(ret_tag, operand);
         return;
     }
@@ -19726,7 +19717,7 @@ fn retWithErrTracing(
     var else_block = block.makeSubBlock();
     defer else_block.instructions.deinit(gpa);
-    try sema.callBuiltin(&else_block, src, return_err_fn, .never_inline, &args, .@"error return");
+    try sema.callBuiltin(&else_block, src, return_err_fn, .never_inline, &.{}, .@"error return");
     _ = try else_block.addUnOp(ret_tag, operand);
     try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.CondBr).@"struct".fields.len +
@@ -19837,7 +19828,7 @@ fn restoreErrRetIndex(sema: *Sema, start_block: *Block, src: LazySrcLoc, target_
         return;
     }
-    if (!zcu.intern_pool.funcAnalysisUnordered(sema.owner.unwrap().func).calls_or_awaits_errorable_fn) return;
+    if (!zcu.intern_pool.funcAnalysisUnordered(sema.owner.unwrap().func).has_error_trace) return;
     if (!start_block.ownerModule().error_tracing) return;
     assert(saved_index != .none); // The .error_return_trace_index field was dropped somewhere
@@ -21123,7 +21114,7 @@ fn getErrorReturnTrace(sema: *Sema, block: *Block) CompileError!Air.Inst.Ref {
     const opt_ptr_stack_trace_ty = try pt.optionalType(ptr_stack_trace_ty.toIntern());
     switch (sema.owner.unwrap()) {
-        .func => |func| if (ip.funcAnalysisUnordered(func).calls_or_awaits_errorable_fn and block.ownerModule().error_tracing) {
+        .func => |func| if (ip.funcAnalysisUnordered(func).has_error_trace and block.ownerModule().error_tracing) {
             return block.addTy(.err_return_trace, opt_ptr_stack_trace_ty);
         },
         .@"comptime", .nav_ty, .nav_val, .type, .memoized_state => {},
@@ -27096,6 +27087,10 @@ fn preparePanicId(sema: *Sema, src: LazySrcLoc, panic_id: Zcu.PanicId) !InternPool.Index {
     const zcu = sema.pt.zcu;
     try sema.ensureMemoizedStateResolved(src, .panic);
     try zcu.ensureFuncBodyAnalysisQueued(zcu.builtin_decl_values.get(.@"Panic.call"));
+    switch (sema.owner.unwrap()) {
+        .@"comptime", .nav_ty, .nav_val, .type, .memoized_state => {},
+        .func => |owner_func| zcu.intern_pool.funcSetHasErrorTrace(owner_func, true),
+    }
     return zcu.builtin_decl_values.get(panic_id.toBuiltin());
 }
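`restoreErrRetIndex` (now gated on `has_error_trace`) is what keeps handled errors out of the trace: the index saved before an errorable call is restored once the error is consumed. A hedged user-level illustration of the behavior these paths preserve:

    fn mayFail(fail: bool) !void {
        if (fail) return error.Oops;
    }

    pub fn main() !void {
        // The trace index is saved before this call and restored after the
        // `catch`, so the handled error leaves no frames behind.
        mayFail(true) catch {};
        // Only this propagated error's frames appear in the printed trace.
        try mayFail(true);
    }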

View File

@@ -2596,7 +2596,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaError!Air {
     }
     // reset in case calls to errorable functions are removed.
-    func.setCallsOrAwaitsErrorableFn(ip, false);
+    ip.funcSetHasErrorTrace(func_index, fn_ty_info.cc == .auto);
     // First few indexes of extra are reserved and set at the end.
     const reserved_count = @typeInfo(Air.ExtraIndex).@"enum".fields.len;
@@ -2707,11 +2707,9 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaError!Air {
     func.setBranchHint(ip, sema.branch_hint orelse .none);
-    // If we don't get an error return trace from a caller, create our own.
-    if (func.analysisUnordered(ip).calls_or_awaits_errorable_fn and
-        zcu.comp.config.any_error_tracing and
-        !sema.fn_ret_ty.isError(zcu))
-    {
+    if (zcu.comp.config.any_error_tracing and func.analysisUnordered(ip).has_error_trace and fn_ty_info.cc != .auto) {
+        // We're using an error trace, but didn't start out with one from the caller.
+        // We'll have to create it at the start of the function.
         sema.setupErrorReturnTrace(&inner_block, last_arg_index) catch |err| switch (err) {
             error.ComptimeReturn => unreachable,
             error.ComptimeBreak => unreachable,
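With this change, `setupErrorReturnTrace` runs only for non-`.auto` functions: an `.auto` function receives its trace from the caller, while e.g. an exported C-ABI function has no caller-supplied trace and must materialize one at entry. A hedged sketch of the situation this handles:

    fn fallible() !void {
        return error.Nope;
    }

    // `c_entry` uses the C calling convention, so no caller threads a trace
    // pointer into it; the compiler creates a fresh trace at its entry, and
    // the `.auto` callee `fallible` records frames into that trace.
    export fn c_entry() c_int {
        fallible() catch return 1;
        return 0;
    }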

View File

@@ -33,6 +33,8 @@ const FrameIndex = bits.FrameIndex;
 const InnerError = codegen.CodeGenError || error{OutOfRegisters};
+const err_ret_trace_index: Air.Inst.Index = @enumFromInt(std.math.maxInt(u32));
 gpa: Allocator,
 pt: Zcu.PerThread,
 air: Air,
@@ -55,6 +57,7 @@ va_info: union {
     win64: struct {},
 },
 ret_mcv: InstTracking,
+err_ret_trace_reg: Register,
 fn_type: Type,
 src_loc: Zcu.LazySrcLoc,
@@ -626,6 +629,7 @@ const InstTracking = struct {
         switch (self.long) {
             .none => self.long = try cg.allocRegOrMem(inst, false),
             .load_frame => {},
+            .lea_frame => return,
             .reserved_frame => |index| self.long = .{ .load_frame = .{ .index = index } },
             else => unreachable,
         }
@@ -887,6 +891,7 @@ pub fn generate(
         .args = undefined, // populated after `resolveCallingConventionValues`
         .va_info = undefined, // populated after `resolveCallingConventionValues`
         .ret_mcv = undefined, // populated after `resolveCallingConventionValues`
+        .err_ret_trace_reg = undefined, // populated after `resolveCallingConventionValues`
         .fn_type = fn_type,
         .src_loc = src_loc,
         .end_di_line = func.rbrace_line,
@@ -935,6 +940,7 @@ pub fn generate(
     function.args = call_info.args;
     function.ret_mcv = call_info.return_value;
+    function.err_ret_trace_reg = call_info.err_ret_trace_reg;
     function.frame_allocs.set(@intFromEnum(FrameIndex.ret_addr), .init(.{
         .size = Type.usize.abiSize(zcu),
         .alignment = Type.usize.abiAlignment(zcu).min(call_info.stack_align),
@@ -962,6 +968,14 @@ pub fn generate(
         } },
         .x86_64_win => .{ .win64 = .{} },
     };
+    if (call_info.err_ret_trace_reg != .none) {
+        function.register_manager.getRegAssumeFree(call_info.err_ret_trace_reg, err_ret_trace_index);
+        try function.inst_tracking.putNoClobber(
+            gpa,
+            err_ret_trace_index,
+            .init(.{ .register = call_info.err_ret_trace_reg }),
+        );
+    }
     function.gen() catch |err| switch (err) {
         error.CodegenFail => return error.CodegenFail,
@@ -1042,6 +1056,7 @@ pub fn generateLazy(
         .args = undefined,
         .va_info = undefined,
         .ret_mcv = undefined,
+        .err_ret_trace_reg = undefined,
         .fn_type = undefined,
         .src_loc = src_loc,
         .end_di_line = undefined, // no debug info yet
@@ -2503,9 +2518,6 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void {
             .optional_payload => try cg.airOptionalPayload(inst),
             .unwrap_errunion_err => try cg.airUnwrapErrUnionErr(inst),
             .unwrap_errunion_payload => try cg.airUnwrapErrUnionPayload(inst),
-            .err_return_trace => try cg.airErrReturnTrace(inst),
-            .set_err_return_trace => try cg.airSetErrReturnTrace(inst),
-            .save_err_return_trace_index => try cg.airSaveErrReturnTraceIndex(inst),
             .wrap_optional => try cg.airWrapOptional(inst),
             .wrap_errunion_payload => try cg.airWrapErrUnionPayload(inst),
@@ -11236,12 +11248,46 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void {
             .wasm_memory_size => unreachable,
             .wasm_memory_grow => unreachable,
+            .err_return_trace => {
+                const ert: Temp = .{ .index = err_ret_trace_index };
+                try ert.moveTo(inst, cg);
+            },
+            .set_err_return_trace => {
+                const un_op = air_datas[@intFromEnum(inst)].un_op;
+                var ops = try cg.tempsFromOperands(inst, .{un_op});
+                switch (ops[0].unwrap(cg)) {
+                    .ref => {
+                        const result = try cg.allocRegOrMem(err_ret_trace_index, true);
+                        try cg.genCopy(.usize, result, ops[0].tracking(cg).short, .{});
+                        tracking_log.debug("{} => {} (birth)", .{ err_ret_trace_index, result });
+                        cg.inst_tracking.putAssumeCapacityNoClobber(err_ret_trace_index, .init(result));
+                    },
+                    .temp => |temp_index| {
+                        const temp_tracking = temp_index.tracking(cg);
+                        tracking_log.debug("{} => {} (birth)", .{ err_ret_trace_index, temp_tracking.short });
+                        cg.inst_tracking.putAssumeCapacityNoClobber(err_ret_trace_index, temp_tracking.*);
+                        assert(cg.reuseTemp(err_ret_trace_index, temp_index.toIndex(), temp_tracking));
+                    },
+                    .err_ret_trace => unreachable,
+                }
+            },
             .addrspace_cast => {
                 const ty_op = air_datas[@intFromEnum(inst)].ty_op;
                 var ops = try cg.tempsFromOperands(inst, .{ty_op.operand});
                 try ops[0].moveTo(inst, cg);
            },
+            .save_err_return_trace_index => {
+                const ty_pl = air_datas[@intFromEnum(inst)].ty_pl;
+                const agg_ty = ty_pl.ty.toType();
+                assert(agg_ty.containerLayout(zcu) != .@"packed");
+                var ert: Temp = .{ .index = err_ret_trace_index };
+                var res = try ert.load(.usize, .{ .disp = @intCast(agg_ty.structFieldOffset(ty_pl.payload, zcu)) }, cg);
+                try ert.die(cg);
+                try res.moveTo(inst, cg);
+            },
             .vector_store_elem => return cg.fail("TODO implement vector_store_elem", .{}),
             .c_va_arg => try cg.airVaArg(inst),
@@ -11697,7 +11743,7 @@ fn restoreState(self: *CodeGen, state: State, deaths: []const Air.Inst.Index, comptime opts: struct {
     const target_maybe_inst = if (state.free_registers.isSet(reg_index)) null else target_slot;
     if (std.debug.runtime_safety) if (target_maybe_inst) |target_inst|
         assert(self.inst_tracking.getIndex(target_inst).? < state.inst_tracking_len);
-    if (opts.emit_instructions) {
+    if (opts.emit_instructions and current_maybe_inst != target_maybe_inst) {
         if (current_maybe_inst) |current_inst|
             try self.inst_tracking.getPtr(current_inst).?.spill(self, current_inst);
         if (target_maybe_inst) |target_inst|
@@ -11709,7 +11755,7 @@ fn restoreState(self: *CodeGen, state: State, deaths: []const Air.Inst.Index, comptime opts: struct {
         self.register_manager.freeRegIndex(reg_index);
     }
     if (target_maybe_inst) |target_inst| {
-        self.register_manager.getRegIndexAssumeFree(reg_index, target_maybe_inst);
+        self.register_manager.getRegIndexAssumeFree(reg_index, target_inst);
         self.inst_tracking.getPtr(target_inst).?.trackMaterialize(target_inst, reg_tracking);
     }
 } else if (target_maybe_inst) |_|
@@ -11750,9 +11796,10 @@ pub fn spillEflagsIfOccupied(self: *CodeGen) !void {
     }
 }
-pub fn spillCallerPreservedRegs(self: *CodeGen, cc: std.builtin.CallingConvention.Tag) !void {
+pub fn spillCallerPreservedRegs(self: *CodeGen, cc: std.builtin.CallingConvention.Tag, ignore_reg: Register) !void {
     switch (cc) {
-        inline .auto, .x86_64_sysv, .x86_64_win => |tag| try self.spillRegisters(abi.getCallerPreservedRegs(tag)),
+        inline .auto, .x86_64_sysv, .x86_64_win => |tag| inline for (comptime abi.getCallerPreservedRegs(tag)) |reg|
+            if (reg != ignore_reg) try self.register_manager.getKnownReg(reg, null),
         else => unreachable,
     }
 }
@@ -14406,22 +14453,6 @@ fn genUnwrapErrUnionPayloadPtrMir(
     return result;
 }
-fn airErrReturnTrace(self: *CodeGen, inst: Air.Inst.Index) !void {
-    _ = inst;
-    return self.fail("TODO implement airErrReturnTrace for {}", .{self.target.cpu.arch});
-    //return self.finishAir(inst, result, .{ .none, .none, .none });
-}
-fn airSetErrReturnTrace(self: *CodeGen, inst: Air.Inst.Index) !void {
-    _ = inst;
-    return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch});
-}
-fn airSaveErrReturnTraceIndex(self: *CodeGen, inst: Air.Inst.Index) !void {
-    _ = inst;
-    return self.fail("TODO implement airSaveErrReturnTraceIndex for {}", .{self.target.cpu.arch});
-}
 fn airWrapOptional(self: *CodeGen, inst: Air.Inst.Index) !void {
     const pt = self.pt;
     const zcu = pt.zcu;
@@ -21188,7 +21219,7 @@ fn genCall(self: *CodeGen, info: union(enum) {
     }
     try self.spillEflagsIfOccupied();
-    try self.spillCallerPreservedRegs(fn_info.cc);
+    try self.spillCallerPreservedRegs(fn_info.cc, call_info.err_ret_trace_reg);
     // set stack arguments first because this can clobber registers
     // also clobber spill arguments as we go
@@ -21273,6 +21304,24 @@ fn genCall(self: *CodeGen, info: union(enum) {
         else => unreachable,
     };
+    if (call_info.err_ret_trace_reg != .none) {
+        if (self.inst_tracking.getPtr(err_ret_trace_index)) |err_ret_trace| {
+            if (switch (err_ret_trace.short) {
+                .register => |reg| call_info.err_ret_trace_reg != reg,
+                else => true,
+            }) {
+                try self.register_manager.getReg(call_info.err_ret_trace_reg, err_ret_trace_index);
+                try reg_locks.append(self.register_manager.lockReg(call_info.err_ret_trace_reg));
+                try self.genSetReg(call_info.err_ret_trace_reg, .usize, err_ret_trace.short, .{});
+                err_ret_trace.trackMaterialize(err_ret_trace_index, .{
+                    .long = err_ret_trace.long,
+                    .short = .{ .register = call_info.err_ret_trace_reg },
+                });
+            }
+        }
+    }
     // now we are free to set register arguments
     switch (call_info.return_value.long) {
         .none, .unreach => {},
@@ -21447,6 +21496,17 @@ fn airRet(self: *CodeGen, inst: Air.Inst.Index, safety: bool) !void {
         else => unreachable,
     }
     self.ret_mcv.liveOut(self, inst);
+    if (self.err_ret_trace_reg != .none) {
+        if (self.inst_tracking.getPtr(err_ret_trace_index)) |err_ret_trace| {
+            if (switch (err_ret_trace.short) {
+                .register => |reg| self.err_ret_trace_reg != reg,
+                else => true,
+            }) try self.genSetReg(self.err_ret_trace_reg, .usize, err_ret_trace.short, .{});
+            err_ret_trace.liveOut(self, err_ret_trace_index);
+        }
+    }
     try self.finishAir(inst, .unreach, .{ un_op, .none, .none });
     // TODO optimization opportunity: figure out when we can emit this as a 2 byte instruction
@@ -21467,6 +21527,17 @@ fn airRetLoad(self: *CodeGen, inst: Air.Inst.Index) !void {
         else => unreachable,
     }
     self.ret_mcv.liveOut(self, inst);
+    if (self.err_ret_trace_reg != .none) {
+        if (self.inst_tracking.getPtr(err_ret_trace_index)) |err_ret_trace| {
+            if (switch (err_ret_trace.short) {
+                .register => |reg| self.err_ret_trace_reg != reg,
+                else => true,
+            }) try self.genSetReg(self.err_ret_trace_reg, .usize, err_ret_trace.short, .{});
+            err_ret_trace.liveOut(self, err_ret_trace_index);
+        }
+    }
     try self.finishAir(inst, .unreach, .{ un_op, .none, .none });
     // TODO optimization opportunity: figure out when we can emit this as a 2 byte instruction
@@ -26098,8 +26169,13 @@ fn airTagName(self: *CodeGen, inst: Air.Inst.Index) !void {
         stack_frame_align.* = stack_frame_align.max(needed_call_frame.abi_align);
     }
+    const err_ret_trace_reg = if (zcu.comp.config.any_error_tracing) err_ret_trace_reg: {
+        const param_gpr = abi.getCAbiIntParamRegs(.auto);
+        break :err_ret_trace_reg param_gpr[param_gpr.len - 1];
+    } else .none;
     try self.spillEflagsIfOccupied();
-    try self.spillCallerPreservedRegs(.auto);
+    try self.spillCallerPreservedRegs(.auto, err_ret_trace_reg);
     const param_regs = abi.getCAbiIntParamRegs(.auto);
@@ -28564,6 +28640,7 @@ const CallMCValues = struct {
     stack_align: InternPool.Alignment,
     gp_count: u32,
     fp_count: u32,
+    err_ret_trace_reg: Register,
     fn deinit(self: *CallMCValues, func: *CodeGen) void {
         func.gpa.free(self.args);
@@ -28598,6 +28675,7 @@ fn resolveCallingConventionValues(
         .stack_align = undefined,
         .gp_count = 0,
         .fp_count = 0,
+        .err_ret_trace_reg = .none,
     };
     errdefer self.gpa.free(result.args);
@@ -28842,6 +28920,11 @@ fn resolveCallingConventionValues(
     var param_x87 = abi.getCAbiX87ParamRegs(cc);
     var param_sse = abi.getCAbiSseParamRegs(cc, self.target);
+    if (zcu.comp.config.any_error_tracing) {
+        result.err_ret_trace_reg = param_gpr[param_gpr.len - 1];
+        param_gpr = param_gpr[0 .. param_gpr.len - 1];
+    }
     // Return values
     result.return_value = if (ret_ty.isNoReturn(zcu))
         .init(.unreach)
@@ -29159,16 +29242,8 @@ fn typeOf(self: *CodeGen, inst: Air.Inst.Ref) Type {
 }
 fn typeOfIndex(self: *CodeGen, inst: Air.Inst.Index) Type {
-    const pt = self.pt;
-    const zcu = pt.zcu;
     const temp: Temp = .{ .index = inst };
-    return switch (temp.unwrap(self)) {
-        .ref => switch (self.air.instructions.items(.tag)[@intFromEnum(inst)]) {
-            .loop_switch_br => self.typeOf(self.air.unwrapSwitch(inst).operand),
-            else => self.air.typeOfIndex(inst, &zcu.intern_pool),
-        },
-        .temp => temp.typeOf(self),
-    };
+    return temp.typeOf(self);
 }
 fn intCompilerRtAbiName(int_bits: u32) u8 {
@@ -29336,10 +29411,12 @@ const Temp = struct {
     fn unwrap(temp: Temp, cg: *CodeGen) union(enum) {
         ref: Air.Inst.Ref,
         temp: Index,
+        err_ret_trace,
     } {
         switch (temp.index.unwrap()) {
             .ref => |ref| return .{ .ref = ref },
             .target => |target_index| {
+                if (temp.index == err_ret_trace_index) return .err_ret_trace;
                 const temp_index: Index = @enumFromInt(target_index);
                 assert(temp_index.isValid(cg));
                 return .{ .temp = temp_index };
@@ -29349,14 +29426,18 @@ const Temp = struct {
     fn typeOf(temp: Temp, cg: *CodeGen) Type {
         return switch (temp.unwrap(cg)) {
-            .ref => |ref| cg.typeOf(ref),
+            .ref => switch (cg.air.instructions.items(.tag)[@intFromEnum(temp.index)]) {
+                .loop_switch_br => cg.typeOf(cg.air.unwrapSwitch(temp.index).operand),
+                else => cg.air.typeOfIndex(temp.index, &cg.pt.zcu.intern_pool),
+            },
             .temp => |temp_index| temp_index.typeOf(cg),
+            .err_ret_trace => .usize,
         };
     }
     fn isMut(temp: Temp, cg: *CodeGen) bool {
         return switch (temp.unwrap(cg)) {
-            .ref => false,
+            .ref, .err_ret_trace => false,
             .temp => |temp_index| switch (temp_index.tracking(cg).short) {
                 .none,
                 .unreach,
@@ -29456,7 +29537,7 @@ const Temp = struct {
     fn toOffset(temp: *Temp, off: i32, cg: *CodeGen) !void {
         if (off == 0) return;
         switch (temp.unwrap(cg)) {
-            .ref => {},
+            .ref, .err_ret_trace => {},
             .temp => |temp_index| {
                 const temp_tracking = temp_index.tracking(cg);
                 switch (temp_tracking.short) {
@@ -29617,6 +29698,7 @@ const Temp = struct {
                 },
                 }
             },
+            .err_ret_trace => unreachable,
         }
         const new_temp = try temp.getLimb(limb_ty, limb_index, cg);
         try temp.die(cg);
@@ -29633,7 +29715,7 @@ const Temp = struct {
     }
     fn toReg(temp: *Temp, new_reg: Register, cg: *CodeGen) !bool {
-        const val, const ty = val_ty: switch (temp.unwrap(cg)) {
+        const val, const ty: Type = val_ty: switch (temp.unwrap(cg)) {
            .ref => |ref| .{ temp.tracking(cg).short, cg.typeOf(ref) },
            .temp => |temp_index| {
                const temp_tracking = temp_index.tracking(cg);
@@ -29641,6 +29723,7 @@ const Temp = struct {
                     temp_tracking.short.register == new_reg) return false;
                 break :val_ty .{ temp_tracking.short, temp_index.typeOf(cg) };
             },
+            .err_ret_trace => .{ temp.tracking(cg).short, .usize },
         };
         const new_temp_index = cg.next_temp_index;
         try cg.register_manager.getReg(new_reg, new_temp_index.toIndex());
@@ -30167,7 +30250,7 @@ const Temp = struct {
     fn moveTo(temp: Temp, inst: Air.Inst.Index, cg: *CodeGen) !void {
         if (cg.liveness.isUnused(inst)) try temp.die(cg) else switch (temp.unwrap(cg)) {
-            .ref => {
+            .ref, .err_ret_trace => {
                const result = try cg.allocRegOrMem(inst, true);
                try cg.genCopy(cg.typeOfIndex(inst), result, temp.tracking(cg).short, .{});
                tracking_log.debug("{} => {} (birth)", .{ inst, result });
@@ -30184,7 +30267,7 @@ const Temp = struct {
     fn die(temp: Temp, cg: *CodeGen) !void {
         switch (temp.unwrap(cg)) {
-            .ref => {},
+            .ref, .err_ret_trace => {},
             .temp => |temp_index| try temp_index.tracking(cg).die(cg, temp_index.toIndex()),
         }
     }
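Taken together, the backend changes reserve the last C-ABI integer parameter register for the trace pointer (`resolveCallingConventionValues`), exclude it from caller-saved spills, and move the trace back into that register before every call and return. Conceptually, every `.auto` function gains a hidden register-passed parameter; a rough Zig-level approximation (names invented for illustration):

    const std = @import("std");

    // Hypothetical lowering: the trace pointer arrives in a fixed register and
    // must sit in that same register again at `ret`, so callers find it
    // unchanged after any call.
    fn addOneLowered(x: u32, err_ret_trace: *std.builtin.StackTrace) u32 {
        _ = err_ret_trace; // forwarded, in the same register, to any errorable callee
        return x + 1;
    }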

View File

@@ -1497,8 +1497,7 @@ pub const Object = struct {
         .unsigned => try attributes.addRetAttr(.zeroext, &o.builder),
     };
-    const err_return_tracing = Type.fromInterned(fn_info.return_type).isError(zcu) and
-        comp.config.any_error_tracing;
+    const err_return_tracing = fn_info.cc == .auto and comp.config.any_error_tracing;
     const err_ret_trace: Builder.Value = if (err_return_tracing) param: {
         const param = wip.arg(llvm_arg_i);
@@ -2805,9 +2804,7 @@ pub const Object = struct {
         debug_param_types.appendAssumeCapacity(try o.lowerDebugType(Type.void));
     }
-    if (Type.fromInterned(fn_info.return_type).isError(zcu) and
-        zcu.comp.config.any_error_tracing)
-    {
+    if (fn_info.cc == .auto and zcu.comp.config.any_error_tracing) {
         const ptr_ty = try pt.singleMutPtrType(try o.getStackTraceType());
         debug_param_types.appendAssumeCapacity(try o.lowerDebugType(ptr_ty));
     }
@@ -2970,8 +2967,7 @@ pub const Object = struct {
         llvm_arg_i += 1;
     }
-    const err_return_tracing = Type.fromInterned(fn_info.return_type).isError(zcu) and
-        zcu.comp.config.any_error_tracing;
+    const err_return_tracing = fn_info.cc == .auto and zcu.comp.config.any_error_tracing;
     if (err_return_tracing) {
         try attributes.addParamAttr(llvm_arg_i, .nonnull, &o.builder);
@@ -3736,9 +3732,7 @@ pub const Object = struct {
         try llvm_params.append(o.gpa, .ptr);
     }
-    if (Type.fromInterned(fn_info.return_type).isError(zcu) and
-        zcu.comp.config.any_error_tracing)
-    {
+    if (fn_info.cc == .auto and zcu.comp.config.any_error_tracing) {
         const ptr_ty = try pt.singleMutPtrType(try o.getStackTraceType());
         try llvm_params.append(o.gpa, try o.lowerType(ptr_ty));
     }
@@ -5483,7 +5477,7 @@ pub const FuncGen = struct {
         break :blk ret_ptr;
     };
-    const err_return_tracing = return_type.isError(zcu) and zcu.comp.config.any_error_tracing;
+    const err_return_tracing = fn_info.cc == .auto and zcu.comp.config.any_error_tracing;
     if (err_return_tracing) {
         assert(self.err_ret_trace != .none);
         try llvm_args.append(self.err_ret_trace);
@@ -5762,6 +5756,8 @@ pub const FuncGen = struct {
     const panic_nav = ip.getNav(panic_func.owner_nav);
     const fn_info = zcu.typeToFunc(Type.fromInterned(panic_nav.typeOf(ip))).?;
     const panic_global = try o.resolveLlvmFunction(panic_func.owner_nav);
+    const has_err_trace = zcu.comp.config.any_error_tracing and fn_info.cc == .auto;
+    if (has_err_trace) assert(fg.err_ret_trace != .none);
     _ = try fg.wip.callIntrinsicAssumeCold();
     _ = try fg.wip.call(
         .normal,
         .none,
         panic_global.typeOf(&o.builder),
         panic_global.toValue(&o.builder),
-        &.{
+        if (has_err_trace) &.{
+            fg.err_ret_trace,
+            msg_ptr.toValue(),
+            try o.builder.intValue(llvm_usize, msg_len),
+            try o.builder.nullValue(.ptr),
+            null_opt_addr_global.toValue(),
+        } else &.{
             msg_ptr.toValue(),
             try o.builder.intValue(llvm_usize, msg_len),
             try o.builder.nullValue(.ptr),
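On the LLVM side, the trace parameter is now keyed purely to `cc == .auto`, and panic calls forward the current trace as their first argument. The user-visible effect, sketched under those assumptions, is that traces survive through functions that do not themselves return errors:

    fn fails() !void {
        return error.Oops;
    }

    // `middle` has no error in its return type, so before this change it had
    // no trace to pass along. It is still `.auto`, so it now threads the trace
    // pointer, and the panic below can report where `error.Oops` originated.
    fn middle() void {
        fails() catch @panic("unrecoverable");
    }

    pub fn main() void {
        middle();
    }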

View File

@@ -708,7 +708,7 @@ pub inline fn backendSupportsFeature(backend: std.builtin.CompilerBackend, comptime feature: Feature) bool {
         else => false,
     },
     .error_return_trace => switch (backend) {
-        .stage2_llvm => true,
+        .stage2_llvm, .stage2_x86_64 => true,
         else => false,
     },
     .is_named_enum_value => switch (backend) {

Binary file not shown.

View File

@@ -15,6 +15,6 @@ pub export fn entry() void {
 // error
 //
 // :7:25: error: unable to resolve comptime value
-// :7:25: note: initializer of comptime-only struct 'tmp.S.foo__anon_165.C' must be comptime-known
+// :7:25: note: initializer of comptime-only struct 'tmp.S.foo__anon_441.C' must be comptime-known
 // :4:16: note: struct requires comptime because of this field
 // :4:16: note: types are not available at runtime

View File

@@ -16,5 +16,5 @@ pub export fn entry2() void {
 //
 // :3:6: error: no field or member function named 'copy' in '[]const u8'
 // :9:8: error: no field or member function named 'bar' in '@TypeOf(.{})'
-// :12:18: error: no field or member function named 'bar' in 'tmp.entry2__struct_169'
+// :12:18: error: no field or member function named 'bar' in 'tmp.entry2__struct_445'
 // :12:6: note: struct declared here

View File

@@ -6,6 +6,6 @@ export fn foo() void {
 // error
 //
-// :4:16: error: expected type 'tmp.T', found 'tmp.foo__struct_158'
+// :4:16: error: expected type 'tmp.T', found 'tmp.foo__struct_434'
 // :3:16: note: struct declared here
 // :1:11: note: struct declared here

View File

@@ -12,5 +12,5 @@ fn foo(set1: Set1) void {
 // backend=stage2
 // target=native
 //
-// :7:21: error: expected type 'error{A,C}', found 'error{A,B}'
+// :7:21: error: expected type 'error{C,A}', found 'error{A,B}'
 // :7:21: note: 'error.B' not a member of destination error set