From 5316a00a188955d60cc38d56def51b8605181225 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 12 Sep 2022 20:41:25 +0300 Subject: [PATCH 01/12] stage2: properly reset error return trace index --- src/AstGen.zig | 41 +++++++++++++++++++++++++++++++++-- src/Sema.zig | 55 +++++++++++++++++++++++++++++++++++++++++++++-- src/Zir.zig | 16 ++++++++++++++ src/print_zir.zig | 4 +++- 4 files changed, 111 insertions(+), 5 deletions(-) diff --git a/src/AstGen.zig b/src/AstGen.zig index d04608b300..2ef6a0aedc 100644 --- a/src/AstGen.zig +++ b/src/AstGen.zig @@ -2471,6 +2471,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As .try_ptr, //.try_inline, //.try_ptr_inline, + .save_err_ret_index, => break :b false, .extended => switch (gz.astgen.instructions.items(.data)[inst].extended.opcode) { @@ -2533,6 +2534,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As .validate_array_init_ty, .validate_struct_init_ty, .validate_deref, + .restore_err_ret_index, => break :b true, .@"defer" => unreachable, @@ -5152,10 +5154,16 @@ fn orelseCatchExpr( const astgen = parent_gz.astgen; const tree = astgen.tree; + const do_err_trace = astgen.fn_block != null and (cond_op == .is_non_err or cond_op == .is_non_err_ptr); + var block_scope = parent_gz.makeSubBlock(scope); block_scope.setBreakResultLoc(rl); defer block_scope.unstack(); + if (do_err_trace) { + block_scope.saved_err_trace_index = try parent_gz.addNode(.save_err_ret_index, node); + } + const operand_rl: ResultLoc = switch (block_scope.break_result_loc) { .ref => .ref, else => .none, @@ -5220,7 +5228,7 @@ fn orelseCatchExpr( // instructions or not. const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break"; - return finishThenElseBlock( + const result = try finishThenElseBlock( parent_gz, rl, node, @@ -5235,6 +5243,16 @@ fn orelseCatchExpr( block, break_tag, ); + if (do_err_trace) { + _ = try parent_gz.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = parent_gz.saved_err_trace_index, + .src_node = parent_gz.nodeIndexToRelative(node), + } }, + }); + } + return result; } /// Supports `else_scope` stacked on `then_scope` stacked on `block_scope`. Unstacks `else_scope` then `then_scope`. @@ -5430,10 +5448,16 @@ fn ifExpr( const tree = astgen.tree; const token_tags = tree.tokens.items(.tag); + const do_err_trace = astgen.fn_block != null and if_full.error_token != null; + var block_scope = parent_gz.makeSubBlock(scope); block_scope.setBreakResultLoc(rl); defer block_scope.unstack(); + if (do_err_trace) { + block_scope.saved_err_trace_index = try parent_gz.addNode(.save_err_ret_index, node); + } + const payload_is_ref = if (if_full.payload_token) |payload_token| token_tags[payload_token] == .asterisk else @@ -5602,7 +5626,7 @@ fn ifExpr( }; const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break"; - return finishThenElseBlock( + const result = try finishThenElseBlock( parent_gz, rl, node, @@ -5617,6 +5641,16 @@ fn ifExpr( block, break_tag, ); + if (do_err_trace) { + _ = try parent_gz.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = parent_gz.saved_err_trace_index, + .src_node = parent_gz.nodeIndexToRelative(node), + } }, + }); + } + return result; } /// Supports `else_scope` stacked on `then_scope`. Unstacks `else_scope` then `then_scope`. @@ -10300,6 +10334,8 @@ const GenZir = struct { /// Keys are the raw instruction index, values are the closure_capture instruction. 
captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{}, + saved_err_trace_index: Zir.Inst.Ref = .none, + const unstacked_top = std.math.maxInt(usize); /// Call unstack before adding any new instructions to containing GenZir. fn unstack(self: *GenZir) void { @@ -10344,6 +10380,7 @@ const GenZir = struct { .any_defer_node = gz.any_defer_node, .instructions = gz.instructions, .instructions_top = gz.instructions.items.len, + .saved_err_trace_index = gz.saved_err_trace_index, }; } diff --git a/src/Sema.zig b/src/Sema.zig index 972a19de4f..28a012bfb1 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -926,6 +926,8 @@ fn analyzeBodyInner( .ret_ptr => try sema.zirRetPtr(block, inst), .ret_type => try sema.addType(sema.fn_ret_ty), + .save_err_ret_index => try sema.zirSaveErrRetIndex(block, inst), + // Instructions that we know to *always* be noreturn based solely on their tag. // These functions match the return type of analyzeBody so that we can // tail call them here. @@ -1208,6 +1210,11 @@ fn analyzeBodyInner( i += 1; continue; }, + .restore_err_ret_index => { + try sema.zirRestoreErrRetIndex(block, inst); + i += 1; + continue; + }, // Special case instructions to handle comptime control flow. .@"break" => { @@ -16176,6 +16183,52 @@ fn wantErrorReturnTracing(sema: *Sema, fn_ret_ty: Type) bool { backend_supports_error_return_tracing; } +fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { + const inst_data = sema.code.instructions.items(.data)[inst].node; + const src = LazySrcLoc.nodeOffset(inst_data); + + // This is only relevant at runtime. + if (block.is_comptime) return Air.Inst.Ref.zero_usize; + + const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; + const ok = sema.owner_func.?.calls_or_awaits_errorable_fn and + sema.mod.comp.bin_file.options.error_return_tracing and + backend_supports_error_return_tracing; + if (!ok) return Air.Inst.Ref.zero_usize; + + const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace"); + const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty); + const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty); + const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty); + return sema.fieldVal(block, src, err_return_trace, "index", src); +} + +fn zirRestoreErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void { + const inst_data = sema.code.instructions.items(.data)[inst].un_node; + const src = inst_data.src(); + + // This is only relevant at runtime. 
+    if (block.is_comptime) return;
+
+    const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm;
+    const ok = sema.owner_func.?.calls_or_awaits_errorable_fn and
+        sema.mod.comp.bin_file.options.error_return_tracing and
+        backend_supports_error_return_tracing;
+    if (!ok) return;
+
+    const operand = if (inst_data.operand != .none)
+        try sema.resolveInst(inst_data.operand)
+    else
+        .zero_usize;
+
+    const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
+    const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
+    const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty);
+    const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
+    const field_ptr = try sema.structFieldPtr(block, src, err_return_trace, "index", src, stack_trace_ty, true);
+    try sema.storePtr2(block, src, field_ptr, src, operand, src, .store);
+}
+
 fn addToInferredErrorSet(sema: *Sema, uncasted_operand: Air.Inst.Ref) !void {
     assert(sema.fn_ret_ty.zigTypeTag() == .ErrorUnion);
 
@@ -17181,8 +17234,6 @@ fn zirBoolToInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
 fn zirErrorName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
     const inst_data = sema.code.instructions.items(.data)[inst].un_node;
-    const src = inst_data.src();
-    _ = src;
     const operand = try sema.resolveInst(inst_data.operand);
     const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
 
diff --git a/src/Zir.zig b/src/Zir.zig
index 9881ee1617..f08d78f8f2 100644
--- a/src/Zir.zig
+++ b/src/Zir.zig
@@ -988,6 +988,15 @@ pub const Inst = struct {
         /// Uses the `err_defer_code` union field.
         defer_err_code,
 
+        /// Saves the current error return trace index if it exists,
+        /// otherwise just returns zero.
+        /// Uses the `node` union field.
+        save_err_ret_index,
+        /// Sets the error return trace index to zero if no operand is given,
+        /// otherwise restores it to the given value.
+        /// Uses the `un_node` union field.
+        restore_err_ret_index,
+
         /// The ZIR instruction tag is one of the `Extended` ones.
         /// Uses the `extended` union field.
         extended,
 
@@ -1236,6 +1245,8 @@ pub const Inst = struct {
             //.try_ptr_inline,
             .@"defer",
             .defer_err_code,
+            .save_err_ret_index,
+            .restore_err_ret_index,
             => false,
 
             .@"break",
@@ -1305,6 +1316,7 @@ pub const Inst = struct {
             .check_comptime_control_flow,
             .@"defer",
             .defer_err_code,
+            .restore_err_ret_index,
             => true,
 
             .param,
@@ -1530,6 +1542,7 @@ pub const Inst = struct {
             .try_ptr,
             //.try_inline,
             //.try_ptr_inline,
+            .save_err_ret_index,
             => false,
 
             .extended => switch (data.extended.opcode) {
@@ -1810,6 +1823,9 @@ pub const Inst = struct {
             .@"defer" = .@"defer",
             .defer_err_code = .defer_err_code,
 
+            .save_err_ret_index = .node,
+            .restore_err_ret_index = .un_node,
+
             .extended = .extended,
         });
     };
diff --git a/src/print_zir.zig b/src/print_zir.zig
index aab7444e08..5d336b5a73 100644
--- a/src/print_zir.zig
+++ b/src/print_zir.zig
@@ -232,6 +232,7 @@ const Writer = struct {
             .validate_deref,
             .overflow_arithmetic_ptr,
             .check_comptime_control_flow,
+            .restore_err_ret_index,
             => try self.writeUnNode(stream, inst),
 
             .ref,
@@ -405,6 +406,7 @@ const Writer = struct {
             .alloc_inferred_comptime_mut,
             .ret_ptr,
             .ret_type,
+            .save_err_ret_index,
             => try self.writeNode(stream, inst),
 
             .error_value,
@@ -440,7 +442,7 @@ const Writer = struct {
 
             .dbg_block_begin,
             .dbg_block_end,
-            => try stream.writeAll("))"),
+            => try stream.writeAll(")"),
 
             .closure_get => try self.writeInstNode(stream, inst),
 

From eda3eb1561ec9a68e692821d5de71d03e6f50d42 Mon Sep 17 00:00:00 2001
From: Cody Tapscott
Date: Mon, 12 Sep 2022 23:05:50 -0700
Subject: [PATCH 02/12] stage2: "Pop" error trace for break/return within catch

This implements trace "popping" for correctly handled errors within
`catch { ... }` and `else { ... }` blocks. When breaking from these
blocks with any non-error, we pop the error trace frames corresponding
to the operand. When breaking with an error, we preserve the frames so
that error traces "chain" together as usual.

```zig
fn foo(cond1: bool, cond2: bool) !void {
    bar() catch {
        if (cond1) {
            // If baz() result is a non-error, pop the error trace frames from bar()
            // If baz() result is an error, leave the bar() frames on the error trace
            return baz();
        } else if (cond2) {
            // If we break/return an error, then leave the error frames from bar() on the error trace
            return error.Foo;
        }
    };

    // An error returned from here does not include bar()'s error frames in the trace
    return error.Bar;
}
```

Notice that if foo() does not return an error, it leaves no extra
frames on the error trace.

This is piece (1/3) of https://github.com/ziglang/zig/issues/1923#issuecomment-1218495574
---
 src/AstGen.zig        | 133 ++++++++++++++++++++++------
 src/Sema.zig          |  12 ++-
 test/stack_traces.zig | 185 ++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 301 insertions(+), 29 deletions(-)

diff --git a/src/AstGen.zig b/src/AstGen.zig
index 2ef6a0aedc..dbd65519b0 100644
--- a/src/AstGen.zig
+++ b/src/AstGen.zig
@@ -1834,6 +1834,13 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
     const break_label = node_datas[node].lhs;
     const rhs = node_datas[node].rhs;
 
+    // Breaking out of a `catch { ... }` or `else |err| { ... }` block with a non-error value
+    // means that the corresponding error was correctly handled, and the error trace index
+    // needs to be restored so that any entries from the caught error are effectively "popped".
+    //
+    // Note: We only restore for the outermost block, since that will "pop" any nested blocks.
+    var err_trace_index_to_restore: Zir.Inst.Ref = .none;
+
     // Look for the label in the scope.
var scope = parent_scope; while (true) { @@ -1842,6 +1849,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn const block_gz = scope.cast(GenZir).?; if (block_gz.cur_defer_node != 0) { + // We are breaking out of a `defer` block. return astgen.failNodeNotes(node, "cannot break out of defer expression", .{}, &.{ try astgen.errNoteNode( block_gz.cur_defer_node, @@ -1851,6 +1859,11 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn }); } + if (block_gz.saved_err_trace_index != .none) { + // We are breaking out of a `catch { ... }` or `else |err| { ... }`. + err_trace_index_to_restore = block_gz.saved_err_trace_index; + } + const block_inst = blk: { if (break_label != 0) { if (block_gz.label) |*label| { @@ -1862,9 +1875,11 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn } else if (block_gz.break_block != 0) { break :blk block_gz.break_block; } + // If not the target, start over with the parent scope = block_gz.parent; continue; }; + // If we made it here, this block is the target of the break expr const break_tag: Zir.Inst.Tag = if (block_gz.is_inline or block_gz.force_comptime) .break_inline @@ -1874,6 +1889,19 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn if (rhs == 0) { try genDefers(parent_gz, scope, parent_scope, .normal_only); + // As our last action before the break, "pop" the error trace if needed + if (err_trace_index_to_restore != .none) { + // TODO: error-liveness and is_non_err + + _ = try parent_gz.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = err_trace_index_to_restore, + .src_node = parent_gz.nodeIndexToRelative(node), + } }, + }); + } + _ = try parent_gz.addBreak(break_tag, block_inst, .void_value); return Zir.Inst.Ref.unreachable_value; } @@ -1884,6 +1912,19 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn try genDefers(parent_gz, scope, parent_scope, .normal_only); + // As our last action before the break, "pop" the error trace if needed + if (err_trace_index_to_restore != .none) { + // TODO: error-liveness and is_non_err + + _ = try parent_gz.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = err_trace_index_to_restore, + .src_node = parent_gz.nodeIndexToRelative(node), + } }, + }); + } + switch (block_gz.break_result_loc) { .block_ptr => { const br = try parent_gz.addBreak(break_tag, block_inst, operand); @@ -5160,9 +5201,7 @@ fn orelseCatchExpr( block_scope.setBreakResultLoc(rl); defer block_scope.unstack(); - if (do_err_trace) { - block_scope.saved_err_trace_index = try parent_gz.addNode(.save_err_ret_index, node); - } + const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none; const operand_rl: ResultLoc = switch (block_scope.break_result_loc) { .ref => .ref, @@ -5195,6 +5234,12 @@ fn orelseCatchExpr( var else_scope = block_scope.makeSubBlock(scope); defer else_scope.unstack(); + // Any break (of a non-error value) that navigates out of this scope means + // that the error was handled successfully, so this index will be restored. 
+ else_scope.saved_err_trace_index = saved_err_trace_index; + if (else_scope.outermost_err_trace_index == .none) + else_scope.outermost_err_trace_index = saved_err_trace_index; + var err_val_scope: Scope.LocalVal = undefined; const else_sub_scope = blk: { const payload = payload_token orelse break :blk &else_scope.base; @@ -5220,6 +5265,17 @@ fn orelseCatchExpr( const else_result = try expr(&else_scope, else_sub_scope, block_scope.break_result_loc, rhs); if (!else_scope.endsWithNoReturn()) { block_scope.break_count += 1; + + // TODO: Add is_non_err and break check + if (do_err_trace) { + _ = try else_scope.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = saved_err_trace_index, + .src_node = parent_gz.nodeIndexToRelative(node), + } }, + }); + } } try checkUsed(parent_gz, &else_scope.base, else_sub_scope); @@ -5243,15 +5299,6 @@ fn orelseCatchExpr( block, break_tag, ); - if (do_err_trace) { - _ = try parent_gz.add(.{ - .tag = .restore_err_ret_index, - .data = .{ .un_node = .{ - .operand = parent_gz.saved_err_trace_index, - .src_node = parent_gz.nodeIndexToRelative(node), - } }, - }); - } return result; } @@ -5454,9 +5501,7 @@ fn ifExpr( block_scope.setBreakResultLoc(rl); defer block_scope.unstack(); - if (do_err_trace) { - block_scope.saved_err_trace_index = try parent_gz.addNode(.save_err_ret_index, node); - } + const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none; const payload_is_ref = if (if_full.payload_token) |payload_token| token_tags[payload_token] == .asterisk @@ -5574,6 +5619,12 @@ fn ifExpr( var else_scope = parent_gz.makeSubBlock(scope); defer else_scope.unstack(); + // Any break (of a non-error value) that navigates out of this scope means + // that the error was handled successfully, so this index will be restored. + else_scope.saved_err_trace_index = saved_err_trace_index; + if (else_scope.outermost_err_trace_index == .none) + else_scope.outermost_err_trace_index = saved_err_trace_index; + const else_node = if_full.ast.else_expr; const else_info: struct { src: Ast.Node.Index, @@ -5625,6 +5676,18 @@ fn ifExpr( }, }; + if (do_err_trace and !else_scope.endsWithNoReturn()) { + // TODO: is_non_err and other checks + + _ = try else_scope.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = saved_err_trace_index, + .src_node = parent_gz.nodeIndexToRelative(node), + } }, + }); + } + const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break"; const result = try finishThenElseBlock( parent_gz, @@ -5641,15 +5704,6 @@ fn ifExpr( block, break_tag, ); - if (do_err_trace) { - _ = try parent_gz.add(.{ - .tag = .restore_err_ret_index, - .data = .{ .un_node = .{ - .operand = parent_gz.saved_err_trace_index, - .src_node = parent_gz.nodeIndexToRelative(node), - } }, - }); - } return result; } @@ -6780,11 +6834,24 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref const operand = try reachableExpr(gz, scope, rl, operand_node, node); gz.anon_name_strategy = prev_anon_name_strategy; + // TODO: This should be almost identical for every break/ret switch (nodeMayEvalToError(tree, operand_node)) { .never => { // Returning a value that cannot be an error; skip error defers. 
try genDefers(gz, defer_outer, scope, .normal_only); try emitDbgStmt(gz, ret_line, ret_column); + + // As our last action before the return, "pop" the error trace if needed + if (gz.outermost_err_trace_index != .none) { + _ = try gz.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = gz.outermost_err_trace_index, + .src_node = gz.nodeIndexToRelative(node), + } }, + }); + } + try gz.addRet(rl, operand, node); return Zir.Inst.Ref.unreachable_value; }, @@ -6826,6 +6893,17 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref }; try genDefers(&else_scope, defer_outer, scope, which_ones); try emitDbgStmt(&else_scope, ret_line, ret_column); + + // As our last action before the return, "pop" the error trace if needed + if (else_scope.outermost_err_trace_index != .none) { + _ = try else_scope.add(.{ + .tag = .restore_err_ret_index, + .data = .{ .un_node = .{ + .operand = else_scope.outermost_err_trace_index, + .src_node = else_scope.nodeIndexToRelative(node), + } }, + }); + } try else_scope.addRet(rl, operand, node); try setCondBrPayload(condbr, is_non_err, &then_scope, 0, &else_scope, 0); @@ -10334,7 +10412,12 @@ const GenZir = struct { /// Keys are the raw instruction index, values are the closure_capture instruction. captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{}, + /// If this GenZir corresponds to a `catch { ... }` or `else |err| { ... }` block, + /// this err_trace_index can be restored to "pop" the trace entries for the block. saved_err_trace_index: Zir.Inst.Ref = .none, + /// When returning from a function with a non-error, we must pop all trace entries + /// from any containing `catch { ... }` or `else |err| { ... }` blocks. + outermost_err_trace_index: Zir.Inst.Ref = .none, const unstacked_top = std.math.maxInt(usize); /// Call unstack before adding any new instructions to containing GenZir. @@ -10380,7 +10463,7 @@ const GenZir = struct { .any_defer_node = gz.any_defer_node, .instructions = gz.instructions, .instructions_top = gz.instructions.items.len, - .saved_err_trace_index = gz.saved_err_trace_index, + .outermost_err_trace_index = gz.outermost_err_trace_index, }; } diff --git a/src/Sema.zig b/src/Sema.zig index 28a012bfb1..ce3a10fd38 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -16190,9 +16190,14 @@ fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE // This is only relevant at runtime. if (block.is_comptime) return Air.Inst.Ref.zero_usize; + // In the corner case that `catch { ... }` or `else |err| { ... }` is used in a function + // that does *not* make any errorable calls, we still need an error trace to interact with + // the AIR instructions we've already emitted. 
+ if (sema.owner_func != null) + sema.owner_func.?.calls_or_awaits_errorable_fn = true; + const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; - const ok = sema.owner_func.?.calls_or_awaits_errorable_fn and - sema.mod.comp.bin_file.options.error_return_tracing and + const ok = sema.mod.comp.bin_file.options.error_return_tracing and backend_supports_error_return_tracing; if (!ok) return Air.Inst.Ref.zero_usize; @@ -16211,8 +16216,7 @@ fn zirRestoreErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Compi if (block.is_comptime) return; const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; - const ok = sema.owner_func.?.calls_or_awaits_errorable_fn and - sema.mod.comp.bin_file.options.error_return_tracing and + const ok = sema.mod.comp.bin_file.options.error_return_tracing and backend_supports_error_return_tracing; if (!ok) return; diff --git a/test/stack_traces.zig b/test/stack_traces.zig index 3a8682b5a5..751b33b43e 100644 --- a/test/stack_traces.zig +++ b/test/stack_traces.zig @@ -98,6 +98,191 @@ pub fn addCases(cases: *tests.StackTracesContext) void { }, }); + cases.addCase(.{ + .name = "try return + handled catch/if-else", + .source = + \\fn foo() !void { + \\ return error.TheSkyIsFalling; + \\} + \\ + \\pub fn main() !void { + \\ foo() catch {}; // should not affect error trace + \\ if (foo()) |_| {} else |_| { + \\ // should also not affect error trace + \\ } + \\ try foo(); + \\} + , + .Debug = .{ + .expect = + \\error: TheSkyIsFalling + \\source.zig:2:5: [address] in foo (test) + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:10:5: [address] in main (test) + \\ try foo(); + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + .linux, // defeated by aggressive inlining + }, + .expect = + \\error: TheSkyIsFalling + \\source.zig:2:5: [address] in [function] + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:10:5: [address] in [function] + \\ try foo(); + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: TheSkyIsFalling + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: TheSkyIsFalling + \\ + , + }, + }); + + cases.addCase(.{ + .name = "try return from within catch", + .source = + \\fn foo() !void { + \\ return error.TheSkyIsFalling; + \\} + \\ + \\fn bar() !void { + \\ return error.AndMyCarIsOutOfGas; + \\} + \\ + \\pub fn main() !void { + \\ foo() catch { // error trace should include foo() + \\ try bar(); + \\ }; + \\} + , + .Debug = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\source.zig:2:5: [address] in foo (test) + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:6:5: [address] in bar (test) + \\ return error.AndMyCarIsOutOfGas; + \\ ^ + \\source.zig:11:9: [address] in main (test) + \\ try bar(); + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + }, + .expect = + \\error: AndMyCarIsOutOfGas + \\source.zig:2:5: [address] in [function] + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:6:5: [address] in [function] + \\ return error.AndMyCarIsOutOfGas; + \\ ^ + \\source.zig:11:9: [address] in [function] + \\ try bar(); + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\ + , + }, + }); + + cases.addCase(.{ + .name = "try return from within if-else", + .source = + \\fn foo() !void { + \\ return error.TheSkyIsFalling; + \\} + \\ + \\fn bar() !void { + \\ return error.AndMyCarIsOutOfGas; + \\} + 
\\ + \\pub fn main() !void { + \\ if (foo()) |_| {} else |_| { // error trace should include foo() + \\ try bar(); + \\ } + \\} + , + .Debug = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\source.zig:2:5: [address] in foo (test) + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:6:5: [address] in bar (test) + \\ return error.AndMyCarIsOutOfGas; + \\ ^ + \\source.zig:11:9: [address] in main (test) + \\ try bar(); + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + }, + .expect = + \\error: AndMyCarIsOutOfGas + \\source.zig:2:5: [address] in [function] + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:6:5: [address] in [function] + \\ return error.AndMyCarIsOutOfGas; + \\ ^ + \\source.zig:11:9: [address] in [function] + \\ try bar(); + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\ + , + }, + }); + cases.addCase(.{ .name = "try try return return", .source = From 0c3a50fe1c1370c975d7de1f2f00458b4a3ec299 Mon Sep 17 00:00:00 2001 From: Cody Tapscott Date: Mon, 12 Sep 2022 23:09:14 -0700 Subject: [PATCH 03/12] stage2: Do not pop error trace if result is an error This allows for errors to be "re-thrown" by yielding any error as the result of a catch block. For example: ```zig fn errorable() !void { return error.FallingOutOfPlane; } fn foo(have_parachute: bool) !void { return errorable() catch |err| b: { if (have_parachute) { // error trace will include the call to errorable() break :b error.NoParachute; } else { return; } }; } pub fn main() !void { // Anything that returns a non-error does not pollute the error trace. try foo(true); // This error trace will still include errorable(), whose error was "re-thrown" by foo() try foo(false); } ``` This is piece (2/3) of https://github.com/ziglang/zig/issues/1923#issuecomment-1218495574 --- src/AstGen.zig | 236 ++++++++++++++++++++++++++---------------- test/stack_traces.zig | 53 ++++++++++ 2 files changed, 200 insertions(+), 89 deletions(-) diff --git a/src/AstGen.zig b/src/AstGen.zig index dbd65519b0..b7a035902a 100644 --- a/src/AstGen.zig +++ b/src/AstGen.zig @@ -223,6 +223,10 @@ pub const ResultLoc = union(enum) { /// The expression must generate a pointer rather than a value. For example, the left hand side /// of an assignment uses this kind of result location. ref, + /// Exactly like `none`, except also indicates this is an error-handling expr (try/catch/return etc.) + catch_none, + /// Exactly like `ref`, except also indicates this is an error-handling expr (try/catch/return etc.) + catch_ref, /// The expression will be coerced into this type, but it will be evaluated as an rvalue. ty: Zir.Inst.Ref, /// Same as `ty` but for shift operands. @@ -265,7 +269,7 @@ pub const ResultLoc = union(enum) { fn strategy(rl: ResultLoc, block_scope: *GenZir) Strategy { switch (rl) { // In this branch there will not be any store_to_block_ptr instructions. 
- .none, .ty, .ty_shift_operand, .coerced_ty, .ref => return .{ + .none, .catch_none, .ty, .ty_shift_operand, .coerced_ty, .ref, .catch_ref => return .{ .tag = .break_operand, .elide_store_to_block_ptr_instructions = false, }, @@ -838,7 +842,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr const lhs = try expr(gz, scope, .none, node_datas[node].lhs); _ = try gz.addUnNode(.validate_deref, lhs, node); switch (rl) { - .ref => return lhs, + .ref, .catch_ref => return lhs, else => { const result = try gz.addUnNode(.load, lhs, node); return rvalue(gz, rl, result, node); @@ -855,7 +859,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr return rvalue(gz, rl, result, node); }, .unwrap_optional => switch (rl) { - .ref => return gz.addUnNode( + .ref, .catch_ref => return gz.addUnNode( .optional_payload_safe_ptr, try expr(gz, scope, .ref, node_datas[node].lhs), node, @@ -900,7 +904,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr else null; switch (rl) { - .ref => return orelseCatchExpr( + .ref, .catch_ref => return orelseCatchExpr( gz, scope, rl, @@ -927,7 +931,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr } }, .@"orelse" => switch (rl) { - .ref => return orelseCatchExpr( + .ref, .catch_ref => return orelseCatchExpr( gz, scope, rl, @@ -1372,11 +1376,11 @@ fn arrayInitExpr( } return Zir.Inst.Ref.void_value; }, - .ref => { + .ref, .catch_ref => { const tag: Zir.Inst.Tag = if (types.array != .none) .array_init_ref else .array_init_anon_ref; return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag); }, - .none => { + .none, .catch_none => { const tag: Zir.Inst.Tag = if (types.array != .none) .array_init else .array_init_anon; return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag); }, @@ -1608,7 +1612,7 @@ fn structInitExpr( } return Zir.Inst.Ref.void_value; }, - .ref => { + .ref, .catch_ref => { if (struct_init.ast.type_expr != 0) { const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); _ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node); @@ -1617,7 +1621,7 @@ fn structInitExpr( return structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon_ref); } }, - .none => { + .none, .catch_none => { if (struct_init.ast.type_expr != 0) { const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); _ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node); @@ -1891,15 +1895,8 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn // As our last action before the break, "pop" the error trace if needed if (err_trace_index_to_restore != .none) { - // TODO: error-liveness and is_non_err - - _ = try parent_gz.add(.{ - .tag = .restore_err_ret_index, - .data = .{ .un_node = .{ - .operand = err_trace_index_to_restore, - .src_node = parent_gz.nodeIndexToRelative(node), - } }, - }); + // void is a non-error so we always pop - no need to call `popErrorReturnTrace` + _ = try parent_gz.addUnNode(.restore_err_ret_index, err_trace_index_to_restore, node); } _ = try parent_gz.addBreak(break_tag, block_inst, .void_value); @@ -1914,15 +1911,15 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn // As our last action before the break, "pop" the error trace if needed if (err_trace_index_to_restore != .none) { - // TODO: error-liveness and is_non_err - - _ = try parent_gz.add(.{ - .tag = 
.restore_err_ret_index,
-                .data = .{ .un_node = .{
-                    .operand = err_trace_index_to_restore,
-                    .src_node = parent_gz.nodeIndexToRelative(node),
-                } },
-            });
+            // Pop the error trace, unless the operand is an error and breaking to an error-handling expr.
+            try popErrorReturnTrace(
+                parent_gz,
+                scope,
+                block_gz.break_result_loc,
+                rhs,
+                operand,
+                err_trace_index_to_restore,
+            );
         }
 
         switch (block_gz.break_result_loc) {
             .block_ptr => {
                 const br = try parent_gz.addBreak(break_tag, block_inst, operand);
@@ -2177,7 +2174,7 @@ fn labeledBlockExpr(
             try block_scope.setBlockBody(block_inst);
             const block_ref = indexToRef(block_inst);
             switch (rl) {
-                .ref => return block_ref,
+                .ref, .catch_ref => return block_ref,
                 else => return rvalue(gz, rl, block_ref, block_node),
             }
         },
@@ -5141,14 +5138,14 @@ fn tryExpr(
     const try_column = astgen.source_column;
 
     const operand_rl: ResultLoc = switch (rl) {
-        .ref => .ref,
-        else => .none,
+        .ref, .catch_ref => .catch_ref,
+        else => .catch_none,
    };
     // This could be a pointer or value depending on the `rl` parameter.
     const operand = try reachableExpr(parent_gz, scope, operand_rl, operand_node, node);
     const is_inline = parent_gz.force_comptime;
     const is_inline_bit = @as(u2, @boolToInt(is_inline));
-    const is_ptr_bit = @as(u2, @boolToInt(operand_rl == .ref)) << 1;
+    const is_ptr_bit = @as(u2, @boolToInt(operand_rl == .ref or operand_rl == .catch_ref)) << 1;
     const block_tag: Zir.Inst.Tag = switch (is_inline_bit | is_ptr_bit) {
         0b00 => .@"try",
         0b01 => .@"try",
@@ -5164,7 +5161,7 @@ fn tryExpr(
     defer else_scope.unstack();
 
     const err_tag = switch (rl) {
-        .ref => Zir.Inst.Tag.err_union_code_ptr,
+        .ref, .catch_ref => Zir.Inst.Tag.err_union_code_ptr,
         else => Zir.Inst.Tag.err_union_code,
     };
     const err_code = try else_scope.addUnNode(err_tag, operand, node);
@@ -5175,11 +5172,86 @@ fn tryExpr(
     try else_scope.setTryBody(try_inst, operand);
     const result = indexToRef(try_inst);
     switch (rl) {
-        .ref => return result,
+        .ref, .catch_ref => return result,
         else => return rvalue(parent_gz, rl, result, node),
     }
 }
 
+/// Pops the error return trace, unless:
+///  1. the result is a non-error, AND
+///  2. the result location corresponds to an error-handling expression
+///
+/// For reference, the full list of error-handling expressions is:
+///  - try X
+///  - X catch ...
+///  - if (X) |_| { ... } else |_| { ... }
+///  - return X
+///
+fn popErrorReturnTrace(
+    gz: *GenZir,
+    scope: *Scope,
+    rl: ResultLoc,
+    node: Ast.Node.Index,
+    result_inst: Zir.Inst.Ref,
+    error_trace_index: Zir.Inst.Ref,
+) InnerError!void {
+    const astgen = gz.astgen;
+    const tree = astgen.tree;
+
+    const result_is_err = nodeMayEvalToError(tree, node);
+
+    // If we are breaking to a try/catch/error-union-if/return, the error trace propagates.
+    const propagate_error_trace = switch (rl) {
+        .catch_none, .catch_ref => true, // Propagate to try/catch/error-union-if
+        .ptr, .ty => |ref| b: { // Otherwise, propagate if result loc is a return
+            const inst = refToIndex(ref) orelse break :b false;
+            const zir_tags = astgen.instructions.items(.tag);
+            break :b zir_tags[inst] == .ret_ptr or zir_tags[inst] == .ret_type;
+        },
+        else => false,
+    };
+
+    if (result_is_err == .never or !propagate_error_trace) {
+        // We are returning a non-error, or returning to a non-error-handling operator.
+        // In either case, we need to pop the error trace.
+        _ = try gz.addUnNode(.restore_err_ret_index, error_trace_index, node);
+    } else if (result_is_err == .maybe) {
+        // We are returning to an error-handling operator with a maybe-error.
+        // Restore only if it's a non-error, implying the catch was successfully handled.
+ var block_scope = gz.makeSubBlock(scope); + block_scope.setBreakResultLoc(.discard); + defer block_scope.unstack(); + + // Emit conditional branch for restoring error trace index + const is_non_err = switch (rl) { + .catch_ref => try block_scope.addUnNode(.is_non_err_ptr, result_inst, node), + .ptr => |ptr| try block_scope.addUnNode(.is_non_err_ptr, ptr, node), + .ty, .catch_none => try block_scope.addUnNode(.is_non_err, result_inst, node), + else => unreachable, // Error-handling operators only generate the above result locations + }; + const condbr = try block_scope.addCondBr(.condbr, node); + + const block = try gz.makeBlockInst(.block, node); + try block_scope.setBlockBody(block); + // block_scope unstacked now, can add new instructions to gz + + try gz.instructions.append(astgen.gpa, block); + + var then_scope = block_scope.makeSubBlock(scope); + defer then_scope.unstack(); + + _ = try then_scope.addUnNode(.restore_err_ret_index, error_trace_index, node); + const then_break = try then_scope.makeBreak(.@"break", block, .void_value); + + var else_scope = block_scope.makeSubBlock(scope); + defer else_scope.unstack(); + + const else_break = try else_scope.makeBreak(.@"break", block, .void_value); + + try setCondBrPayload(condbr, is_non_err, &then_scope, then_break, &else_scope, else_break); + } +} + fn orelseCatchExpr( parent_gz: *GenZir, scope: *Scope, @@ -5204,8 +5276,8 @@ fn orelseCatchExpr( const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none; const operand_rl: ResultLoc = switch (block_scope.break_result_loc) { - .ref => .ref, - else => .none, + .ref, .catch_ref => if (do_err_trace) ResultLoc{ .catch_ref = {} } else .ref, + else => if (do_err_trace) ResultLoc{ .catch_none = {} } else .none, }; block_scope.break_count += 1; // This could be a pointer or value depending on the `operand_rl` parameter. @@ -5227,7 +5299,7 @@ fn orelseCatchExpr( // This could be a pointer or value depending on `unwrap_op`. 
const unwrapped_payload = try then_scope.addUnNode(unwrap_op, operand, node); const then_result = switch (rl) { - .ref => unwrapped_payload, + .ref, .catch_ref => unwrapped_payload, else => try rvalue(&then_scope, block_scope.break_result_loc, unwrapped_payload, node), }; @@ -5266,15 +5338,15 @@ fn orelseCatchExpr( if (!else_scope.endsWithNoReturn()) { block_scope.break_count += 1; - // TODO: Add is_non_err and break check if (do_err_trace) { - _ = try else_scope.add(.{ - .tag = .restore_err_ret_index, - .data = .{ .un_node = .{ - .operand = saved_err_trace_index, - .src_node = parent_gz.nodeIndexToRelative(node), - } }, - }); + try popErrorReturnTrace( + &else_scope, + else_sub_scope, + block_scope.break_result_loc, + rhs, + else_result, + saved_err_trace_index, + ); } } try checkUsed(parent_gz, &else_scope.base, else_sub_scope); @@ -5351,7 +5423,7 @@ fn finishThenElseBlock( } const block_ref = indexToRef(main_block); switch (rl) { - .ref => return block_ref, + .ref, .catch_ref => return block_ref, else => return rvalue(parent_gz, rl, block_ref, node), } }, @@ -5375,7 +5447,7 @@ fn fieldAccess( node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { switch (rl) { - .ref => return addFieldAccess(.field_ptr, gz, scope, .ref, node), + .ref, .catch_ref => return addFieldAccess(.field_ptr, gz, scope, .ref, node), else => { const access = try addFieldAccess(.field_val, gz, scope, .none, node); return rvalue(gz, rl, access, node); @@ -5416,7 +5488,7 @@ fn arrayAccess( const tree = astgen.tree; const node_datas = tree.nodes.items(.data); switch (rl) { - .ref => return gz.addPlNode(.elem_ptr_node, node, Zir.Inst.Bin{ + .ref, .catch_ref => return gz.addPlNode(.elem_ptr_node, node, Zir.Inst.Bin{ .lhs = try expr(gz, scope, .ref, node_datas[node].lhs), .rhs = try expr(gz, scope, .{ .ty = .usize_type }, node_datas[node].rhs), }), @@ -5514,7 +5586,7 @@ fn ifExpr( bool_bit: Zir.Inst.Ref, } = c: { if (if_full.error_token) |_| { - const cond_rl: ResultLoc = if (payload_is_ref) .ref else .none; + const cond_rl: ResultLoc = if (payload_is_ref) .catch_ref else .catch_none; const err_union = try expr(&block_scope, &block_scope.base, cond_rl, if_full.ast.cond_expr); const tag: Zir.Inst.Tag = if (payload_is_ref) .is_non_err_ptr else .is_non_err; break :c .{ @@ -5660,6 +5732,17 @@ fn ifExpr( const e = try expr(&else_scope, sub_scope, block_scope.break_result_loc, else_node); if (!else_scope.endsWithNoReturn()) { block_scope.break_count += 1; + + if (do_err_trace) { + try popErrorReturnTrace( + &else_scope, + sub_scope, + block_scope.break_result_loc, + else_node, + e, + saved_err_trace_index, + ); + } } try checkUsed(parent_gz, &else_scope.base, sub_scope); try else_scope.addDbgBlockEnd(); @@ -5676,18 +5759,6 @@ fn ifExpr( }, }; - if (do_err_trace and !else_scope.endsWithNoReturn()) { - // TODO: is_non_err and other checks - - _ = try else_scope.add(.{ - .tag = .restore_err_ret_index, - .data = .{ .un_node = .{ - .operand = saved_err_trace_index, - .src_node = parent_gz.nodeIndexToRelative(node), - } }, - }); - } - const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break"; const result = try finishThenElseBlock( parent_gz, @@ -6760,7 +6831,7 @@ fn switchExpr( } const block_ref = indexToRef(switch_block); - if (strat.tag == .break_operand and strat.elide_store_to_block_ptr_instructions and rl != .ref) + if (strat.tag == .break_operand and strat.elide_store_to_block_ptr_instructions and rl != .ref and rl != .catch_ref) return rvalue(parent_gz, rl, block_ref, switch_node); return 
block_ref; } @@ -6839,19 +6910,12 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref .never => { // Returning a value that cannot be an error; skip error defers. try genDefers(gz, defer_outer, scope, .normal_only); - try emitDbgStmt(gz, ret_line, ret_column); // As our last action before the return, "pop" the error trace if needed - if (gz.outermost_err_trace_index != .none) { - _ = try gz.add(.{ - .tag = .restore_err_ret_index, - .data = .{ .un_node = .{ - .operand = gz.outermost_err_trace_index, - .src_node = gz.nodeIndexToRelative(node), - } }, - }); - } + if (gz.outermost_err_trace_index != .none) + _ = try gz.addUnNode(.restore_err_ret_index, gz.outermost_err_trace_index, node); + try emitDbgStmt(gz, ret_line, ret_column); try gz.addRet(rl, operand, node); return Zir.Inst.Ref.unreachable_value; }, @@ -6882,6 +6946,11 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref defer then_scope.unstack(); try genDefers(&then_scope, defer_outer, scope, .normal_only); + + // As our last action before the return, "pop" the error trace if needed + if (then_scope.outermost_err_trace_index != .none) + _ = try then_scope.addUnNode(.restore_err_ret_index, then_scope.outermost_err_trace_index, node); + try emitDbgStmt(&then_scope, ret_line, ret_column); try then_scope.addRet(rl, operand, node); @@ -6893,17 +6962,6 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref }; try genDefers(&else_scope, defer_outer, scope, which_ones); try emitDbgStmt(&else_scope, ret_line, ret_column); - - // As our last action before the return, "pop" the error trace if needed - if (else_scope.outermost_err_trace_index != .none) { - _ = try else_scope.add(.{ - .tag = .restore_err_ret_index, - .data = .{ .un_node = .{ - .operand = else_scope.outermost_err_trace_index, - .src_node = else_scope.nodeIndexToRelative(node), - } }, - }); - } try else_scope.addRet(rl, operand, node); try setCondBrPayload(condbr, is_non_err, &then_scope, 0, &else_scope, 0); @@ -7068,7 +7126,7 @@ fn localVarRef( ); switch (rl) { - .ref => return ptr_inst, + .ref, .catch_ref => return ptr_inst, else => { const loaded = try gz.addUnNode(.load, ptr_inst, ident); return rvalue(gz, rl, loaded, ident); @@ -7105,7 +7163,7 @@ fn localVarRef( // Decl references happen by name rather than ZIR index so that when unrelated // decls are modified, ZIR code containing references to them can be unmodified. 
switch (rl) { - .ref => return gz.addStrTok(.decl_ref, name_str_index, ident_token), + .ref, .catch_ref => return gz.addStrTok(.decl_ref, name_str_index, ident_token), else => { const result = try gz.addStrTok(.decl_val, name_str_index, ident_token); return rvalue(gz, rl, result, ident); @@ -7452,7 +7510,7 @@ fn as( ) InnerError!Zir.Inst.Ref { const dest_type = try typeExpr(gz, scope, lhs); switch (rl) { - .none, .discard, .ref, .ty, .ty_shift_operand, .coerced_ty => { + .none, .catch_none, .discard, .ref, .catch_ref, .ty, .ty_shift_operand, .coerced_ty => { const result = try reachableExpr(gz, scope, .{ .ty = dest_type }, rhs, node); return rvalue(gz, rl, result, node); }, @@ -7652,7 +7710,7 @@ fn builtinCall( return rvalue(gz, rl, result, node); }, .field => { - if (rl == .ref) { + if (rl == .ref or rl == .catch_ref) { return gz.addPlNode(.field_ptr_named, node, Zir.Inst.FieldNamed{ .lhs = try expr(gz, scope, .ref, params[0]), .field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]), @@ -9600,13 +9658,13 @@ fn rvalue( }; if (gz.endsWithNoReturn()) return result; switch (rl) { - .none, .coerced_ty => return result, + .none, .catch_none, .coerced_ty => return result, .discard => { // Emit a compile error for discarding error values. _ = try gz.addUnNode(.ensure_result_non_error, result, src_node); return result; }, - .ref => { + .ref, .catch_ref => { // We need a pointer but we have a value. // Unfortunately it's not quite as simple as directly emitting a ref // instruction here because we need subsequent address-of operator on @@ -10575,7 +10633,7 @@ const GenZir = struct { gz.break_result_loc = parent_rl; }, - .discard, .none, .ref => { + .discard, .none, .catch_none, .ref, .catch_ref => { gz.rl_ty_inst = .none; gz.break_result_loc = parent_rl; }, diff --git a/test/stack_traces.zig b/test/stack_traces.zig index 751b33b43e..3b8cc8a6cd 100644 --- a/test/stack_traces.zig +++ b/test/stack_traces.zig @@ -155,6 +155,59 @@ pub fn addCases(cases: *tests.StackTracesContext) void { }, }); + cases.addCase(.{ + .name = "catch and re-throw error", + .source = + \\fn foo() !void { + \\ return error.TheSkyIsFalling; + \\} + \\ + \\pub fn main() !void { + \\ return foo() catch error.AndMyCarIsOutOfGas; + \\} + , + .Debug = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\source.zig:2:5: [address] in foo (test) + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:6:5: [address] in main (test) + \\ return foo() catch error.AndMyCarIsOutOfGas; + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + .linux, // defeated by aggressive inlining + }, + .expect = + \\error: AndMyCarIsOutOfGas + \\source.zig:2:5: [address] in [function] + \\ return error.TheSkyIsFalling; + \\ ^ + \\source.zig:6:5: [address] in [function] + \\ return foo() catch error.AndMyCarIsOutOfGas; + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: AndMyCarIsOutOfGas + \\ + , + }, + }); + cases.addCase(.{ .name = "try return from within catch", .source = From 3007fdde45868142654d0bfa59bc0e17e5f24a1c Mon Sep 17 00:00:00 2001 From: Cody Tapscott Date: Tue, 13 Sep 2022 15:31:07 -0700 Subject: [PATCH 04/12] stage2: Pop error trace when storing error to var/const In order to enforce a strict stack discipline for error return traces, we cannot track error return traces that are stored in variables: ```zig const x = errorable(); // errorable()'s error return trace is killed here // v-- error trace 
starts here instead return x catch error.UnknownError; ``` In order to propagate error return traces, function calls need to be passed directly to an error-handling expression (`if`, `catch`, `try` or `return`): ```zig // When passed directly to `catch`, the return trace is propagated return errorable() catch error.UnknownError; // Using a break also works return blk: { // code here break :blk errorable(); } catch error.UnknownError; ``` Why do we need this restriction? Without it, multiple errors can co-exist with their own error traces. Handling that situation correctly means either: a. Dynamically allocating trace memory and tracking lifetimes, OR b. Allowing the production of one error to interfere with the trace of another (which is the current status quo) This is piece (3/3) of https://github.com/ziglang/zig/issues/1923#issuecomment-1218495574 --- lib/test_runner.zig | 35 +++++++++--------- src/AstGen.zig | 13 +++++++ src/Sema.zig | 86 +++++++++++++++++++++++++++++++------------ src/Zir.zig | 3 +- test/stack_traces.zig | 52 ++++++++++++++++++++++++++ 5 files changed, 147 insertions(+), 42 deletions(-) diff --git a/lib/test_runner.zig b/lib/test_runner.zig index aafaf1b073..8cde13f9a6 100644 --- a/lib/test_runner.zig +++ b/lib/test_runner.zig @@ -44,23 +44,24 @@ pub fn main() void { if (!have_tty) { std.debug.print("{d}/{d} {s}... ", .{ i + 1, test_fn_list.len, test_fn.name }); } - const result = if (test_fn.async_frame_size) |size| switch (io_mode) { - .evented => blk: { - if (async_frame_buffer.len < size) { - std.heap.page_allocator.free(async_frame_buffer); - async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory"); - } - const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func); - break :blk await @asyncCall(async_frame_buffer, {}, casted_fn, .{}); - }, - .blocking => { - skip_count += 1; - test_node.end(); - progress.log("SKIP (async test)\n", .{}); - continue; - }, - } else test_fn.func(); - if (result) |_| { + if (result: { + if (test_fn.async_frame_size) |size| switch (io_mode) { + .evented => { + if (async_frame_buffer.len < size) { + std.heap.page_allocator.free(async_frame_buffer); + async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory"); + } + const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func); + break :result await @asyncCall(async_frame_buffer, {}, casted_fn, .{}); + }, + .blocking => { + skip_count += 1; + test_node.end(); + progress.log("SKIP (async test)\n", .{}); + continue; + }, + } else break :result test_fn.func(); + }) |_| { ok_count += 1; test_node.end(); if (!have_tty) std.debug.print("OK\n", .{}); diff --git a/src/AstGen.zig b/src/AstGen.zig index b7a035902a..ec67a89810 100644 --- a/src/AstGen.zig +++ b/src/AstGen.zig @@ -8545,9 +8545,22 @@ fn callExpr( scratch_index += 1; } + // If our result location is a try/catch/error-union-if/return, the error trace propagates. + // Otherwise, it should always be popped (handled in Sema). 
+ const propagate_error_trace = switch (rl) { + .catch_none, .catch_ref => true, // Propagate to try/catch/error-union-if + .ptr, .ty => |ref| b: { // Otherwise, propagate if result loc is a return + const inst = refToIndex(ref) orelse break :b false; + const zir_tags = astgen.instructions.items(.tag); + break :b zir_tags[inst] == .ret_ptr or zir_tags[inst] == .ret_type; + }, + else => false, + }; + const payload_index = try addExtra(astgen, Zir.Inst.Call{ .callee = callee, .flags = .{ + .pop_error_return_trace = !propagate_error_trace, .packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @enumToInt(modifier)), .args_len = @intCast(Zir.Inst.Call.Flags.PackedArgsLen, call.ast.params.len), }, diff --git a/src/Sema.zig b/src/Sema.zig index ce3a10fd38..93f496a43a 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -5664,6 +5664,7 @@ fn zirCall( const modifier = @intToEnum(std.builtin.CallOptions.Modifier, extra.data.flags.packed_modifier); const ensure_result_used = extra.data.flags.ensure_result_used; + const pop_error_return_trace = extra.data.flags.pop_error_return_trace; var func = try sema.resolveInst(extra.data.callee); var resolved_args: []Air.Inst.Ref = undefined; @@ -5771,7 +5772,7 @@ fn zirCall( resolved_args[arg_index] = resolved; } - return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, resolved_args, bound_arg_src); + return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, pop_error_return_trace, resolved_args, bound_arg_src); } const GenericCallAdapter = struct { @@ -5883,6 +5884,7 @@ fn analyzeCall( call_src: LazySrcLoc, modifier: std.builtin.CallOptions.Modifier, ensure_result_used: bool, + pop_error_return_trace: bool, uncasted_args: []const Air.Inst.Ref, bound_arg_src: ?LazySrcLoc, ) CompileError!Air.Inst.Ref { @@ -6333,19 +6335,55 @@ fn analyzeCall( sema.owner_func.?.calls_or_awaits_errorable_fn = true; } - try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len + - args.len); - const func_inst = try block.addInst(.{ - .tag = call_tag, - .data = .{ .pl_op = .{ - .operand = func, - .payload = sema.addExtraAssumeCapacity(Air.Call{ - .args_len = @intCast(u32, args.len), - }), - } }, - }); - sema.appendRefsAssumeCapacity(args); - break :res func_inst; + const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; + const emit_error_trace_save_restore = sema.mod.comp.bin_file.options.error_return_tracing and + backend_supports_error_return_tracing and + pop_error_return_trace and func_ty_info.return_type.isError(); + + if (emit_error_trace_save_restore) { + // This function call is error-able (and so can generate an error trace), but AstGen determined + // that its result does not go to an error-handling operator (try/catch/return etc.). We need to + // save and restore the error trace index here, effectively "popping" the new entries immediately. 
+ + const unresolved_stack_trace_ty = try sema.getBuiltinType(block, call_src, "StackTrace"); + const stack_trace_ty = try sema.resolveTypeFields(block, call_src, unresolved_stack_trace_ty); + const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty); + const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty); + const field_ptr = try sema.structFieldPtr(block, call_src, err_return_trace, "index", call_src, stack_trace_ty, true); + + const saved_index = try sema.analyzeLoad(block, call_src, field_ptr, call_src); + + try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len + + args.len); + const func_inst = try block.addInst(.{ + .tag = call_tag, + .data = .{ .pl_op = .{ + .operand = func, + .payload = sema.addExtraAssumeCapacity(Air.Call{ + .args_len = @intCast(u32, args.len), + }), + } }, + }); + sema.appendRefsAssumeCapacity(args); + + try sema.storePtr2(block, call_src, field_ptr, call_src, saved_index, call_src, .store); + + break :res func_inst; + } else { + try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len + + args.len); + const func_inst = try block.addInst(.{ + .tag = call_tag, + .data = .{ .pl_op = .{ + .operand = func, + .payload = sema.addExtraAssumeCapacity(Air.Call{ + .args_len = @intCast(u32, args.len), + }), + } }, + }); + sema.appendRefsAssumeCapacity(args); + break :res func_inst; + } }; if (ensure_result_used) { @@ -10927,7 +10965,7 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op const panic_fn = try sema.getBuiltin(block, src, "panicUnwrapError"); const err_return_trace = try sema.getErrorReturnTrace(block, src); const args: [2]Air.Inst.Ref = .{ err_return_trace, operand }; - _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, &args, null); + _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, false, &args, null); return true; }, .panic => { @@ -10938,7 +10976,7 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op const panic_fn = try sema.getBuiltin(block, src, "panic"); const err_return_trace = try sema.getErrorReturnTrace(block, src); const args: [3]Air.Inst.Ref = .{ msg_inst, err_return_trace, .null_value }; - _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, &args, null); + _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, false, &args, null); return true; }, else => unreachable, @@ -16141,7 +16179,7 @@ fn retWithErrTracing( const args: [1]Air.Inst.Ref = .{err_return_trace}; if (!need_check) { - _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args, null); + _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, false, &args, null); _ = try block.addUnOp(ret_tag, operand); return always_noreturn; } @@ -16152,7 +16190,7 @@ fn retWithErrTracing( var else_block = block.makeSubBlock(); defer else_block.instructions.deinit(gpa); - _ = try sema.analyzeCall(&else_block, return_err_fn, src, src, .never_inline, false, &args, null); + _ = try sema.analyzeCall(&else_block, return_err_fn, src, src, .never_inline, false, false, &args, null); _ = try else_block.addUnOp(ret_tag, operand); try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.CondBr).Struct.fields.len + @@ -20369,7 +20407,7 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError } } const ensure_result_used = extra.flags.ensure_result_used; - return sema.analyzeCall(block, func, func_src, 
call_src, modifier, ensure_result_used, resolved_args, bound_arg_src); + return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, false, resolved_args, bound_arg_src); } fn zirFieldParentPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { @@ -21803,7 +21841,7 @@ fn panicWithMsg( Value.@"null", ); const args: [3]Air.Inst.Ref = .{ msg_inst, null_stack_trace, .null_value }; - _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, &args, null); + _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, false, &args, null); return always_noreturn; } @@ -21844,7 +21882,7 @@ fn panicUnwrapError( const err = try fail_block.addTyOp(unwrap_err_tag, Type.anyerror, operand); const err_return_trace = try sema.getErrorReturnTrace(&fail_block, src); const args: [2]Air.Inst.Ref = .{ err_return_trace, err }; - _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, &args, null); + _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, false, &args, null); } } try sema.addSafetyCheckExtra(parent_block, ok, &fail_block); @@ -21885,7 +21923,7 @@ fn panicIndexOutOfBounds( } else { const panic_fn = try sema.getBuiltin(&fail_block, src, "panicOutOfBounds"); const args: [2]Air.Inst.Ref = .{ index, len }; - _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, &args, null); + _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, false, &args, null); } } try sema.addSafetyCheckExtra(parent_block, ok, &fail_block); @@ -21927,7 +21965,7 @@ fn panicSentinelMismatch( else { const panic_fn = try sema.getBuiltin(parent_block, src, "checkNonScalarSentinel"); const args: [2]Air.Inst.Ref = .{ expected_sentinel, actual_sentinel }; - _ = try sema.analyzeCall(parent_block, panic_fn, src, src, .auto, false, &args, null); + _ = try sema.analyzeCall(parent_block, panic_fn, src, src, .auto, false, false, &args, null); return; }; const gpa = sema.gpa; @@ -21956,7 +21994,7 @@ fn panicSentinelMismatch( } else { const panic_fn = try sema.getBuiltin(&fail_block, src, "panicSentinelMismatch"); const args: [2]Air.Inst.Ref = .{ expected_sentinel, actual_sentinel }; - _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, &args, null); + _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, false, &args, null); } } try sema.addSafetyCheckExtra(parent_block, ok, &fail_block); diff --git a/src/Zir.zig b/src/Zir.zig index f08d78f8f2..d616ea6d14 100644 --- a/src/Zir.zig +++ b/src/Zir.zig @@ -2825,10 +2825,11 @@ pub const Inst = struct { pub const Flags = packed struct { /// std.builtin.CallOptions.Modifier in packed form pub const PackedModifier = u3; - pub const PackedArgsLen = u28; + pub const PackedArgsLen = u27; packed_modifier: PackedModifier, ensure_result_used: bool = false, + pop_error_return_trace: bool, args_len: PackedArgsLen, comptime { diff --git a/test/stack_traces.zig b/test/stack_traces.zig index 3b8cc8a6cd..1494f86104 100644 --- a/test/stack_traces.zig +++ b/test/stack_traces.zig @@ -208,6 +208,58 @@ pub fn addCases(cases: *tests.StackTracesContext) void { }, }); + cases.addCase(.{ + .name = "stored errors do not contribute to error trace", + .source = + \\fn foo() !void { + \\ return error.TheSkyIsFalling; + \\} + \\ + \\pub fn main() !void { + \\ // Once an error is stored in a variable, it is popped from the trace + \\ var x = foo(); + \\ x = {}; + \\ + \\ // As a result, this error trace will still be clean + \\ return 
error.SomethingUnrelatedWentWrong;
+ \\}
+ ,
+ .Debug = .{
+ .expect =
+ \\error: SomethingUnrelatedWentWrong
+ \\source.zig:11:5: [address] in main (test)
+ \\ return error.SomethingUnrelatedWentWrong;
+ \\ ^
+ \\
+ ,
+ },
+ .ReleaseSafe = .{
+ .exclude_os = .{
+ .windows, // TODO
+ .linux, // defeated by aggressive inlining
+ },
+ .expect =
+ \\error: SomethingUnrelatedWentWrong
+ \\source.zig:11:5: [address] in [function]
+ \\ return error.SomethingUnrelatedWentWrong;
+ \\ ^
+ \\
+ ,
+ },
+ .ReleaseFast = .{
+ .expect =
+ \\error: SomethingUnrelatedWentWrong
+ \\
+ ,
+ },
+ .ReleaseSmall = .{
+ .expect =
+ \\error: SomethingUnrelatedWentWrong
+ \\
+ ,
+ },
+ });
+
 cases.addCase(.{
 .name = "try return from within catch",
 .source =

From 724d75363855176aa5e6b3d9bcd1656e2cc1f6a6 Mon Sep 17 00:00:00 2001
From: Cody Tapscott
Date: Wed, 14 Sep 2022 13:49:54 -0700
Subject: [PATCH 05/12] stage2: Add `.save_err_return_trace_index` AIR op

This is encoded as a primitive AIR instruction to resolve one corner
case: A function may include a `catch { ... }` or `else |err| { ... }`
block but not call any errorable fn. In that case, there is no error
return trace to save the index of and codegen needs to avoid
interacting with the non-existing error trace.

By using a primitive AIR op, we can depend on Liveness to mark this
unused in this corner case.
---
 src/Air.zig | 5 +++++
 src/Liveness.zig | 2 ++
 src/Sema.zig | 27 +++++++++++++++++----------
 src/arch/aarch64/CodeGen.zig | 6 ++++++
 src/arch/arm/CodeGen.zig | 6 ++++++
 src/arch/riscv64/CodeGen.zig | 6 ++++++
 src/arch/sparc64/CodeGen.zig | 1 +
 src/arch/wasm/CodeGen.zig | 1 +
 src/arch/x86_64/CodeGen.zig | 6 ++++++
 src/codegen/c.zig | 6 ++++++
 src/codegen/llvm.zig | 19 +++++++++++++++++++
 src/print_air.zig | 1 +
 12 files changed, 76 insertions(+), 10 deletions(-)

diff --git a/src/Air.zig b/src/Air.zig
index 57479af590..1cd5d85d75 100644
--- a/src/Air.zig
+++ b/src/Air.zig
@@ -733,6 +733,10 @@ pub const Inst = struct {
 /// Uses the `ty_op` field.
 addrspace_cast,
+ /// Saves the error return trace index, if any. Otherwise, returns 0.
+ /// Uses the `ty_pl` field.
+ save_err_return_trace_index,
+
 pub fn fromCmpOp(op: std.math.CompareOperator, optimized: bool) Tag {
 switch (op) {
 .lt => return if (optimized) .cmp_lt_optimized else .cmp_lt,
@@ -1179,6 +1183,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
 .slice_len,
 .ret_addr,
 .frame_addr,
+ .save_err_return_trace_index,
 => return Type.usize,

 .wasm_memory_grow => return Type.i32,
diff --git a/src/Liveness.zig b/src/Liveness.zig
index 54a5041e8b..ff8afb8307 100644
--- a/src/Liveness.zig
+++ b/src/Liveness.zig
@@ -228,6 +228,7 @@ pub fn categorizeOperand(
 .frame_addr,
 .wasm_memory_size,
 .err_return_trace,
+ .save_err_return_trace_index,
 => return .none,

 .fence => return .write,
@@ -805,6 +806,7 @@ fn analyzeInst(
 .frame_addr,
 .wasm_memory_size,
 .err_return_trace,
+ .save_err_return_trace_index,
 => return trackOperands(a, new_set, inst, main_tomb, .{ .none, .none, .none }),

 .not,
diff --git a/src/Sema.zig b/src/Sema.zig
index 93f496a43a..791519c12a 100644
--- a/src/Sema.zig
+++ b/src/Sema.zig
@@ -16228,22 +16228,28 @@ fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE
 // This is only relevant at runtime.
 if (block.is_comptime) return Air.Inst.Ref.zero_usize;

- // In the corner case that `catch { ... }` or `else |err| { ... 
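For illustration, the corner case described in this commit message looks like
the following user code (a minimal sketch, not part of the patch): `main`
never calls an errorable fn, so no error return trace exists at runtime.

    pub fn main() void {
        // An error union value constructed in place, without calling
        // any errorable function:
        const e: error{Oops}!u32 = error.Oops;

        // AstGen still wraps this `catch` in a save/restore of the
        // error trace index, but because `main` makes no errorable
        // calls the resulting `save_err_return_trace_index`
        // instruction is dead: Liveness marks it unused and codegen
        // never touches the missing trace.
        const x = e catch 42;
        _ = x;
    }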
}` is used in a function - // that does *not* make any errorable calls, we still need an error trace to interact with - // the AIR instructions we've already emitted. - if (sema.owner_func != null) - sema.owner_func.?.calls_or_awaits_errorable_fn = true; - const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; const ok = sema.mod.comp.bin_file.options.error_return_tracing and backend_supports_error_return_tracing; if (!ok) return Air.Inst.Ref.zero_usize; + // This is encoded as a primitive AIR instruction to resolve one corner case: A function + // may include a `catch { ... }` or `else |err| { ... }` block but not call any errorable + // fn. In that case, there is no error return trace to save the index of and codegen needs + // to avoid interacting with the non-existing error trace. + // + // By using a primitive AIR op, we can depend on Liveness to mark this unused in this corner case. + const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace"); const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty); - const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty); - const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty); - return sema.fieldVal(block, src, err_return_trace, "index", src); + const field_index = try sema.structFieldIndex(block, stack_trace_ty, "index", src); + return block.addInst(.{ + .tag = .save_err_return_trace_index, + .data = .{ .ty_pl = .{ + .ty = try sema.addType(stack_trace_ty), + .payload = @intCast(u32, field_index), + } }, + }); } fn zirRestoreErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void { @@ -16254,7 +16260,8 @@ fn zirRestoreErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Compi if (block.is_comptime) return; const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; - const ok = sema.mod.comp.bin_file.options.error_return_tracing and + const ok = sema.owner_func.?.calls_or_awaits_errorable_fn and + sema.mod.comp.bin_file.options.error_return_tracing and backend_supports_error_return_tracing; if (!ok) return; diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig index 8da94f2e9c..eb8ca8e8f1 100644 --- a/src/arch/aarch64/CodeGen.zig +++ b/src/arch/aarch64/CodeGen.zig @@ -702,6 +702,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { .errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst), .err_return_trace => try self.airErrReturnTrace(inst), .set_err_return_trace => try self.airSetErrReturnTrace(inst), + .save_err_return_trace_index=> try self.airSaveErrReturnTraceIndex(inst), .wrap_optional => try self.airWrapOptional(inst), .wrap_errunion_payload => try self.airWrapErrUnionPayload(inst), @@ -2867,6 +2868,11 @@ fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void { return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch}); } +fn airSaveErrReturnTraceIndex(self: *Self, inst: Air.Inst.Index) !void { + _ = inst; + return self.fail("TODO implement airSaveErrReturnTraceIndex for {}", .{self.target.cpu.arch}); +} + fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void { const ty_op = self.air.instructions.items(.data)[inst].ty_op; const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: { diff --git a/src/arch/arm/CodeGen.zig b/src/arch/arm/CodeGen.zig index 530d5c2b04..1ebc348fc2 100644 --- a/src/arch/arm/CodeGen.zig +++ 
b/src/arch/arm/CodeGen.zig @@ -751,6 +751,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { .errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst), .err_return_trace => try self.airErrReturnTrace(inst), .set_err_return_trace => try self.airSetErrReturnTrace(inst), + .save_err_return_trace_index=> try self.airSaveErrReturnTraceIndex(inst), .wrap_optional => try self.airWrapOptional(inst), .wrap_errunion_payload => try self.airWrapErrUnionPayload(inst), @@ -2116,6 +2117,11 @@ fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void { return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch}); } +fn airSaveErrReturnTraceIndex(self: *Self, inst: Air.Inst.Index) !void { + _ = inst; + return self.fail("TODO implement airSaveErrReturnTraceIndex for {}", .{self.target.cpu.arch}); +} + /// T to E!T fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) !void { const ty_op = self.air.instructions.items(.data)[inst].ty_op; diff --git a/src/arch/riscv64/CodeGen.zig b/src/arch/riscv64/CodeGen.zig index dd31bfb6f7..003d2c7e5f 100644 --- a/src/arch/riscv64/CodeGen.zig +++ b/src/arch/riscv64/CodeGen.zig @@ -665,6 +665,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { .errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst), .err_return_trace => try self.airErrReturnTrace(inst), .set_err_return_trace => try self.airSetErrReturnTrace(inst), + .save_err_return_trace_index=> try self.airSaveErrReturnTraceIndex(inst), .wrap_optional => try self.airWrapOptional(inst), .wrap_errunion_payload => try self.airWrapErrUnionPayload(inst), @@ -1329,6 +1330,11 @@ fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void { return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch}); } +fn airSaveErrReturnTraceIndex(self: *Self, inst: Air.Inst.Index) !void { + _ = inst; + return self.fail("TODO implement airSaveErrReturnTraceIndex for {}", .{self.target.cpu.arch}); +} + fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void { const ty_op = self.air.instructions.items(.data)[inst].ty_op; const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: { diff --git a/src/arch/sparc64/CodeGen.zig b/src/arch/sparc64/CodeGen.zig index 6217119f34..9d37cd9d1b 100644 --- a/src/arch/sparc64/CodeGen.zig +++ b/src/arch/sparc64/CodeGen.zig @@ -679,6 +679,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { .errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst), .err_return_trace => @panic("TODO try self.airErrReturnTrace(inst)"), .set_err_return_trace => @panic("TODO try self.airSetErrReturnTrace(inst)"), + .save_err_return_trace_index=> @panic("TODO try self.airSaveErrReturnTraceIndex(inst)"), .wrap_optional => try self.airWrapOptional(inst), .wrap_errunion_payload => @panic("TODO try self.airWrapErrUnionPayload(inst)"), diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index b47bb71e98..538fcb13c1 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -1857,6 +1857,7 @@ fn genInst(func: *CodeGen, inst: Air.Inst.Index) InnerError!void { .tag_name, .err_return_trace, .set_err_return_trace, + .save_err_return_trace_index, .is_named_enum_value, .error_set_has_value, .addrspace_cast, diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index a3888b4173..a1b354482b 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -756,6 +756,7 @@ fn genBody(self: 
*Self, body: []const Air.Inst.Index) InnerError!void { .errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst), .err_return_trace => try self.airErrReturnTrace(inst), .set_err_return_trace => try self.airSetErrReturnTrace(inst), + .save_err_return_trace_index=> try self.airSaveErrReturnTraceIndex(inst), .wrap_optional => try self.airWrapOptional(inst), .wrap_errunion_payload => try self.airWrapErrUnionPayload(inst), @@ -1973,6 +1974,11 @@ fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void { return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch}); } +fn airSaveErrReturnTraceIndex(self: *Self, inst: Air.Inst.Index) !void { + _ = inst; + return self.fail("TODO implement airSaveErrReturnTraceIndex for {}", .{self.target.cpu.arch}); +} + fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void { const ty_op = self.air.instructions.items(.data)[inst].ty_op; if (self.liveness.isUnused(inst)) { diff --git a/src/codegen/c.zig b/src/codegen/c.zig index 072091d9b2..d6584d75ae 100644 --- a/src/codegen/c.zig +++ b/src/codegen/c.zig @@ -1935,6 +1935,7 @@ fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutO .errunion_payload_ptr_set => try airErrUnionPayloadPtrSet(f, inst), .err_return_trace => try airErrReturnTrace(f, inst), .set_err_return_trace => try airSetErrReturnTrace(f, inst), + .save_err_return_trace_index => try airSaveErrReturnTraceIndex(f, inst), .wasm_memory_size => try airWasmMemorySize(f, inst), .wasm_memory_grow => try airWasmMemoryGrow(f, inst), @@ -3625,6 +3626,11 @@ fn airSetErrReturnTrace(f: *Function, inst: Air.Inst.Index) !CValue { return f.fail("TODO: C backend: implement airSetErrReturnTrace", .{}); } +fn airSaveErrReturnTraceIndex(f: *Function, inst: Air.Inst.Index) !CValue { + _ = inst; + return f.fail("TODO: C backend: implement airSaveErrReturnTraceIndex", .{}); +} + fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue { if (f.liveness.isUnused(inst)) return CValue.none; diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig index b0d1588007..3ebca13c20 100644 --- a/src/codegen/llvm.zig +++ b/src/codegen/llvm.zig @@ -4592,6 +4592,7 @@ pub const FuncGen = struct { .errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst), .err_return_trace => try self.airErrReturnTrace(inst), .set_err_return_trace => try self.airSetErrReturnTrace(inst), + .save_err_return_trace_index => try self.airSaveErrReturnTraceIndex(inst), .wrap_optional => try self.airWrapOptional(inst), .wrap_errunion_payload => try self.airWrapErrUnionPayload(inst), @@ -6543,6 +6544,24 @@ pub const FuncGen = struct { return null; } + fn airSaveErrReturnTraceIndex(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { + if (self.liveness.isUnused(inst)) return null; + + const target = self.dg.module.getTarget(); + + const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; + //const struct_ty = try self.resolveInst(ty_pl.ty); + const struct_ty = self.air.getRefType(ty_pl.ty); + const field_index = ty_pl.payload; + + var ptr_ty_buf: Type.Payload.Pointer = undefined; + const llvm_field_index = llvmFieldIndex(struct_ty, field_index, target, &ptr_ty_buf).?; + const struct_llvm_ty = try self.dg.lowerType(struct_ty); + const field_ptr = self.builder.buildStructGEP(struct_llvm_ty, self.err_ret_trace.?, llvm_field_index, ""); + const field_ptr_ty = Type.initPayload(&ptr_ty_buf.base); + return self.load(field_ptr, field_ptr_ty); + } + fn airWrapOptional(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { if 
(self.liveness.isUnused(inst)) return null;
diff --git a/src/print_air.zig b/src/print_air.zig
index d3523c0fc6..0bbc1100f7 100644
--- a/src/print_air.zig
+++ b/src/print_air.zig
@@ -197,6 +197,7 @@ const Writer = struct {
 .unreach,
 .ret_addr,
 .frame_addr,
+ .save_err_return_trace_index,
 => try w.writeNoOp(s, inst),

 .const_ty,

From 77720e30aaead1c814f2714bd5a7ad7ad0fbc23e Mon Sep 17 00:00:00 2001
From: Cody Tapscott
Date: Wed, 14 Sep 2022 17:38:08 -0700
Subject: [PATCH 06/12] Re-factor: Change AstGen.ResultLoc to be a struct

This refactor is intended to make it easier to track what kind of
operator/expression consumes a result location, without overloading the
ResultLoc union for this purpose.

This is used in the following commit to keep track of initializer
expressions of `const` variables to avoid popping error traces
prematurely. A short before/after sketch of a typical call site follows
the new `ResultInfo` definition below.

Hopefully this will also be useful for implementing RLS temporaries in
the future.
---
 src/AstGen.zig | 1711 +++++++++++++++++-----------------
 test/behavior/bugs/12891.zig | 1 +
 2 files changed, 861 insertions(+), 851 deletions(-)

diff --git a/src/AstGen.zig b/src/AstGen.zig
index ec67a89810..65818d98df 100644
--- a/src/AstGen.zig
+++ b/src/AstGen.zig
@@ -213,127 +213,145 @@ pub fn deinit(astgen: *AstGen, gpa: Allocator) void {
 astgen.ref_table.deinit(gpa);
 }

-pub const ResultLoc = union(enum) {
- /// The expression is the right-hand side of assignment to `_`. Only the side-effects of the
- /// expression should be generated. The result instruction from the expression must
- /// be ignored.
- discard,
- /// The expression has an inferred type, and it will be evaluated as an rvalue.
- none,
- /// The expression must generate a pointer rather than a value. For example, the left hand side
- /// of an assignment uses this kind of result location.
- ref,
- /// Exactly like `none`, except also indicates this is an error-handling expr (try/catch/return etc.)
- catch_none,
- /// Exactly like `ref`, except also indicates this is an error-handling expr (try/catch/return etc.)
- catch_ref,
- /// The expression will be coerced into this type, but it will be evaluated as an rvalue.
- ty: Zir.Inst.Ref,
- /// Same as `ty` but for shift operands.
- ty_shift_operand: Zir.Inst.Ref,
- /// Same as `ty` but it is guaranteed that Sema will additionally perform the coercion,
- /// so no `as` instruction needs to be emitted.
- coerced_ty: Zir.Inst.Ref,
- /// The expression must store its result into this typed pointer. The result instruction
- /// from the expression must be ignored.
- ptr: PtrResultLoc,
- /// The expression must store its result into this allocation, which has an inferred type.
- /// The result instruction from the expression must be ignored.
- /// Always an instruction with tag `alloc_inferred`.
- inferred_ptr: Zir.Inst.Ref,
- /// There is a pointer for the expression to store its result into, however, its type
- /// is inferred based on peer type resolution for a `Zir.Inst.Block`.
- /// The result instruction from the expression must be ignored.
- block_ptr: *GenZir,
+pub const ResultInfo = struct {
+ /// The semantics requested for the result location
+ rl: Loc,

- const PtrResultLoc = struct {
- inst: Zir.Inst.Ref,
- src_node: ?Ast.Node.Index = null,
- };
-
- pub const Strategy = struct {
- elide_store_to_block_ptr_instructions: bool,
- tag: Tag,
-
- pub const Tag = enum {
- /// Both branches will use break_void; result location is used to communicate the
- /// result instruction. 
- break_void, - /// Use break statements to pass the block result value, and call rvalue() at - /// the end depending on rl. Also elide the store_to_block_ptr instructions - /// depending on rl. - break_operand, - }; - }; - - fn strategy(rl: ResultLoc, block_scope: *GenZir) Strategy { - switch (rl) { - // In this branch there will not be any store_to_block_ptr instructions. - .none, .catch_none, .ty, .ty_shift_operand, .coerced_ty, .ref, .catch_ref => return .{ - .tag = .break_operand, - .elide_store_to_block_ptr_instructions = false, - }, - .discard => return .{ - .tag = .break_void, - .elide_store_to_block_ptr_instructions = false, - }, - // The pointer got passed through to the sub-expressions, so we will use - // break_void here. - // In this branch there will not be any store_to_block_ptr instructions. - .ptr => return .{ - .tag = .break_void, - .elide_store_to_block_ptr_instructions = false, - }, - .inferred_ptr, .block_ptr => { - if (block_scope.rvalue_rl_count == block_scope.break_count) { - // Neither prong of the if consumed the result location, so we can - // use break instructions to create an rvalue. - return .{ - .tag = .break_operand, - .elide_store_to_block_ptr_instructions = true, - }; - } else { - // Allow the store_to_block_ptr instructions to remain so that - // semantic analysis can turn them into bitcasts. - return .{ - .tag = .break_void, - .elide_store_to_block_ptr_instructions = false, - }; - } - }, - } - } + /// The "operator" consuming the result location + ctx: Context = .none, /// Turns a `coerced_ty` back into a `ty`. Should be called at branch points /// such as if and switch expressions. - fn br(rl: ResultLoc) ResultLoc { - return switch (rl) { - .coerced_ty => |ty| .{ .ty = ty }, - else => rl, + fn br(ri: ResultInfo) ResultInfo { + return switch (ri.rl) { + .coerced_ty => |ty| .{ + .rl = .{ .ty = ty }, + .ctx = ri.ctx, + }, + else => ri, }; } - fn zirTag(rl: ResultLoc) Zir.Inst.Tag { - return switch (rl) { - .ty => .as_node, - .ty_shift_operand => .as_shift_operand, + fn zirTag(ri: ResultInfo) Zir.Inst.Tag { + switch (ri.rl) { + .ty => return switch (ri.ctx) { + .shift_op => .as_shift_operand, + else => .as_node, + }, else => unreachable, - }; + } } + + pub const Loc = union(enum) { + /// The expression is the right-hand side of assignment to `_`. Only the side-effects of the + /// expression should be generated. The result instruction from the expression must + /// be ignored. + discard, + /// The expression has an inferred type, and it will be evaluated as an rvalue. + none, + /// The expression must generate a pointer rather than a value. For example, the left hand side + /// of an assignment uses this kind of result location. + ref, + /// The expression will be coerced into this type, but it will be evaluated as an rvalue. + ty: Zir.Inst.Ref, + /// Same as `ty` but it is guaranteed that Sema will additionally perform the coercion, + /// so no `as` instruction needs to be emitted. + coerced_ty: Zir.Inst.Ref, + /// The expression must store its result into this typed pointer. The result instruction + /// from the expression must be ignored. + ptr: PtrResultLoc, + /// The expression must store its result into this allocation, which has an inferred type. + /// The result instruction from the expression must be ignored. + /// Always an instruction with tag `alloc_inferred`. 
+ inferred_ptr: Zir.Inst.Ref, + /// There is a pointer for the expression to store its result into, however, its type + /// is inferred based on peer type resolution for a `Zir.Inst.Block`. + /// The result instruction from the expression must be ignored. + block_ptr: *GenZir, + + const PtrResultLoc = struct { + inst: Zir.Inst.Ref, + src_node: ?Ast.Node.Index = null, + }; + + pub const Strategy = struct { + elide_store_to_block_ptr_instructions: bool, + tag: Tag, + + pub const Tag = enum { + /// Both branches will use break_void; result location is used to communicate the + /// result instruction. + break_void, + /// Use break statements to pass the block result value, and call rvalue() at + /// the end depending on rl. Also elide the store_to_block_ptr instructions + /// depending on rl. + break_operand, + }; + }; + + fn strategy(rl: Loc, block_scope: *GenZir) Strategy { + switch (rl) { + // In this branch there will not be any store_to_block_ptr instructions. + .none, .ty, .coerced_ty, .ref => return .{ + .tag = .break_operand, + .elide_store_to_block_ptr_instructions = false, + }, + .discard => return .{ + .tag = .break_void, + .elide_store_to_block_ptr_instructions = false, + }, + // The pointer got passed through to the sub-expressions, so we will use + // break_void here. + // In this branch there will not be any store_to_block_ptr instructions. + .ptr => return .{ + .tag = .break_void, + .elide_store_to_block_ptr_instructions = false, + }, + .inferred_ptr, .block_ptr => { + if (block_scope.rvalue_rl_count == block_scope.break_count) { + // Neither prong of the if consumed the result location, so we can + // use break instructions to create an rvalue. + return .{ + .tag = .break_operand, + .elide_store_to_block_ptr_instructions = true, + }; + } else { + // Allow the store_to_block_ptr instructions to remain so that + // semantic analysis can turn them into bitcasts. + return .{ + .tag = .break_void, + .elide_store_to_block_ptr_instructions = false, + }; + } + }, + } + } + }; + + pub const Context = enum { + /// The expression is the operand to a return expression. + @"return", + /// The expression is the input to an error-handling operator (if-else, try, or catch). + error_handling_expr, + /// The expression is the right-hand side of a shift operation. + shift_op, + /// No specific operator in particular. 
+ none,
+ };
+};

-pub const align_rl: ResultLoc = .{ .ty = .u29_type };
-pub const coerced_align_rl: ResultLoc = .{ .coerced_ty = .u29_type };
-pub const bool_rl: ResultLoc = .{ .ty = .bool_type };
-pub const type_rl: ResultLoc = .{ .ty = .type_type };
-pub const coerced_type_rl: ResultLoc = .{ .coerced_ty = .type_type };
+pub const align_ri: ResultInfo = .{ .rl = .{ .ty = .u29_type } };
+pub const coerced_align_ri: ResultInfo = .{ .rl = .{ .coerced_ty = .u29_type } };
+pub const bool_ri: ResultInfo = .{ .rl = .{ .ty = .bool_type } };
+pub const type_ri: ResultInfo = .{ .rl = .{ .ty = .type_type } };
+pub const coerced_type_ri: ResultInfo = .{ .rl = .{ .coerced_ty = .type_type } };

 fn typeExpr(gz: *GenZir, scope: *Scope, type_node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
 const prev_force_comptime = gz.force_comptime;
 gz.force_comptime = true;
 defer gz.force_comptime = prev_force_comptime;

- return expr(gz, scope, coerced_type_rl, type_node);
+ return expr(gz, scope, coerced_type_ri, type_node);
 }

 fn reachableTypeExpr(
@@ -346,24 +364,24 @@ fn reachableTypeExpr(
 gz.force_comptime = true;
 defer gz.force_comptime = prev_force_comptime;

- return reachableExpr(gz, scope, coerced_type_rl, type_node, reachable_node);
+ return reachableExpr(gz, scope, coerced_type_ri, type_node, reachable_node);
 }

 /// Same as `expr` but fails with a compile error if the result type is `noreturn`.
 fn reachableExpr(
 gz: *GenZir,
 scope: *Scope,
- rl: ResultLoc,
+ ri: ResultInfo,
 node: Ast.Node.Index,
 reachable_node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
- return reachableExprComptime(gz, scope, rl, node, reachable_node, false);
+ return reachableExprComptime(gz, scope, ri, node, reachable_node, false);
 }

 fn reachableExprComptime(
 gz: *GenZir,
 scope: *Scope,
- rl: ResultLoc,
+ ri: ResultInfo,
 node: Ast.Node.Index,
 reachable_node: Ast.Node.Index,
 force_comptime: bool,
@@ -372,7 +390,7 @@ fn reachableExprComptime(
 gz.force_comptime = prev_force_comptime or force_comptime;
 defer gz.force_comptime = prev_force_comptime;

- const result_inst = try expr(gz, scope, rl, node);
+ const result_inst = try expr(gz, scope, ri, node);
 if (gz.refIsNoReturn(result_inst)) {
 try gz.astgen.appendErrorNodeNotes(reachable_node, "unreachable code", .{}, &[_]u32{
 try gz.astgen.errNoteNode(node, "control flow is diverted here", .{}),
@@ -573,14 +591,14 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Ins
 .@"orelse",
 => {},
 }

- return expr(gz, scope, .ref, node);
+ return expr(gz, scope, .{ .rl = .ref }, node);
 }

 /// Turn Zig AST into untyped ZIR instructions.
 /// When `rl` is discard, ptr, inferred_ptr, or block_ptr, the
 /// result instruction can be used to inspect whether it is isNoReturn() but that is it,
 /// it must otherwise not be used. 
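To make the shape of the change concrete, here is a hedged before/after
sketch of a typical call site (illustrative only; `elem_ty` and `node` are
hypothetical stand-ins for whatever the caller has at hand):

    // Old: the consuming operator was folded into dedicated ResultLoc
    // variants such as `ty_shift_operand` and `catch_ref`:
    //
    //     const operand = try expr(gz, scope, .{ .ty_shift_operand = elem_ty }, node);
    //
    // New: ResultInfo carries the location and the consumer separately,
    // and zirTag() picks `as_shift_operand` from the `ctx` field:
    const operand = try expr(gz, scope, .{
        .rl = .{ .ty = elem_ty }, // where the result goes
        .ctx = .shift_op, // which operator consumes it
    }, node);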
-fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref { +fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; const tree = astgen.tree; const main_tokens = tree.nodes.items(.main_token); @@ -621,161 +639,161 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr .assign => { try assign(gz, scope, node); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_shl => { try assignShift(gz, scope, node, .shl); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_shl_sat => { try assignShiftSat(gz, scope, node); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_shr => { try assignShift(gz, scope, node, .shr); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_bit_and => { try assignOp(gz, scope, node, .bit_and); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_bit_or => { try assignOp(gz, scope, node, .bit_or); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_bit_xor => { try assignOp(gz, scope, node, .xor); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_div => { try assignOp(gz, scope, node, .div); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_sub => { try assignOp(gz, scope, node, .sub); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_sub_wrap => { try assignOp(gz, scope, node, .subwrap); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_sub_sat => { try assignOp(gz, scope, node, .sub_sat); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_mod => { try assignOp(gz, scope, node, .mod_rem); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_add => { try assignOp(gz, scope, node, .add); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_add_wrap => { try assignOp(gz, scope, node, .addwrap); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_add_sat => { try assignOp(gz, scope, node, .add_sat); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_mul => { try assignOp(gz, scope, node, .mul); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_mul_wrap => { try assignOp(gz, scope, node, .mulwrap); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .assign_mul_sat => { try assignOp(gz, scope, node, .mul_sat); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, // zig fmt: off - .shl => return shiftOp(gz, scope, rl, node, node_datas[node].lhs, node_datas[node].rhs, .shl), - .shr => return shiftOp(gz, scope, rl, node, node_datas[node].lhs, node_datas[node].rhs, .shr), + .shl => return shiftOp(gz, scope, ri, node, node_datas[node].lhs, node_datas[node].rhs, .shl), + .shr => return shiftOp(gz, scope, ri, node, node_datas[node].lhs, node_datas[node].rhs, .shr), - .add => return simpleBinOp(gz, scope, rl, node, .add), - 
.add_wrap => return simpleBinOp(gz, scope, rl, node, .addwrap), - .add_sat => return simpleBinOp(gz, scope, rl, node, .add_sat), - .sub => return simpleBinOp(gz, scope, rl, node, .sub), - .sub_wrap => return simpleBinOp(gz, scope, rl, node, .subwrap), - .sub_sat => return simpleBinOp(gz, scope, rl, node, .sub_sat), - .mul => return simpleBinOp(gz, scope, rl, node, .mul), - .mul_wrap => return simpleBinOp(gz, scope, rl, node, .mulwrap), - .mul_sat => return simpleBinOp(gz, scope, rl, node, .mul_sat), - .div => return simpleBinOp(gz, scope, rl, node, .div), - .mod => return simpleBinOp(gz, scope, rl, node, .mod_rem), - .shl_sat => return simpleBinOp(gz, scope, rl, node, .shl_sat), + .add => return simpleBinOp(gz, scope, ri, node, .add), + .add_wrap => return simpleBinOp(gz, scope, ri, node, .addwrap), + .add_sat => return simpleBinOp(gz, scope, ri, node, .add_sat), + .sub => return simpleBinOp(gz, scope, ri, node, .sub), + .sub_wrap => return simpleBinOp(gz, scope, ri, node, .subwrap), + .sub_sat => return simpleBinOp(gz, scope, ri, node, .sub_sat), + .mul => return simpleBinOp(gz, scope, ri, node, .mul), + .mul_wrap => return simpleBinOp(gz, scope, ri, node, .mulwrap), + .mul_sat => return simpleBinOp(gz, scope, ri, node, .mul_sat), + .div => return simpleBinOp(gz, scope, ri, node, .div), + .mod => return simpleBinOp(gz, scope, ri, node, .mod_rem), + .shl_sat => return simpleBinOp(gz, scope, ri, node, .shl_sat), - .bit_and => return simpleBinOp(gz, scope, rl, node, .bit_and), - .bit_or => return simpleBinOp(gz, scope, rl, node, .bit_or), - .bit_xor => return simpleBinOp(gz, scope, rl, node, .xor), - .bang_equal => return simpleBinOp(gz, scope, rl, node, .cmp_neq), - .equal_equal => return simpleBinOp(gz, scope, rl, node, .cmp_eq), - .greater_than => return simpleBinOp(gz, scope, rl, node, .cmp_gt), - .greater_or_equal => return simpleBinOp(gz, scope, rl, node, .cmp_gte), - .less_than => return simpleBinOp(gz, scope, rl, node, .cmp_lt), - .less_or_equal => return simpleBinOp(gz, scope, rl, node, .cmp_lte), - .array_cat => return simpleBinOp(gz, scope, rl, node, .array_cat), + .bit_and => return simpleBinOp(gz, scope, ri, node, .bit_and), + .bit_or => return simpleBinOp(gz, scope, ri, node, .bit_or), + .bit_xor => return simpleBinOp(gz, scope, ri, node, .xor), + .bang_equal => return simpleBinOp(gz, scope, ri, node, .cmp_neq), + .equal_equal => return simpleBinOp(gz, scope, ri, node, .cmp_eq), + .greater_than => return simpleBinOp(gz, scope, ri, node, .cmp_gt), + .greater_or_equal => return simpleBinOp(gz, scope, ri, node, .cmp_gte), + .less_than => return simpleBinOp(gz, scope, ri, node, .cmp_lt), + .less_or_equal => return simpleBinOp(gz, scope, ri, node, .cmp_lte), + .array_cat => return simpleBinOp(gz, scope, ri, node, .array_cat), .array_mult => { const result = try gz.addPlNode(.array_mul, node, Zir.Inst.Bin{ - .lhs = try expr(gz, scope, .none, node_datas[node].lhs), - .rhs = try comptimeExpr(gz, scope, .{ .coerced_ty = .usize_type }, node_datas[node].rhs), + .lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs), + .rhs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[node].rhs), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, - .error_union => return simpleBinOp(gz, scope, rl, node, .error_union_type), - .merge_error_sets => return simpleBinOp(gz, scope, rl, node, .merge_error_sets), + .error_union => return simpleBinOp(gz, scope, ri, node, .error_union_type), + .merge_error_sets => return 
simpleBinOp(gz, scope, ri, node, .merge_error_sets), - .bool_and => return boolBinOp(gz, scope, rl, node, .bool_br_and), - .bool_or => return boolBinOp(gz, scope, rl, node, .bool_br_or), + .bool_and => return boolBinOp(gz, scope, ri, node, .bool_br_and), + .bool_or => return boolBinOp(gz, scope, ri, node, .bool_br_or), - .bool_not => return simpleUnOp(gz, scope, rl, node, bool_rl, node_datas[node].lhs, .bool_not), - .bit_not => return simpleUnOp(gz, scope, rl, node, .none, node_datas[node].lhs, .bit_not), + .bool_not => return simpleUnOp(gz, scope, ri, node, bool_ri, node_datas[node].lhs, .bool_not), + .bit_not => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, node_datas[node].lhs, .bit_not), - .negation => return negation(gz, scope, rl, node), - .negation_wrap => return simpleUnOp(gz, scope, rl, node, .none, node_datas[node].lhs, .negate_wrap), + .negation => return negation(gz, scope, ri, node), + .negation_wrap => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, node_datas[node].lhs, .negate_wrap), - .identifier => return identifier(gz, scope, rl, node), + .identifier => return identifier(gz, scope, ri, node), - .asm_simple => return asmExpr(gz, scope, rl, node, tree.asmSimple(node)), - .@"asm" => return asmExpr(gz, scope, rl, node, tree.asmFull(node)), + .asm_simple => return asmExpr(gz, scope, ri, node, tree.asmSimple(node)), + .@"asm" => return asmExpr(gz, scope, ri, node, tree.asmFull(node)), - .string_literal => return stringLiteral(gz, rl, node), - .multiline_string_literal => return multilineStringLiteral(gz, rl, node), + .string_literal => return stringLiteral(gz, ri, node), + .multiline_string_literal => return multilineStringLiteral(gz, ri, node), - .number_literal => return numberLiteral(gz, rl, node, node, .positive), + .number_literal => return numberLiteral(gz, ri, node, node, .positive), // zig fmt: on .builtin_call_two, .builtin_call_two_comma => { if (node_datas[node].lhs == 0) { const params = [_]Ast.Node.Index{}; - return builtinCall(gz, scope, rl, node, ¶ms); + return builtinCall(gz, scope, ri, node, ¶ms); } else if (node_datas[node].rhs == 0) { const params = [_]Ast.Node.Index{node_datas[node].lhs}; - return builtinCall(gz, scope, rl, node, ¶ms); + return builtinCall(gz, scope, ri, node, ¶ms); } else { const params = [_]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs }; - return builtinCall(gz, scope, rl, node, ¶ms); + return builtinCall(gz, scope, ri, node, ¶ms); } }, .builtin_call, .builtin_call_comma => { const params = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs]; - return builtinCall(gz, scope, rl, node, params); + return builtinCall(gz, scope, ri, node, params); }, .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => { var params: [1]Ast.Node.Index = undefined; - return callExpr(gz, scope, rl, node, tree.callOne(¶ms, node)); + return callExpr(gz, scope, ri, node, tree.callOne(¶ms, node)); }, .call, .call_comma, .async_call, .async_call_comma => { - return callExpr(gz, scope, rl, node, tree.callFull(node)); + return callExpr(gz, scope, ri, node, tree.callFull(node)); }, .unreachable_literal => { @@ -790,112 +808,112 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr return Zir.Inst.Ref.unreachable_value; }, .@"return" => return ret(gz, scope, node), - .field_access => return fieldAccess(gz, scope, rl, node), + .field_access => return fieldAccess(gz, scope, ri, node), - .if_simple => return ifExpr(gz, scope, rl.br(), node, tree.ifSimple(node)), - .@"if" => return 
ifExpr(gz, scope, rl.br(), node, tree.ifFull(node)), + .if_simple => return ifExpr(gz, scope, ri.br(), node, tree.ifSimple(node)), + .@"if" => return ifExpr(gz, scope, ri.br(), node, tree.ifFull(node)), - .while_simple => return whileExpr(gz, scope, rl.br(), node, tree.whileSimple(node), false), - .while_cont => return whileExpr(gz, scope, rl.br(), node, tree.whileCont(node), false), - .@"while" => return whileExpr(gz, scope, rl.br(), node, tree.whileFull(node), false), + .while_simple => return whileExpr(gz, scope, ri.br(), node, tree.whileSimple(node), false), + .while_cont => return whileExpr(gz, scope, ri.br(), node, tree.whileCont(node), false), + .@"while" => return whileExpr(gz, scope, ri.br(), node, tree.whileFull(node), false), - .for_simple => return forExpr(gz, scope, rl.br(), node, tree.forSimple(node), false), - .@"for" => return forExpr(gz, scope, rl.br(), node, tree.forFull(node), false), + .for_simple => return forExpr(gz, scope, ri.br(), node, tree.forSimple(node), false), + .@"for" => return forExpr(gz, scope, ri.br(), node, tree.forFull(node), false), .slice_open => { - const lhs = try expr(gz, scope, .ref, node_datas[node].lhs); - const start = try expr(gz, scope, .{ .coerced_ty = .usize_type }, node_datas[node].rhs); + const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs); + const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[node].rhs); const result = try gz.addPlNode(.slice_start, node, Zir.Inst.SliceStart{ .lhs = lhs, .start = start, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .slice => { - const lhs = try expr(gz, scope, .ref, node_datas[node].lhs); + const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs); const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice); - const start = try expr(gz, scope, .{ .coerced_ty = .usize_type }, extra.start); - const end = try expr(gz, scope, .{ .coerced_ty = .usize_type }, extra.end); + const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.start); + const end = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end); const result = try gz.addPlNode(.slice_end, node, Zir.Inst.SliceEnd{ .lhs = lhs, .start = start, .end = end, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .slice_sentinel => { - const lhs = try expr(gz, scope, .ref, node_datas[node].lhs); + const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs); const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel); - const start = try expr(gz, scope, .{ .coerced_ty = .usize_type }, extra.start); - const end = if (extra.end != 0) try expr(gz, scope, .{ .coerced_ty = .usize_type }, extra.end) else .none; - const sentinel = try expr(gz, scope, .none, extra.sentinel); + const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.start); + const end = if (extra.end != 0) try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end) else .none; + const sentinel = try expr(gz, scope, .{ .rl = .none }, extra.sentinel); const result = try gz.addPlNode(.slice_sentinel, node, Zir.Inst.SliceSentinel{ .lhs = lhs, .start = start, .end = end, .sentinel = sentinel, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .deref => { - const lhs = try expr(gz, scope, .none, node_datas[node].lhs); + const lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs); _ = try 
gz.addUnNode(.validate_deref, lhs, node); - switch (rl) { - .ref, .catch_ref => return lhs, + switch (ri.rl) { + .ref => return lhs, else => { const result = try gz.addUnNode(.load, lhs, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, } }, .address_of => { - const result = try expr(gz, scope, .ref, node_datas[node].lhs); - return rvalue(gz, rl, result, node); + const result = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs); + return rvalue(gz, ri, result, node); }, .optional_type => { const operand = try typeExpr(gz, scope, node_datas[node].lhs); const result = try gz.addUnNode(.optional_type, operand, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, - .unwrap_optional => switch (rl) { - .ref, .catch_ref => return gz.addUnNode( + .unwrap_optional => switch (ri.rl) { + .ref => return gz.addUnNode( .optional_payload_safe_ptr, - try expr(gz, scope, .ref, node_datas[node].lhs), + try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs), node, ), - else => return rvalue(gz, rl, try gz.addUnNode( + else => return rvalue(gz, ri, try gz.addUnNode( .optional_payload_safe, - try expr(gz, scope, .none, node_datas[node].lhs), + try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs), node, ), node), }, .block_two, .block_two_semicolon => { const statements = [2]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs }; if (node_datas[node].lhs == 0) { - return blockExpr(gz, scope, rl, node, statements[0..0]); + return blockExpr(gz, scope, ri, node, statements[0..0]); } else if (node_datas[node].rhs == 0) { - return blockExpr(gz, scope, rl, node, statements[0..1]); + return blockExpr(gz, scope, ri, node, statements[0..1]); } else { - return blockExpr(gz, scope, rl, node, statements[0..2]); + return blockExpr(gz, scope, ri, node, statements[0..2]); } }, .block, .block_semicolon => { const statements = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs]; - return blockExpr(gz, scope, rl, node, statements); + return blockExpr(gz, scope, ri, node, statements); }, - .enum_literal => return simpleStrTok(gz, rl, main_tokens[node], node, .enum_literal), - .error_value => return simpleStrTok(gz, rl, node_datas[node].rhs, node, .error_value), + .enum_literal => return simpleStrTok(gz, ri, main_tokens[node], node, .enum_literal), + .error_value => return simpleStrTok(gz, ri, node_datas[node].rhs, node, .error_value), // TODO restore this when implementing https://github.com/ziglang/zig/issues/6025 - // .anyframe_literal => return rvalue(gz, rl, .anyframe_type, node), + // .anyframe_literal => return rvalue(gz, ri, .anyframe_type, node), .anyframe_literal => { const result = try gz.addUnNode(.anyframe_type, .void_type, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .anyframe_type => { const return_type = try typeExpr(gz, scope, node_datas[node].rhs); const result = try gz.addUnNode(.anyframe_type, return_type, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .@"catch" => { const catch_token = main_tokens[node]; @@ -903,11 +921,11 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr catch_token + 2 else null; - switch (rl) { - .ref, .catch_ref => return orelseCatchExpr( + switch (ri.rl) { + .ref => return orelseCatchExpr( gz, scope, - rl, + ri, node, node_datas[node].lhs, .is_non_err_ptr, @@ -919,7 +937,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr else => 
return orelseCatchExpr( gz, scope, - rl, + ri, node, node_datas[node].lhs, .is_non_err, @@ -930,11 +948,11 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr ), } }, - .@"orelse" => switch (rl) { - .ref, .catch_ref => return orelseCatchExpr( + .@"orelse" => switch (ri.rl) { + .ref => return orelseCatchExpr( gz, scope, - rl, + ri, node, node_datas[node].lhs, .is_non_null_ptr, @@ -946,7 +964,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr else => return orelseCatchExpr( gz, scope, - rl, + ri, node, node_datas[node].lhs, .is_non_null, @@ -957,94 +975,94 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr ), }, - .ptr_type_aligned => return ptrType(gz, scope, rl, node, tree.ptrTypeAligned(node)), - .ptr_type_sentinel => return ptrType(gz, scope, rl, node, tree.ptrTypeSentinel(node)), - .ptr_type => return ptrType(gz, scope, rl, node, tree.ptrType(node)), - .ptr_type_bit_range => return ptrType(gz, scope, rl, node, tree.ptrTypeBitRange(node)), + .ptr_type_aligned => return ptrType(gz, scope, ri, node, tree.ptrTypeAligned(node)), + .ptr_type_sentinel => return ptrType(gz, scope, ri, node, tree.ptrTypeSentinel(node)), + .ptr_type => return ptrType(gz, scope, ri, node, tree.ptrType(node)), + .ptr_type_bit_range => return ptrType(gz, scope, ri, node, tree.ptrTypeBitRange(node)), .container_decl, .container_decl_trailing, - => return containerDecl(gz, scope, rl, node, tree.containerDecl(node)), + => return containerDecl(gz, scope, ri, node, tree.containerDecl(node)), .container_decl_two, .container_decl_two_trailing => { var buffer: [2]Ast.Node.Index = undefined; - return containerDecl(gz, scope, rl, node, tree.containerDeclTwo(&buffer, node)); + return containerDecl(gz, scope, ri, node, tree.containerDeclTwo(&buffer, node)); }, .container_decl_arg, .container_decl_arg_trailing, - => return containerDecl(gz, scope, rl, node, tree.containerDeclArg(node)), + => return containerDecl(gz, scope, ri, node, tree.containerDeclArg(node)), .tagged_union, .tagged_union_trailing, - => return containerDecl(gz, scope, rl, node, tree.taggedUnion(node)), + => return containerDecl(gz, scope, ri, node, tree.taggedUnion(node)), .tagged_union_two, .tagged_union_two_trailing => { var buffer: [2]Ast.Node.Index = undefined; - return containerDecl(gz, scope, rl, node, tree.taggedUnionTwo(&buffer, node)); + return containerDecl(gz, scope, ri, node, tree.taggedUnionTwo(&buffer, node)); }, .tagged_union_enum_tag, .tagged_union_enum_tag_trailing, - => return containerDecl(gz, scope, rl, node, tree.taggedUnionEnumTag(node)), + => return containerDecl(gz, scope, ri, node, tree.taggedUnionEnumTag(node)), .@"break" => return breakExpr(gz, scope, node), .@"continue" => return continueExpr(gz, scope, node), - .grouped_expression => return expr(gz, scope, rl, node_datas[node].lhs), - .array_type => return arrayType(gz, scope, rl, node), - .array_type_sentinel => return arrayTypeSentinel(gz, scope, rl, node), - .char_literal => return charLiteral(gz, rl, node), - .error_set_decl => return errorSetDecl(gz, rl, node), - .array_access => return arrayAccess(gz, scope, rl, node), - .@"comptime" => return comptimeExprAst(gz, scope, rl, node), - .@"switch", .switch_comma => return switchExpr(gz, scope, rl.br(), node), + .grouped_expression => return expr(gz, scope, ri, node_datas[node].lhs), + .array_type => return arrayType(gz, scope, ri, node), + .array_type_sentinel => return arrayTypeSentinel(gz, scope, ri, node), + .char_literal => 
return charLiteral(gz, ri, node), + .error_set_decl => return errorSetDecl(gz, ri, node), + .array_access => return arrayAccess(gz, scope, ri, node), + .@"comptime" => return comptimeExprAst(gz, scope, ri, node), + .@"switch", .switch_comma => return switchExpr(gz, scope, ri.br(), node), - .@"nosuspend" => return nosuspendExpr(gz, scope, rl, node), + .@"nosuspend" => return nosuspendExpr(gz, scope, ri, node), .@"suspend" => return suspendExpr(gz, scope, node), - .@"await" => return awaitExpr(gz, scope, rl, node), - .@"resume" => return resumeExpr(gz, scope, rl, node), + .@"await" => return awaitExpr(gz, scope, ri, node), + .@"resume" => return resumeExpr(gz, scope, ri, node), - .@"try" => return tryExpr(gz, scope, rl, node, node_datas[node].lhs), + .@"try" => return tryExpr(gz, scope, ri, node, node_datas[node].lhs), .array_init_one, .array_init_one_comma => { var elements: [1]Ast.Node.Index = undefined; - return arrayInitExpr(gz, scope, rl, node, tree.arrayInitOne(&elements, node)); + return arrayInitExpr(gz, scope, ri, node, tree.arrayInitOne(&elements, node)); }, .array_init_dot_two, .array_init_dot_two_comma => { var elements: [2]Ast.Node.Index = undefined; - return arrayInitExpr(gz, scope, rl, node, tree.arrayInitDotTwo(&elements, node)); + return arrayInitExpr(gz, scope, ri, node, tree.arrayInitDotTwo(&elements, node)); }, .array_init_dot, .array_init_dot_comma, - => return arrayInitExpr(gz, scope, rl, node, tree.arrayInitDot(node)), + => return arrayInitExpr(gz, scope, ri, node, tree.arrayInitDot(node)), .array_init, .array_init_comma, - => return arrayInitExpr(gz, scope, rl, node, tree.arrayInit(node)), + => return arrayInitExpr(gz, scope, ri, node, tree.arrayInit(node)), .struct_init_one, .struct_init_one_comma => { var fields: [1]Ast.Node.Index = undefined; - return structInitExpr(gz, scope, rl, node, tree.structInitOne(&fields, node)); + return structInitExpr(gz, scope, ri, node, tree.structInitOne(&fields, node)); }, .struct_init_dot_two, .struct_init_dot_two_comma => { var fields: [2]Ast.Node.Index = undefined; - return structInitExpr(gz, scope, rl, node, tree.structInitDotTwo(&fields, node)); + return structInitExpr(gz, scope, ri, node, tree.structInitDotTwo(&fields, node)); }, .struct_init_dot, .struct_init_dot_comma, - => return structInitExpr(gz, scope, rl, node, tree.structInitDot(node)), + => return structInitExpr(gz, scope, ri, node, tree.structInitDot(node)), .struct_init, .struct_init_comma, - => return structInitExpr(gz, scope, rl, node, tree.structInit(node)), + => return structInitExpr(gz, scope, ri, node, tree.structInit(node)), .fn_proto_simple => { var params: [1]Ast.Node.Index = undefined; - return fnProtoExpr(gz, scope, rl, node, tree.fnProtoSimple(¶ms, node)); + return fnProtoExpr(gz, scope, ri, node, tree.fnProtoSimple(¶ms, node)); }, .fn_proto_multi => { - return fnProtoExpr(gz, scope, rl, node, tree.fnProtoMulti(node)); + return fnProtoExpr(gz, scope, ri, node, tree.fnProtoMulti(node)); }, .fn_proto_one => { var params: [1]Ast.Node.Index = undefined; - return fnProtoExpr(gz, scope, rl, node, tree.fnProtoOne(¶ms, node)); + return fnProtoExpr(gz, scope, ri, node, tree.fnProtoOne(¶ms, node)); }, .fn_proto => { - return fnProtoExpr(gz, scope, rl, node, tree.fnProto(node)); + return fnProtoExpr(gz, scope, ri, node, tree.fnProto(node)); }, } } @@ -1052,7 +1070,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerEr fn nosuspendExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) 
InnerError!Zir.Inst.Ref { const astgen = gz.astgen; @@ -1067,7 +1085,7 @@ fn nosuspendExpr( } gz.nosuspend_node = node; defer gz.nosuspend_node = 0; - return expr(gz, scope, rl, body_node); + return expr(gz, scope, ri, body_node); } fn suspendExpr( @@ -1100,7 +1118,7 @@ fn suspendExpr( suspend_scope.suspend_node = node; defer suspend_scope.unstack(); - const body_result = try expr(&suspend_scope, &suspend_scope.base, .none, body_node); + const body_result = try expr(&suspend_scope, &suspend_scope.base, .{ .rl = .none }, body_node); if (!gz.refIsNoReturn(body_result)) { _ = try suspend_scope.addBreak(.break_inline, suspend_inst, .void_value); } @@ -1112,7 +1130,7 @@ fn suspendExpr( fn awaitExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; @@ -1125,7 +1143,7 @@ fn awaitExpr( try astgen.errNoteNode(gz.suspend_node, "suspend block here", .{}), }); } - const operand = try expr(gz, scope, .none, rhs_node); + const operand = try expr(gz, scope, .{ .rl = .none }, rhs_node); const result = if (gz.nosuspend_node != 0) try gz.addExtendedPayload(.await_nosuspend, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), @@ -1134,28 +1152,28 @@ fn awaitExpr( else try gz.addUnNode(.@"await", operand, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn resumeExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; const tree = astgen.tree; const node_datas = tree.nodes.items(.data); const rhs_node = node_datas[node].lhs; - const operand = try expr(gz, scope, .none, rhs_node); + const operand = try expr(gz, scope, .{ .rl = .none }, rhs_node); const result = try gz.addUnNode(.@"resume", operand, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn fnProtoExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, fn_proto: Ast.full.FnProto, ) InnerError!Zir.Inst.Ref { @@ -1221,7 +1239,7 @@ fn fnProtoExpr( assert(param_type_node != 0); var param_gz = block_scope.makeSubBlock(scope); defer param_gz.unstack(); - const param_type = try expr(¶m_gz, scope, coerced_type_rl, param_type_node); + const param_type = try expr(¶m_gz, scope, coerced_type_ri, param_type_node); const param_inst_expected = @intCast(u32, astgen.instructions.len + 1); _ = try param_gz.addBreak(.break_inline, param_inst_expected, param_type); const main_tokens = tree.nodes.items(.main_token); @@ -1235,7 +1253,7 @@ fn fnProtoExpr( }; const align_ref: Zir.Inst.Ref = if (fn_proto.ast.align_expr == 0) .none else inst: { - break :inst try expr(&block_scope, scope, align_rl, fn_proto.ast.align_expr); + break :inst try expr(&block_scope, scope, align_ri, fn_proto.ast.align_expr); }; if (fn_proto.ast.addrspace_expr != 0) { @@ -1250,7 +1268,7 @@ fn fnProtoExpr( try expr( &block_scope, scope, - .{ .ty = .calling_convention_type }, + .{ .rl = .{ .ty = .calling_convention_type } }, fn_proto.ast.callconv_expr, ) else @@ -1261,7 +1279,7 @@ fn fnProtoExpr( if (is_inferred_error) { return astgen.failTok(maybe_bang, "function prototype may not have inferred error set", .{}); } - const ret_ty = try expr(&block_scope, scope, coerced_type_rl, fn_proto.ast.return_type); + const ret_ty = try expr(&block_scope, scope, coerced_type_ri, fn_proto.ast.return_type); const result = try block_scope.addFunc(.{ .src_node = fn_proto.ast.proto_node, @@ -1292,13 +1310,13 @@ fn fnProtoExpr( try 
block_scope.setBlockBody(block_inst); try gz.instructions.append(astgen.gpa, block_inst); - return rvalue(gz, rl, indexToRef(block_inst), fn_proto.ast.proto_node); + return rvalue(gz, ri, indexToRef(block_inst), fn_proto.ast.proto_node); } fn arrayInitExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, array_init: Ast.full.ArrayInit, ) InnerError!Zir.Inst.Ref { @@ -1340,7 +1358,7 @@ fn arrayInitExpr( .elem = elem_type, }; } else { - const sentinel = try comptimeExpr(gz, scope, .{ .ty = elem_type }, array_type.ast.sentinel); + const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = elem_type } }, array_type.ast.sentinel); const array_type_inst = try gz.addPlNode( .array_type_sentinel, array_init.ast.type_expr, @@ -1368,29 +1386,29 @@ fn arrayInitExpr( }; }; - switch (rl) { + switch (ri.rl) { .discard => { // TODO elements should still be coerced if type is provided for (array_init.ast.elements) |elem_init| { - _ = try expr(gz, scope, .discard, elem_init); + _ = try expr(gz, scope, .{ .rl = .discard }, elem_init); } return Zir.Inst.Ref.void_value; }, - .ref, .catch_ref => { + .ref => { const tag: Zir.Inst.Tag = if (types.array != .none) .array_init_ref else .array_init_anon_ref; return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag); }, - .none, .catch_none => { + .none => { const tag: Zir.Inst.Tag = if (types.array != .none) .array_init else .array_init_anon; return arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag); }, - .ty, .ty_shift_operand, .coerced_ty => { + .ty, .coerced_ty => { const tag: Zir.Inst.Tag = if (types.array != .none) .array_init else .array_init_anon; const result = try arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, tag); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .ptr => |ptr_res| { - return arrayInitExprRlPtr(gz, scope, rl, node, ptr_res.inst, array_init.ast.elements, types.array); + return arrayInitExprRlPtr(gz, scope, ri, node, ptr_res.inst, array_init.ast.elements, types.array); }, .inferred_ptr => |ptr_inst| { if (types.array == .none) { @@ -1398,9 +1416,9 @@ fn arrayInitExpr( // analyzing array_base_ptr against an alloc_inferred_mut. // See corresponding logic in structInitExpr. const result = try arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } else { - return arrayInitExprRlPtr(gz, scope, rl, node, ptr_inst, array_init.ast.elements, types.array); + return arrayInitExprRlPtr(gz, scope, ri, node, ptr_inst, array_init.ast.elements, types.array); } }, .block_ptr => |block_gz| { @@ -1408,9 +1426,9 @@ fn arrayInitExpr( // See corresponding logic in structInitExpr. 
if (types.array == .none and astgen.isInferred(block_gz.rl_ptr)) { const result = try arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } - return arrayInitExprRlPtr(gz, scope, rl, node, block_gz.rl_ptr, array_init.ast.elements, types.array); + return arrayInitExprRlPtr(gz, scope, ri, node, block_gz.rl_ptr, array_init.ast.elements, types.array); }, } } @@ -1430,7 +1448,7 @@ fn arrayInitExprRlNone( var extra_index = try reserveExtra(astgen, elements.len); for (elements) |elem_init| { - const elem_ref = try expr(gz, scope, .none, elem_init); + const elem_ref = try expr(gz, scope, .{ .rl = .none }, elem_init); astgen.extra.items[extra_index] = @enumToInt(elem_ref); extra_index += 1; } @@ -1459,9 +1477,9 @@ fn arrayInitExprInner( } for (elements) |elem_init, i| { - const rl = if (elem_ty != .none) - ResultLoc{ .coerced_ty = elem_ty } - else if (array_ty_inst != .none and nodeMayNeedMemoryLocation(astgen.tree, elem_init, true)) rl: { + const ri = if (elem_ty != .none) + ResultInfo{ .rl = .{ .coerced_ty = elem_ty } } + else if (array_ty_inst != .none and nodeMayNeedMemoryLocation(astgen.tree, elem_init, true)) ri: { const ty_expr = try gz.add(.{ .tag = .elem_type_index, .data = .{ .bin = .{ @@ -1469,10 +1487,10 @@ fn arrayInitExprInner( .rhs = @intToEnum(Zir.Inst.Ref, i), } }, }); - break :rl ResultLoc{ .coerced_ty = ty_expr }; - } else ResultLoc{ .none = {} }; + break :ri ResultInfo{ .rl = .{ .coerced_ty = ty_expr } }; + } else ResultInfo{ .rl = .{ .none = {} } }; - const elem_ref = try expr(gz, scope, rl, elem_init); + const elem_ref = try expr(gz, scope, ri, elem_init); astgen.extra.items[extra_index] = @enumToInt(elem_ref); extra_index += 1; } @@ -1483,7 +1501,7 @@ fn arrayInitExprInner( fn arrayInitExprRlPtr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, result_ptr: Zir.Inst.Ref, elements: []const Ast.Node.Index, @@ -1498,7 +1516,7 @@ fn arrayInitExprRlPtr( defer as_scope.unstack(); const result = try arrayInitExprRlPtrInner(&as_scope, scope, node, as_scope.rl_ptr, elements); - return as_scope.finishCoercion(gz, rl, node, result, array_ty); + return as_scope.finishCoercion(gz, ri, node, result, array_ty); } fn arrayInitExprRlPtrInner( @@ -1522,7 +1540,7 @@ fn arrayInitExprRlPtrInner( }); astgen.extra.items[extra_index] = refToIndex(elem_ptr).?; extra_index += 1; - _ = try expr(gz, scope, .{ .ptr = .{ .inst = elem_ptr } }, elem_init); + _ = try expr(gz, scope, .{ .rl = .{ .ptr = .{ .inst = elem_ptr } } }, elem_init); } const tag: Zir.Inst.Tag = if (gz.force_comptime) @@ -1537,7 +1555,7 @@ fn arrayInitExprRlPtrInner( fn structInitExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, struct_init: Ast.full.StructInit, ) InnerError!Zir.Inst.Ref { @@ -1546,7 +1564,7 @@ fn structInitExpr( if (struct_init.ast.type_expr == 0) { if (struct_init.ast.fields.len == 0) { - return rvalue(gz, rl, .empty_struct, node); + return rvalue(gz, ri, .empty_struct, node); } } else array: { const node_tags = tree.nodes.items(.tag); @@ -1558,7 +1576,7 @@ fn structInitExpr( if (struct_init.ast.fields.len == 0) { const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); const result = try gz.addUnNode(.struct_init_empty, ty_inst, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } break :array; }, @@ -1575,7 +1593,7 @@ fn structInitExpr( .rhs = elem_type, }); } else blk: { - const sentinel = 
try comptimeExpr(gz, scope, .{ .ty = elem_type }, array_type.ast.sentinel); + const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = elem_type } }, array_type.ast.sentinel); break :blk try gz.addPlNode( .array_type_sentinel, struct_init.ast.type_expr, @@ -1587,11 +1605,11 @@ fn structInitExpr( ); }; const result = try gz.addUnNode(.struct_init_empty, array_type_inst, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); const result = try gz.addUnNode(.struct_init_empty, ty_inst, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } else { return astgen.failNode( struct_init.ast.type_expr, @@ -1601,7 +1619,7 @@ fn structInitExpr( } } - switch (rl) { + switch (ri.rl) { .discard => { if (struct_init.ast.type_expr != 0) { const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); @@ -1612,7 +1630,7 @@ fn structInitExpr( } return Zir.Inst.Ref.void_value; }, - .ref, .catch_ref => { + .ref => { if (struct_init.ast.type_expr != 0) { const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); _ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node); @@ -1621,7 +1639,7 @@ fn structInitExpr( return structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon_ref); } }, - .none, .catch_none => { + .none => { if (struct_init.ast.type_expr != 0) { const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); _ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node); @@ -1630,26 +1648,26 @@ fn structInitExpr( return structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon); } }, - .ty, .ty_shift_operand, .coerced_ty => |ty_inst| { + .ty, .coerced_ty => |ty_inst| { if (struct_init.ast.type_expr == 0) { const result = try structInitExprRlNone(gz, scope, node, struct_init, ty_inst, .struct_init_anon); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } const inner_ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr); _ = try gz.addUnNode(.validate_struct_init_ty, inner_ty_inst, node); const result = try structInitExprRlTy(gz, scope, node, struct_init, inner_ty_inst, .struct_init); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, - .ptr => |ptr_res| return structInitExprRlPtr(gz, scope, rl, node, struct_init, ptr_res.inst), + .ptr => |ptr_res| return structInitExprRlPtr(gz, scope, ri, node, struct_init, ptr_res.inst), .inferred_ptr => |ptr_inst| { if (struct_init.ast.type_expr == 0) { // We treat this case differently so that we don't get a crash when // analyzing field_base_ptr against an alloc_inferred_mut. // See corresponding logic in arrayInitExpr. const result = try structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } else { - return structInitExprRlPtr(gz, scope, rl, node, struct_init, ptr_inst); + return structInitExprRlPtr(gz, scope, ri, node, struct_init, ptr_inst); } }, .block_ptr => |block_gz| { @@ -1657,10 +1675,10 @@ fn structInitExpr( // See corresponding logic in arrayInitExpr. 
if (struct_init.ast.type_expr == 0 and astgen.isInferred(block_gz.rl_ptr)) { const result = try structInitExprRlNone(gz, scope, node, struct_init, .none, .struct_init_anon); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } - return structInitExprRlPtr(gz, scope, rl, node, struct_init, block_gz.rl_ptr); + return structInitExprRlPtr(gz, scope, ri, node, struct_init, block_gz.rl_ptr); }, } } @@ -1685,16 +1703,15 @@ fn structInitExprRlNone( for (struct_init.ast.fields) |field_init| { const name_token = tree.firstToken(field_init) - 2; const str_index = try astgen.identAsString(name_token); - const sub_rl: ResultLoc = if (ty_inst != .none) - ResultLoc{ .ty = try gz.addPlNode(.field_type, field_init, Zir.Inst.FieldType{ + const sub_ri: ResultInfo = if (ty_inst != .none) + ResultInfo{ .rl = .{ .ty = try gz.addPlNode(.field_type, field_init, Zir.Inst.FieldType{ .container_type = ty_inst, .name_start = str_index, - }) } - else - .none; + }) } } + else .{ .rl = .none }; setExtra(astgen, extra_index, Zir.Inst.StructInitAnon.Item{ .field_name = str_index, - .init = try expr(gz, scope, sub_rl, field_init), + .init = try expr(gz, scope, sub_ri, field_init), }); extra_index += field_size; } @@ -1705,7 +1722,7 @@ fn structInitExprRlNone( fn structInitExprRlPtr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, struct_init: Ast.full.StructInit, result_ptr: Zir.Inst.Ref, @@ -1721,7 +1738,7 @@ fn structInitExprRlPtr( defer as_scope.unstack(); const result = try structInitExprRlPtrInner(&as_scope, scope, node, struct_init, as_scope.rl_ptr); - return as_scope.finishCoercion(gz, rl, node, result, ty_inst); + return as_scope.finishCoercion(gz, ri, node, result, ty_inst); } fn structInitExprRlPtrInner( @@ -1748,7 +1765,7 @@ fn structInitExprRlPtrInner( }); astgen.extra.items[extra_index] = refToIndex(field_ptr).?; extra_index += 1; - _ = try expr(gz, scope, .{ .ptr = .{ .inst = field_ptr } }, field_init); + _ = try expr(gz, scope, .{ .rl = .{ .ptr = .{ .inst = field_ptr } } }, field_init); } const tag: Zir.Inst.Tag = if (gz.force_comptime) @@ -1786,7 +1803,7 @@ fn structInitExprRlTy( }); setExtra(astgen, extra_index, Zir.Inst.StructInit.Item{ .field_type = refToIndex(field_ty_inst).?, - .init = try expr(gz, scope, .{ .ty = field_ty_inst }, field_init), + .init = try expr(gz, scope, .{ .rl = .{ .ty = field_ty_inst } }, field_init), }); extra_index += field_size; } @@ -1799,14 +1816,14 @@ fn structInitExprRlTy( fn comptimeExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const prev_force_comptime = gz.force_comptime; gz.force_comptime = true; defer gz.force_comptime = prev_force_comptime; - return expr(gz, scope, rl, node); + return expr(gz, scope, ri, node); } /// This one is for an actual `comptime` syntax, and will emit a compile error if @@ -1815,7 +1832,7 @@ fn comptimeExpr( fn comptimeExprAst( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; @@ -1826,7 +1843,7 @@ fn comptimeExprAst( const node_datas = tree.nodes.items(.data); const body_node = node_datas[node].lhs; gz.force_comptime = true; - const result = try expr(gz, scope, rl, body_node); + const result = try expr(gz, scope, ri, body_node); gz.force_comptime = false; return result; } @@ -1904,7 +1921,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn } block_gz.break_count += 1; - const operand = try 
reachableExpr(parent_gz, parent_scope, block_gz.break_result_loc, rhs, node); + const operand = try reachableExpr(parent_gz, parent_scope, block_gz.break_result_info, rhs, node); const search_index = @intCast(Zir.Inst.Index, astgen.instructions.len); try genDefers(parent_gz, scope, parent_scope, .normal_only); @@ -1915,14 +1932,14 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn try popErrorReturnTrace( parent_gz, scope, - block_gz.break_result_loc, + block_gz.break_result_info, rhs, operand, err_trace_index_to_restore, ); } - switch (block_gz.break_result_loc) { + switch (block_gz.break_result_info.rl) { .block_ptr => { const br = try parent_gz.addBreak(break_tag, block_inst, operand); try block_gz.labeled_breaks.append(astgen.gpa, .{ .br = br, .search = search_index }); @@ -2028,7 +2045,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) fn blockExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, block_node: Ast.Node.Index, statements: []const Ast.Node.Index, ) InnerError!Zir.Inst.Ref { @@ -2044,12 +2061,12 @@ fn blockExpr( if (token_tags[lbrace - 1] == .colon and token_tags[lbrace - 2] == .identifier) { - return labeledBlockExpr(gz, scope, rl, block_node, statements); + return labeledBlockExpr(gz, scope, ri, block_node, statements); } var sub_gz = gz.makeSubBlock(scope); try blockExprStmts(&sub_gz, &sub_gz.base, statements); - return rvalue(gz, rl, .void_value, block_node); + return rvalue(gz, ri, .void_value, block_node); } fn checkLabelRedefinition(astgen: *AstGen, parent_scope: *Scope, label: Ast.TokenIndex) !void { @@ -2087,7 +2104,7 @@ fn checkLabelRedefinition(astgen: *AstGen, parent_scope: *Scope, label: Ast.Toke fn labeledBlockExpr( gz: *GenZir, parent_scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, block_node: Ast.Node.Index, statements: []const Ast.Node.Index, ) InnerError!Zir.Inst.Ref { @@ -2116,7 +2133,7 @@ fn labeledBlockExpr( .token = label_token, .block_inst = block_inst, }; - block_scope.setBreakResultLoc(rl); + block_scope.setBreakResultInfo(ri); defer block_scope.unstack(); defer block_scope.labeled_breaks.deinit(astgen.gpa); @@ -2132,7 +2149,7 @@ fn labeledBlockExpr( const zir_datas = gz.astgen.instructions.items(.data); const zir_tags = gz.astgen.instructions.items(.tag); - const strat = rl.strategy(&block_scope); + const strat = ri.rl.strategy(&block_scope); switch (strat.tag) { .break_void => { // The code took advantage of the result location as a pointer. 
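The hunks above and below lean on the new `ResultInfo` wrapper that this series threads through AstGen in place of the bare `ResultLoc`. A minimal sketch of its shape, reconstructed only from the call sites visible in this diff (`.{ .rl = ... }`, `ri.rl.strategy(...)`, and the `.ctx` values set in the tryExpr and ret hunks further down) rather than from the authoritative definition:

const ResultInfo = struct {
    /// The old ResultLoc payload: where the result value should go
    /// (.none, .ref, .ty, .coerced_ty, .discard, .ptr, .inferred_ptr, ...).
    rl: Loc,
    /// New: why the expression is being evaluated. This replaces the
    /// deleted .catch_none/.catch_ref ResultLoc variants: operands of
    /// try/catch/error-union-if are tagged .error_handling_expr and return
    /// operands .@"return", so error-trace code can switch on ri.ctx
    /// instead of sniffing ZIR tags out of the result pointer.
    ctx: Context = .none,

    const Context = enum { none, error_handling_expr, @"return" };
};

Here `Loc` stands in for the old `ResultLoc` union, and the real `Context` enum may well carry more variants than the three visible in this patch.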
@@ -2173,9 +2190,9 @@ fn labeledBlockExpr( } try block_scope.setBlockBody(block_inst); const block_ref = indexToRef(block_inst); - switch (rl) { - .ref, .catch_ref => return block_ref, - else => return rvalue(gz, rl, block_ref, block_node), + switch (ri.rl) { + .ref => return block_ref, + else => return rvalue(gz, ri, block_ref, block_node), } }, } @@ -2246,12 +2263,12 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod continue; }, - .while_simple => _ = try whileExpr(gz, scope, .discard, inner_node, tree.whileSimple(inner_node), true), - .while_cont => _ = try whileExpr(gz, scope, .discard, inner_node, tree.whileCont(inner_node), true), - .@"while" => _ = try whileExpr(gz, scope, .discard, inner_node, tree.whileFull(inner_node), true), + .while_simple => _ = try whileExpr(gz, scope, .{ .rl = .discard }, inner_node, tree.whileSimple(inner_node), true), + .while_cont => _ = try whileExpr(gz, scope, .{ .rl = .discard }, inner_node, tree.whileCont(inner_node), true), + .@"while" => _ = try whileExpr(gz, scope, .{ .rl = .discard }, inner_node, tree.whileFull(inner_node), true), - .for_simple => _ = try forExpr(gz, scope, .discard, inner_node, tree.forSimple(inner_node), true), - .@"for" => _ = try forExpr(gz, scope, .discard, inner_node, tree.forFull(inner_node), true), + .for_simple => _ = try forExpr(gz, scope, .{ .rl = .discard }, inner_node, tree.forSimple(inner_node), true), + .@"for" => _ = try forExpr(gz, scope, .{ .rl = .discard }, inner_node, tree.forFull(inner_node), true), else => noreturn_src_node = try unusedResultExpr(gz, scope, inner_node), // zig fmt: on @@ -2272,7 +2289,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) Inner try emitDbgNode(gz, statement); // We need to emit an error if the result is not `noreturn` or `void`, but // we want to avoid adding the ZIR instruction if possible for performance. 
- const maybe_unused_result = try expr(gz, scope, .none, statement); + const maybe_unused_result = try expr(gz, scope, .{ .rl = .none }, statement); return addEnsureResult(gz, maybe_unused_result, statement); } @@ -2839,7 +2856,7 @@ fn varDecl( } const align_inst: Zir.Inst.Ref = if (var_decl.ast.align_node != 0) - try expr(gz, scope, align_rl, var_decl.ast.align_node) + try expr(gz, scope, align_ri, var_decl.ast.align_node) else .none; @@ -2856,12 +2873,12 @@ fn varDecl( if (align_inst == .none and !nodeMayNeedMemoryLocation(tree, var_decl.ast.init_node, type_node != 0)) { - const result_loc: ResultLoc = if (type_node != 0) .{ - .ty = try typeExpr(gz, scope, type_node), - } else .none; + const result_info: ResultInfo = if (type_node != 0) .{ + .rl = .{ .ty = try typeExpr(gz, scope, type_node) }, + } else .{ .rl = .none }; const prev_anon_name_strategy = gz.anon_name_strategy; gz.anon_name_strategy = .dbg_var; - const init_inst = try reachableExpr(gz, scope, result_loc, var_decl.ast.init_node, node); + const init_inst = try reachableExpr(gz, scope, result_info, var_decl.ast.init_node, node); gz.anon_name_strategy = prev_anon_name_strategy; try gz.addDbgVar(.dbg_var_val, ident_name, init_inst); @@ -2931,8 +2948,8 @@ fn varDecl( init_scope.rl_ptr = alloc; init_scope.rl_ty_inst = .none; } - const init_result_loc: ResultLoc = .{ .block_ptr = &init_scope }; - const init_inst = try reachableExpr(&init_scope, &init_scope.base, init_result_loc, var_decl.ast.init_node, node); + const init_result_info: ResultInfo = .{ .rl = .{ .block_ptr = &init_scope } }; + const init_inst = try reachableExpr(&init_scope, &init_scope.base, init_result_info, var_decl.ast.init_node, node); const zir_tags = astgen.instructions.items(.tag); const zir_datas = astgen.instructions.items(.data); @@ -3021,7 +3038,7 @@ fn varDecl( const is_comptime = var_decl.comptime_token != null or gz.force_comptime; var resolve_inferred_alloc: Zir.Inst.Ref = .none; const var_data: struct { - result_loc: ResultLoc, + result_info: ResultInfo, alloc: Zir.Inst.Ref, } = if (var_decl.ast.type_node != 0) a: { const type_inst = try typeExpr(gz, scope, var_decl.ast.type_node); @@ -3043,7 +3060,7 @@ fn varDecl( } }; gz.rl_ty_inst = type_inst; - break :a .{ .alloc = alloc, .result_loc = .{ .ptr = .{ .inst = alloc } } }; + break :a .{ .alloc = alloc, .result_info = .{ .rl = .{ .ptr = .{ .inst = alloc } } } }; } else a: { const alloc = alloc: { if (align_inst == .none) { @@ -3064,11 +3081,11 @@ fn varDecl( }; gz.rl_ty_inst = .none; resolve_inferred_alloc = alloc; - break :a .{ .alloc = alloc, .result_loc = .{ .inferred_ptr = alloc } }; + break :a .{ .alloc = alloc, .result_info = .{ .rl = .{ .inferred_ptr = alloc } } }; }; const prev_anon_name_strategy = gz.anon_name_strategy; gz.anon_name_strategy = .dbg_var; - _ = try reachableExprComptime(gz, scope, var_data.result_loc, var_decl.ast.init_node, node, is_comptime); + _ = try reachableExprComptime(gz, scope, var_data.result_info, var_decl.ast.init_node, node, is_comptime); gz.anon_name_strategy = prev_anon_name_strategy; if (resolve_inferred_alloc != .none) { _ = try gz.addUnNode(.resolve_inferred_alloc, resolve_inferred_alloc, node); @@ -3138,15 +3155,15 @@ fn assign(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerError!voi // This intentionally does not support `@"_"` syntax. 
const ident_name = tree.tokenSlice(main_tokens[lhs]); if (mem.eql(u8, ident_name, "_")) { - _ = try expr(gz, scope, .discard, rhs); + _ = try expr(gz, scope, .{ .rl = .discard }, rhs); return; } } const lvalue = try lvalExpr(gz, scope, lhs); - _ = try expr(gz, scope, .{ .ptr = .{ + _ = try expr(gz, scope, .{ .rl = .{ .ptr = .{ .inst = lvalue, .src_node = infix_node, - } }, rhs); + } } }, rhs); } fn assignOp( @@ -3163,7 +3180,7 @@ fn assignOp( const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs); const lhs = try gz.addUnNode(.load, lhs_ptr, infix_node); const lhs_type = try gz.addUnNode(.typeof, lhs, infix_node); - const rhs = try expr(gz, scope, .{ .coerced_ty = lhs_type }, node_datas[infix_node].rhs); + const rhs = try expr(gz, scope, .{ .rl = .{ .coerced_ty = lhs_type } }, node_datas[infix_node].rhs); const result = try gz.addPlNode(op_inst_tag, infix_node, Zir.Inst.Bin{ .lhs = lhs, @@ -3186,7 +3203,7 @@ fn assignShift( const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs); const lhs = try gz.addUnNode(.load, lhs_ptr, infix_node); const rhs_type = try gz.addUnNode(.typeof_log2_int_type, lhs, infix_node); - const rhs = try expr(gz, scope, .{ .ty = rhs_type }, node_datas[infix_node].rhs); + const rhs = try expr(gz, scope, .{ .rl = .{ .ty = rhs_type } }, node_datas[infix_node].rhs); const result = try gz.addPlNode(op_inst_tag, infix_node, Zir.Inst.Bin{ .lhs = lhs, @@ -3204,7 +3221,7 @@ fn assignShiftSat(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerE const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs); const lhs = try gz.addUnNode(.load, lhs_ptr, infix_node); // Saturating shift-left allows any integer type for both the LHS and RHS. - const rhs = try expr(gz, scope, .none, node_datas[infix_node].rhs); + const rhs = try expr(gz, scope, .{ .rl = .none }, node_datas[infix_node].rhs); const result = try gz.addPlNode(.shl_sat, infix_node, Zir.Inst.Bin{ .lhs = lhs, @@ -3216,7 +3233,7 @@ fn assignShiftSat(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerE fn ptrType( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ptr_info: Ast.full.PtrType, ) InnerError!Zir.Inst.Ref { @@ -3234,21 +3251,21 @@ fn ptrType( var trailing_count: u32 = 0; if (ptr_info.ast.sentinel != 0) { - sentinel_ref = try expr(gz, scope, .{ .ty = elem_type }, ptr_info.ast.sentinel); + sentinel_ref = try expr(gz, scope, .{ .rl = .{ .ty = elem_type } }, ptr_info.ast.sentinel); trailing_count += 1; } if (ptr_info.ast.align_node != 0) { - align_ref = try expr(gz, scope, coerced_align_rl, ptr_info.ast.align_node); + align_ref = try expr(gz, scope, coerced_align_ri, ptr_info.ast.align_node); trailing_count += 1; } if (ptr_info.ast.addrspace_node != 0) { - addrspace_ref = try expr(gz, scope, .{ .ty = .address_space_type }, ptr_info.ast.addrspace_node); + addrspace_ref = try expr(gz, scope, .{ .rl = .{ .ty = .address_space_type } }, ptr_info.ast.addrspace_node); trailing_count += 1; } if (ptr_info.ast.bit_range_start != 0) { assert(ptr_info.ast.bit_range_end != 0); - bit_start_ref = try expr(gz, scope, .{ .coerced_ty = .u16_type }, ptr_info.ast.bit_range_start); - bit_end_ref = try expr(gz, scope, .{ .coerced_ty = .u16_type }, ptr_info.ast.bit_range_end); + bit_start_ref = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, ptr_info.ast.bit_range_start); + bit_end_ref = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, ptr_info.ast.bit_range_end); trailing_count += 2; } @@ -3295,10 +3312,10 @@ fn ptrType( } }); 
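// Note on the pattern in ptrType above (a reading of this diff, not an
// authoritative rule): every bare ResultLoc value at a call site gains a
// `.rl` wrapper, and payload-carrying variants nest one level deeper, e.g.
//   before: sentinel_ref = try expr(gz, scope, .{ .ty = elem_type }, ptr_info.ast.sentinel);
//   after:  sentinel_ref = try expr(gz, scope, .{ .rl = .{ .ty = elem_type } }, ptr_info.ast.sentinel);
//   before: _ = try expr(gz, scope, .none, node);
//   after:  _ = try expr(gz, scope, .{ .rl = .none }, node);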
gz.instructions.appendAssumeCapacity(new_index); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } -fn arrayType(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) !Zir.Inst.Ref { +fn arrayType(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) !Zir.Inst.Ref { const astgen = gz.astgen; const tree = astgen.tree; const node_datas = tree.nodes.items(.data); @@ -3311,17 +3328,17 @@ fn arrayType(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) !Z { return astgen.failNode(len_node, "unable to infer array size", .{}); } - const len = try expr(gz, scope, .{ .coerced_ty = .usize_type }, len_node); + const len = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, len_node); const elem_type = try typeExpr(gz, scope, node_datas[node].rhs); const result = try gz.addPlNode(.array_type, node, Zir.Inst.Bin{ .lhs = len, .rhs = elem_type, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } -fn arrayTypeSentinel(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) !Zir.Inst.Ref { +fn arrayTypeSentinel(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) !Zir.Inst.Ref { const astgen = gz.astgen; const tree = astgen.tree; const node_datas = tree.nodes.items(.data); @@ -3335,16 +3352,16 @@ fn arrayTypeSentinel(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.I { return astgen.failNode(len_node, "unable to infer array size", .{}); } - const len = try reachableExpr(gz, scope, .{ .coerced_ty = .usize_type }, len_node, node); + const len = try reachableExpr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, len_node, node); const elem_type = try typeExpr(gz, scope, extra.elem_type); - const sentinel = try reachableExpr(gz, scope, .{ .coerced_ty = elem_type }, extra.sentinel, node); + const sentinel = try reachableExpr(gz, scope, .{ .rl = .{ .coerced_ty = elem_type } }, extra.sentinel, node); const result = try gz.addPlNode(.array_type_sentinel, node, Zir.Inst.ArrayTypeSentinel{ .len = len, .elem_type = elem_type, .sentinel = sentinel, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } const WipMembers = struct { @@ -3580,7 +3597,7 @@ fn fnDecl( assert(param_type_node != 0); var param_gz = decl_gz.makeSubBlock(scope); defer param_gz.unstack(); - const param_type = try expr(&param_gz, params_scope, coerced_type_rl, param_type_node); + const param_type = try expr(&param_gz, params_scope, coerced_type_ri, param_type_node); const param_inst_expected = @intCast(u32, astgen.instructions.len + 1); _ = try param_gz.addBreak(.break_inline, param_inst_expected, param_type); @@ -3629,7 +3646,7 @@ fn fnDecl( var align_gz = decl_gz.makeSubBlock(params_scope); defer align_gz.unstack(); const align_ref: Zir.Inst.Ref = if (fn_proto.ast.align_expr == 0) .none else inst: { - const inst = try expr(&decl_gz, params_scope, coerced_align_rl, fn_proto.ast.align_expr); + const inst = try expr(&decl_gz, params_scope, coerced_align_ri, fn_proto.ast.align_expr); if (align_gz.instructionsSlice().len == 0) { // In this case we will send a len=0 body which can be encoded more efficiently.
break :inst inst; @@ -3641,7 +3658,7 @@ fn fnDecl( var addrspace_gz = decl_gz.makeSubBlock(params_scope); defer addrspace_gz.unstack(); const addrspace_ref: Zir.Inst.Ref = if (fn_proto.ast.addrspace_expr == 0) .none else inst: { - const inst = try expr(&decl_gz, params_scope, .{ .coerced_ty = .address_space_type }, fn_proto.ast.addrspace_expr); + const inst = try expr(&decl_gz, params_scope, .{ .rl = .{ .coerced_ty = .address_space_type } }, fn_proto.ast.addrspace_expr); if (addrspace_gz.instructionsSlice().len == 0) { // In this case we will send a len=0 body which can be encoded more efficiently. break :inst inst; @@ -3653,7 +3670,7 @@ fn fnDecl( var section_gz = decl_gz.makeSubBlock(params_scope); defer section_gz.unstack(); const section_ref: Zir.Inst.Ref = if (fn_proto.ast.section_expr == 0) .none else inst: { - const inst = try expr(&decl_gz, params_scope, .{ .coerced_ty = .const_slice_u8_type }, fn_proto.ast.section_expr); + const inst = try expr(&decl_gz, params_scope, .{ .rl = .{ .coerced_ty = .const_slice_u8_type } }, fn_proto.ast.section_expr); if (section_gz.instructionsSlice().len == 0) { // In this case we will send a len=0 body which can be encoded more efficiently. break :inst inst; @@ -3676,7 +3693,7 @@ fn fnDecl( const inst = try expr( &decl_gz, params_scope, - .{ .coerced_ty = .calling_convention_type }, + .{ .rl = .{ .coerced_ty = .calling_convention_type } }, fn_proto.ast.callconv_expr, ); if (cc_gz.instructionsSlice().len == 0) { @@ -3698,7 +3715,7 @@ fn fnDecl( var ret_gz = decl_gz.makeSubBlock(params_scope); defer ret_gz.unstack(); const ret_ref: Zir.Inst.Ref = inst: { - const inst = try expr(&ret_gz, params_scope, coerced_type_rl, fn_proto.ast.return_type); + const inst = try expr(&ret_gz, params_scope, coerced_type_ri, fn_proto.ast.return_type); if (ret_gz.instructionsSlice().len == 0) { // In this case we will send a len=0 body which can be encoded more efficiently. 
break :inst inst; @@ -3752,7 +3769,7 @@ fn fnDecl( const lbrace_line = astgen.source_line - decl_gz.decl_line; const lbrace_column = astgen.source_column; - _ = try expr(&fn_gz, params_scope, .none, body_node); + _ = try expr(&fn_gz, params_scope, .{ .rl = .none }, body_node); try checkUsed(gz, &fn_gz.base, params_scope); if (!fn_gz.endsWithNoReturn()) { @@ -3848,13 +3865,13 @@ fn globalVarDecl( break :blk token_tags[maybe_extern_token] == .keyword_extern; }; const align_inst: Zir.Inst.Ref = if (var_decl.ast.align_node == 0) .none else inst: { - break :inst try expr(&block_scope, &block_scope.base, align_rl, var_decl.ast.align_node); + break :inst try expr(&block_scope, &block_scope.base, align_ri, var_decl.ast.align_node); }; const addrspace_inst: Zir.Inst.Ref = if (var_decl.ast.addrspace_node == 0) .none else inst: { - break :inst try expr(&block_scope, &block_scope.base, .{ .ty = .address_space_type }, var_decl.ast.addrspace_node); + break :inst try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .address_space_type } }, var_decl.ast.addrspace_node); }; const section_inst: Zir.Inst.Ref = if (var_decl.ast.section_node == 0) .none else inst: { - break :inst try comptimeExpr(&block_scope, &block_scope.base, .{ .ty = .const_slice_u8_type }, var_decl.ast.section_node); + break :inst try comptimeExpr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .const_slice_u8_type } }, var_decl.ast.section_node); }; const has_section_or_addrspace = section_inst != .none or addrspace_inst != .none; wip_members.nextDecl(is_pub, is_export, align_inst != .none, has_section_or_addrspace); @@ -3894,7 +3911,7 @@ fn globalVarDecl( try expr( &block_scope, &block_scope.base, - .{ .ty = .type_type }, + .{ .rl = .{ .ty = .type_type } }, var_decl.ast.type_node, ) else @@ -3903,7 +3920,7 @@ fn globalVarDecl( const init_inst = try expr( &block_scope, &block_scope.base, - if (type_inst != .none) .{ .ty = type_inst } else .none, + if (type_inst != .none) .{ .rl = .{ .ty = type_inst } } else .{ .rl = .none }, var_decl.ast.init_node, ); @@ -3992,7 +4009,7 @@ fn comptimeDecl( }; defer decl_block.unstack(); - const block_result = try expr(&decl_block, &decl_block.base, .none, body_node); + const block_result = try expr(&decl_block, &decl_block.base, .{ .rl = .none }, body_node); if (decl_block.isEmpty() or !decl_block.refIsNoReturn(block_result)) { _ = try decl_block.addBreak(.break_inline, block_inst, .void_value); } @@ -4196,7 +4213,7 @@ fn testDecl( const lbrace_line = astgen.source_line - decl_block.decl_line; const lbrace_column = astgen.source_column; - const block_result = try expr(&fn_block, &fn_block.base, .none, body_node); + const block_result = try expr(&fn_block, &fn_block.base, .{ .rl = .none }, body_node); if (fn_block.isEmpty() or !fn_block.refIsNoReturn(block_result)) { // Since we are adding the return instruction here, we must handle the coercion. // We do this by using the `ret_tok` instruction. 
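The container-decl hunks that follow are more of this mechanical rename; the substantive error-trace changes resume at tryExpr and popErrorReturnTrace below. For orientation, here is plain user-level Zig (not compiler source) showing the constructs those hunks serve; the comments are a reading of this patch, not a specification:

fn mayFail() !u32 {
    return error.Nope;
}

fn demo() !u32 {
    // `catch` evaluates its operand with ctx == .error_handling_expr; the
    // error return trace index is saved before the operand and restored on
    // the handled path, so a caught error leaves no stale trace entries.
    const a = mayFail() catch 0;

    // `if (...) |v| ... else |err| ...` over an error union gets the same
    // save/restore treatment (see do_err_trace in the ifExpr hunks).
    const b = if (mayFail()) |v| v else |_| 0;

    // `try` breaks to an error-handling/return context, so
    // popErrorReturnTrace propagates rather than restores: the trace must
    // survive along the error path.
    return a + b + try mayFail();
}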
@@ -4410,7 +4427,7 @@ fn structDeclInner( if (layout == .Packed) { try astgen.appendErrorNode(member.ast.align_expr, "unable to override alignment of packed struct fields", .{}); } - const align_ref = try expr(&block_scope, &namespace.base, coerced_align_rl, member.ast.align_expr); + const align_ref = try expr(&block_scope, &namespace.base, coerced_align_ri, member.ast.align_expr); if (!block_scope.endsWithNoReturn()) { _ = try block_scope.addBreak(.break_inline, decl_inst, align_ref); } @@ -4423,9 +4440,9 @@ fn structDeclInner( } if (have_value) { - const rl: ResultLoc = if (field_type == .none) .none else .{ .coerced_ty = field_type }; + const ri: ResultInfo = .{ .rl = if (field_type == .none) .none else .{ .coerced_ty = field_type } }; - const default_inst = try expr(&block_scope, &namespace.base, rl, member.ast.value_expr); + const default_inst = try expr(&block_scope, &namespace.base, ri, member.ast.value_expr); if (!block_scope.endsWithNoReturn()) { _ = try block_scope.addBreak(.break_inline, decl_inst, default_inst); } @@ -4554,7 +4571,7 @@ fn unionDeclInner( return astgen.failNode(member_node, "union field missing type", .{}); } if (have_align) { - const align_inst = try expr(&block_scope, &block_scope.base, .{ .ty = .u32_type }, member.ast.align_expr); + const align_inst = try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .u32_type } }, member.ast.align_expr); wip_members.appendToField(@enumToInt(align_inst)); } if (have_value) { @@ -4586,7 +4603,7 @@ fn unionDeclInner( }, ); } - const tag_value = try expr(&block_scope, &block_scope.base, .{ .ty = arg_inst }, member.ast.value_expr); + const tag_value = try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr); wip_members.appendToField(@enumToInt(tag_value)); } } @@ -4624,7 +4641,7 @@ fn unionDeclInner( fn containerDecl( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, container_decl: Ast.full.ContainerDecl, ) InnerError!Zir.Inst.Ref { @@ -4650,7 +4667,7 @@ fn containerDecl( } else std.builtin.Type.ContainerLayout.Auto; const result = try structDeclInner(gz, scope, node, container_decl, layout, container_decl.ast.arg); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .keyword_union => { const layout = if (container_decl.layout_token) |t| switch (token_tags[t]) { @@ -4660,7 +4677,7 @@ fn containerDecl( } else std.builtin.Type.ContainerLayout.Auto; const result = try unionDeclInner(gz, scope, node, container_decl.ast.members, layout, container_decl.ast.arg, container_decl.ast.enum_token); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .keyword_enum => { if (container_decl.layout_token) |t| { @@ -4790,7 +4807,7 @@ fn containerDecl( _ = try astgen.scanDecls(&namespace, container_decl.ast.members); const arg_inst: Zir.Inst.Ref = if (container_decl.ast.arg != 0) - try comptimeExpr(&block_scope, &namespace.base, .{ .ty = .type_type }, container_decl.ast.arg) + try comptimeExpr(&block_scope, &namespace.base, .{ .rl = .{ .ty = .type_type } }, container_decl.ast.arg) else .none; @@ -4834,7 +4851,7 @@ fn containerDecl( }, ); } - const tag_value_inst = try expr(&block_scope, &namespace.base, .{ .ty = arg_inst }, member.ast.value_expr); + const tag_value_inst = try expr(&block_scope, &namespace.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr); wip_members.appendToField(@enumToInt(tag_value_inst)); } } @@ -4865,7 +4882,7 @@ fn containerDecl( block_scope.unstack(); try 
gz.addNamespaceCaptures(&namespace); - return rvalue(gz, rl, indexToRef(decl_inst), node); + return rvalue(gz, ri, indexToRef(decl_inst), node); }, .keyword_opaque => { assert(container_decl.ast.arg == 0); @@ -4915,7 +4932,7 @@ fn containerDecl( astgen.extra.appendSliceAssumeCapacity(decls_slice); try gz.addNamespaceCaptures(&namespace); - return rvalue(gz, rl, indexToRef(decl_inst), node); + return rvalue(gz, ri, indexToRef(decl_inst), node); }, else => unreachable, } @@ -5046,7 +5063,7 @@ fn containerMember( return .decl; } -fn errorSetDecl(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref { +fn errorSetDecl(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; const gpa = astgen.gpa; const tree = astgen.tree; @@ -5101,13 +5118,13 @@ fn errorSetDecl(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir .fields_len = @intCast(u32, fields_len), }); const result = try gz.addPlNodePayloadIndex(.error_set_decl, node, payload_index); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn tryExpr( parent_gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, operand_node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { @@ -5137,15 +5154,15 @@ fn tryExpr( const try_line = astgen.source_line - parent_gz.decl_line; const try_column = astgen.source_column; - const operand_rl: ResultLoc = switch (rl) { - .ref, .catch_ref => .catch_ref, - else => .catch_none, + const operand_ri: ResultInfo = switch (ri.rl) { + .ref => .{ .rl = .ref, .ctx = .error_handling_expr }, + else => .{ .rl = .none, .ctx = .error_handling_expr }, }; - // This could be a pointer or value depending on the `rl` parameter. - const operand = try reachableExpr(parent_gz, scope, operand_rl, operand_node, node); + // This could be a pointer or value depending on the `ri` parameter. + const operand = try reachableExpr(parent_gz, scope, operand_ri, operand_node, node); const is_inline = parent_gz.force_comptime; const is_inline_bit = @as(u2, @boolToInt(is_inline)); - const is_ptr_bit = @as(u2, @boolToInt(operand_rl == .ref or operand_rl == .catch_ref)) << 1; + const is_ptr_bit = @as(u2, @boolToInt(operand_ri.rl == .ref)) << 1; const block_tag: Zir.Inst.Tag = switch (is_inline_bit | is_ptr_bit) { 0b00 => .@"try", 0b01 => .@"try", @@ -5160,8 +5177,8 @@ fn tryExpr( var else_scope = parent_gz.makeSubBlock(scope); defer else_scope.unstack(); - const err_tag = switch (rl) { - .ref, .catch_ref => Zir.Inst.Tag.err_union_code_ptr, + const err_tag = switch (ri.rl) { + .ref => Zir.Inst.Tag.err_union_code_ptr, else => Zir.Inst.Tag.err_union_code, }; const err_code = try else_scope.addUnNode(err_tag, operand, node); @@ -5171,9 +5188,9 @@ fn tryExpr( try else_scope.setTryBody(try_inst, operand); const result = indexToRef(try_inst); - switch (rl) { - .ref, .catch_ref => return result, - else => return rvalue(parent_gz, rl, result, node), + switch (ri.rl) { + .ref => return result, + else => return rvalue(parent_gz, ri, result, node), } } @@ -5190,7 +5207,7 @@ fn tryExpr( fn popErrorReturnTrace( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, result_inst: Zir.Inst.Ref, error_trace_index: Zir.Inst.Ref, @@ -5201,13 +5218,8 @@ fn popErrorReturnTrace( const result_is_err = nodeMayEvalToError(tree, node); // If we are breaking to a try/catch/error-union-if/return, the error trace propagates. 
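// Previously that fact had to be recovered indirectly: a .catch_none or
// .catch_ref result location implied a try/catch, and a .ptr/.ty result
// location was probed via refToIndex plus its ZIR tag (ret_ptr/ret_type)
// to detect a return. The rewritten switch below reads it off ri.ctx.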
- const propagate_error_trace = switch (rl) { - .catch_none, .catch_ref => true, // Propagate to try/catch/error-union-if - .ptr, .ty => |ref| b: { // Otherwise, propagate if result loc is a return - const inst = refToIndex(ref) orelse break :b false; - const zir_tags = astgen.instructions.items(.tag); - break :b zir_tags[inst] == .ret_ptr or zir_tags[inst] == .ret_type; - }, + const propagate_error_trace = switch (ri.ctx) { + .error_handling_expr, .@"return" => true, else => false, }; @@ -5219,14 +5231,14 @@ fn popErrorReturnTrace( // We are returning to an error-handling operator with a maybe-error. // Restore only if it's a non-error, implying the catch was successfully handled. var block_scope = gz.makeSubBlock(scope); - block_scope.setBreakResultLoc(.discard); + block_scope.setBreakResultInfo(.{ .rl = .discard }); defer block_scope.unstack(); // Emit conditional branch for restoring error trace index - const is_non_err = switch (rl) { - .catch_ref => try block_scope.addUnNode(.is_non_err_ptr, result_inst, node), - .ptr => |ptr| try block_scope.addUnNode(.is_non_err_ptr, ptr, node), - .ty, .catch_none => try block_scope.addUnNode(.is_non_err, result_inst, node), + const is_non_err = switch (ri.rl) { + .ref => try block_scope.addUnNode(.is_non_err_ptr, result_inst, node), + .ptr => |ptr| try block_scope.addUnNode(.is_non_err_ptr, ptr.inst, node), + .ty, .none => try block_scope.addUnNode(.is_non_err, result_inst, node), else => unreachable, // Error-handling operators only generate the above result locations }; const condbr = try block_scope.addCondBr(.condbr, node); @@ -5255,7 +5267,7 @@ fn popErrorReturnTrace( fn orelseCatchExpr( parent_gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs: Ast.Node.Index, cond_op: Zir.Inst.Tag, @@ -5270,21 +5282,21 @@ fn orelseCatchExpr( const do_err_trace = astgen.fn_block != null and (cond_op == .is_non_err or cond_op == .is_non_err_ptr); var block_scope = parent_gz.makeSubBlock(scope); - block_scope.setBreakResultLoc(rl); + block_scope.setBreakResultInfo(ri); defer block_scope.unstack(); const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none; - const operand_rl: ResultLoc = switch (block_scope.break_result_loc) { - .ref, .catch_ref => if (do_err_trace) ResultLoc{ .catch_ref = {} } else .ref, - else => if (do_err_trace) ResultLoc{ .catch_none = {} } else .none, + const operand_ri: ResultInfo = switch (block_scope.break_result_info.rl) { + .ref => .{ .rl = .ref, .ctx = if (do_err_trace) .error_handling_expr else .none }, + else => .{ .rl = .none, .ctx = if (do_err_trace) .error_handling_expr else .none }, }; block_scope.break_count += 1; - // This could be a pointer or value depending on the `operand_rl` parameter. - // We cannot use `block_scope.break_result_loc` because that has the bare + // This could be a pointer or value depending on the `operand_ri` parameter. + // We cannot use `block_scope.break_result_info` because that has the bare // type, whereas this expression has the optional type. Later we make // up for this fact by calling rvalue on the else branch. 
- const operand = try reachableExpr(&block_scope, &block_scope.base, operand_rl, lhs, rhs); + const operand = try reachableExpr(&block_scope, &block_scope.base, operand_ri, lhs, rhs); const cond = try block_scope.addUnNode(cond_op, operand, node); const condbr = try block_scope.addCondBr(.condbr, node); @@ -5298,9 +5310,9 @@ fn orelseCatchExpr( // This could be a pointer or value depending on `unwrap_op`. const unwrapped_payload = try then_scope.addUnNode(unwrap_op, operand, node); - const then_result = switch (rl) { - .ref, .catch_ref => unwrapped_payload, - else => try rvalue(&then_scope, block_scope.break_result_loc, unwrapped_payload, node), + const then_result = switch (ri.rl) { + .ref => unwrapped_payload, + else => try rvalue(&then_scope, block_scope.break_result_info, unwrapped_payload, node), }; var else_scope = block_scope.makeSubBlock(scope); @@ -5334,7 +5346,7 @@ fn orelseCatchExpr( break :blk &err_val_scope.base; }; - const else_result = try expr(&else_scope, else_sub_scope, block_scope.break_result_loc, rhs); + const else_result = try expr(&else_scope, else_sub_scope, block_scope.break_result_info, rhs); if (!else_scope.endsWithNoReturn()) { block_scope.break_count += 1; @@ -5342,7 +5354,7 @@ fn orelseCatchExpr( try popErrorReturnTrace( &else_scope, else_sub_scope, - block_scope.break_result_loc, + block_scope.break_result_info, rhs, else_result, saved_err_trace_index, @@ -5358,7 +5370,7 @@ fn orelseCatchExpr( const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break"; const result = try finishThenElseBlock( parent_gz, - rl, + ri, node, &block_scope, &then_scope, @@ -5377,7 +5389,7 @@ fn orelseCatchExpr( /// Supports `else_scope` stacked on `then_scope` stacked on `block_scope`. Unstacks `else_scope` then `then_scope`. fn finishThenElseBlock( parent_gz: *GenZir, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, block_scope: *GenZir, then_scope: *GenZir, @@ -5392,7 +5404,7 @@ fn finishThenElseBlock( ) InnerError!Zir.Inst.Ref { // We now have enough information to decide whether the result instruction should // be communicated via result location pointer or break instructions. 
- const strat = rl.strategy(block_scope); + const strat = ri.rl.strategy(block_scope); // else_scope may be stacked on then_scope, so check for no-return on then_scope manually const tags = parent_gz.astgen.instructions.items(.tag); const then_slice = then_scope.instructionsSliceUpto(else_scope); @@ -5422,9 +5434,9 @@ fn finishThenElseBlock( try setCondBrPayload(condbr, cond, then_scope, then_break, else_scope, else_break); } const block_ref = indexToRef(main_block); - switch (rl) { - .ref, .catch_ref => return block_ref, - else => return rvalue(parent_gz, rl, block_ref, node), + switch (ri.rl) { + .ref => return block_ref, + else => return rvalue(parent_gz, ri, block_ref, node), } }, } @@ -5443,14 +5455,14 @@ fn tokenIdentEql(astgen: *AstGen, token1: Ast.TokenIndex, token2: Ast.TokenIndex fn fieldAccess( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { - switch (rl) { - .ref, .catch_ref => return addFieldAccess(.field_ptr, gz, scope, .ref, node), + switch (ri.rl) { + .ref => return addFieldAccess(.field_ptr, gz, scope, .{ .rl = .ref }, node), else => { - const access = try addFieldAccess(.field_val, gz, scope, .none, node); - return rvalue(gz, rl, access, node); + const access = try addFieldAccess(.field_val, gz, scope, .{ .rl = .none }, node); + return rvalue(gz, ri, access, node); }, } } @@ -5459,7 +5471,7 @@ fn addFieldAccess( tag: Zir.Inst.Tag, gz: *GenZir, scope: *Scope, - lhs_rl: ResultLoc, + lhs_ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; @@ -5473,7 +5485,7 @@ fn addFieldAccess( const str_index = try astgen.identAsString(field_ident); return gz.addPlNode(tag, node, Zir.Inst.Field{ - .lhs = try expr(gz, scope, lhs_rl, object_node), + .lhs = try expr(gz, scope, lhs_ri, object_node), .field_name_start = str_index, }); } @@ -5481,20 +5493,20 @@ fn addFieldAccess( fn arrayAccess( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; const tree = astgen.tree; const node_datas = tree.nodes.items(.data); - switch (rl) { - .ref, .catch_ref => return gz.addPlNode(.elem_ptr_node, node, Zir.Inst.Bin{ - .lhs = try expr(gz, scope, .ref, node_datas[node].lhs), - .rhs = try expr(gz, scope, .{ .ty = .usize_type }, node_datas[node].rhs), + switch (ri.rl) { + .ref => return gz.addPlNode(.elem_ptr_node, node, Zir.Inst.Bin{ + .lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs), + .rhs = try expr(gz, scope, .{ .rl = .{ .ty = .usize_type } }, node_datas[node].rhs), }), - else => return rvalue(gz, rl, try gz.addPlNode(.elem_val_node, node, Zir.Inst.Bin{ - .lhs = try expr(gz, scope, .none, node_datas[node].lhs), - .rhs = try expr(gz, scope, .{ .ty = .usize_type }, node_datas[node].rhs), + else => return rvalue(gz, ri, try gz.addPlNode(.elem_val_node, node, Zir.Inst.Bin{ + .lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs), + .rhs = try expr(gz, scope, .{ .rl = .{ .ty = .usize_type } }, node_datas[node].rhs), }), node), } } @@ -5502,7 +5514,7 @@ fn arrayAccess( fn simpleBinOp( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, op_inst_tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { @@ -5511,15 +5523,15 @@ fn simpleBinOp( const node_datas = tree.nodes.items(.data); const result = try gz.addPlNode(op_inst_tag, node, Zir.Inst.Bin{ - .lhs = try reachableExpr(gz, scope, .none, node_datas[node].lhs, node), - .rhs = try reachableExpr(gz, scope, .none, 
node_datas[node].rhs, node), + .lhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].lhs, node), + .rhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].rhs, node), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn simpleStrTok( gz: *GenZir, - rl: ResultLoc, + ri: ResultInfo, ident_token: Ast.TokenIndex, node: Ast.Node.Index, op_inst_tag: Zir.Inst.Tag, @@ -5527,13 +5539,13 @@ fn simpleStrTok( const astgen = gz.astgen; const str_index = try astgen.identAsString(ident_token); const result = try gz.addStrTok(op_inst_tag, str_index, ident_token); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn boolBinOp( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, zir_tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { @@ -5541,25 +5553,25 @@ fn boolBinOp( const tree = astgen.tree; const node_datas = tree.nodes.items(.data); - const lhs = try expr(gz, scope, bool_rl, node_datas[node].lhs); + const lhs = try expr(gz, scope, bool_ri, node_datas[node].lhs); const bool_br = try gz.addBoolBr(zir_tag, lhs); var rhs_scope = gz.makeSubBlock(scope); defer rhs_scope.unstack(); - const rhs = try expr(&rhs_scope, &rhs_scope.base, bool_rl, node_datas[node].rhs); + const rhs = try expr(&rhs_scope, &rhs_scope.base, bool_ri, node_datas[node].rhs); if (!gz.refIsNoReturn(rhs)) { _ = try rhs_scope.addBreak(.break_inline, bool_br, rhs); } try rhs_scope.setBoolBrBody(bool_br); const block_ref = indexToRef(bool_br); - return rvalue(gz, rl, block_ref, node); + return rvalue(gz, ri, block_ref, node); } fn ifExpr( parent_gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, if_full: Ast.full.If, ) InnerError!Zir.Inst.Ref { @@ -5570,7 +5582,7 @@ fn ifExpr( const do_err_trace = astgen.fn_block != null and if_full.error_token != null; var block_scope = parent_gz.makeSubBlock(scope); - block_scope.setBreakResultLoc(rl); + block_scope.setBreakResultInfo(ri); defer block_scope.unstack(); const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none; @@ -5586,23 +5598,23 @@ fn ifExpr( bool_bit: Zir.Inst.Ref, } = c: { if (if_full.error_token) |_| { - const cond_rl: ResultLoc = if (payload_is_ref) .catch_ref else .catch_none; - const err_union = try expr(&block_scope, &block_scope.base, cond_rl, if_full.ast.cond_expr); + const cond_ri: ResultInfo = .{ .rl = if (payload_is_ref) .ref else .none, .ctx = .error_handling_expr }; + const err_union = try expr(&block_scope, &block_scope.base, cond_ri, if_full.ast.cond_expr); const tag: Zir.Inst.Tag = if (payload_is_ref) .is_non_err_ptr else .is_non_err; break :c .{ .inst = err_union, .bool_bit = try block_scope.addUnNode(tag, err_union, if_full.ast.cond_expr), }; } else if (if_full.payload_token) |_| { - const cond_rl: ResultLoc = if (payload_is_ref) .ref else .none; - const optional = try expr(&block_scope, &block_scope.base, cond_rl, if_full.ast.cond_expr); + const cond_ri: ResultInfo = .{ .rl = if (payload_is_ref) .ref else .none }; + const optional = try expr(&block_scope, &block_scope.base, cond_ri, if_full.ast.cond_expr); const tag: Zir.Inst.Tag = if (payload_is_ref) .is_non_null_ptr else .is_non_null; break :c .{ .inst = optional, .bool_bit = try block_scope.addUnNode(tag, optional, if_full.ast.cond_expr), }; } else { - const cond = try expr(&block_scope, &block_scope.base, bool_rl, if_full.ast.cond_expr); + const cond = try expr(&block_scope, &block_scope.base, bool_ri, 
if_full.ast.cond_expr); break :c .{ .inst = cond, .bool_bit = cond, @@ -5678,7 +5690,7 @@ fn ifExpr( } }; - const then_result = try expr(&then_scope, then_sub_scope, block_scope.break_result_loc, if_full.ast.then_expr); + const then_result = try expr(&then_scope, then_sub_scope, block_scope.break_result_info, if_full.ast.then_expr); if (!then_scope.endsWithNoReturn()) { block_scope.break_count += 1; } @@ -5729,7 +5741,7 @@ fn ifExpr( break :s &else_scope.base; } }; - const e = try expr(&else_scope, sub_scope, block_scope.break_result_loc, else_node); + const e = try expr(&else_scope, sub_scope, block_scope.break_result_info, else_node); if (!else_scope.endsWithNoReturn()) { block_scope.break_count += 1; @@ -5737,7 +5749,7 @@ fn ifExpr( try popErrorReturnTrace( &else_scope, sub_scope, - block_scope.break_result_loc, + block_scope.break_result_info, else_node, e, saved_err_trace_index, @@ -5752,9 +5764,9 @@ fn ifExpr( }; } else .{ .src = if_full.ast.then_expr, - .result = switch (rl) { + .result = switch (ri.rl) { // Explicitly store void to ptr result loc if there is no else branch - .ptr, .block_ptr => try rvalue(&else_scope, rl, .void_value, node), + .ptr, .block_ptr => try rvalue(&else_scope, ri, .void_value, node), else => .none, }, }; @@ -5762,7 +5774,7 @@ fn ifExpr( const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break"; const result = try finishThenElseBlock( parent_gz, - rl, + ri, node, &block_scope, &then_scope, @@ -5896,7 +5908,7 @@ fn setCondBrPayloadElideBlockStorePtr( fn whileExpr( parent_gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, while_full: Ast.full.While, is_statement: bool, @@ -5916,7 +5928,7 @@ fn whileExpr( var loop_scope = parent_gz.makeSubBlock(scope); loop_scope.is_inline = is_inline; - loop_scope.setBreakResultLoc(rl); + loop_scope.setBreakResultInfo(ri); defer loop_scope.unstack(); defer loop_scope.labeled_breaks.deinit(astgen.gpa); @@ -5934,23 +5946,23 @@ fn whileExpr( bool_bit: Zir.Inst.Ref, } = c: { if (while_full.error_token) |_| { - const cond_rl: ResultLoc = if (payload_is_ref) .ref else .none; - const err_union = try expr(&continue_scope, &continue_scope.base, cond_rl, while_full.ast.cond_expr); + const cond_ri: ResultInfo = .{ .rl = if (payload_is_ref) .ref else .none }; + const err_union = try expr(&continue_scope, &continue_scope.base, cond_ri, while_full.ast.cond_expr); const tag: Zir.Inst.Tag = if (payload_is_ref) .is_non_err_ptr else .is_non_err; break :c .{ .inst = err_union, .bool_bit = try continue_scope.addUnNode(tag, err_union, while_full.ast.then_expr), }; } else if (while_full.payload_token) |_| { - const cond_rl: ResultLoc = if (payload_is_ref) .ref else .none; - const optional = try expr(&continue_scope, &continue_scope.base, cond_rl, while_full.ast.cond_expr); + const cond_ri: ResultInfo = .{ .rl = if (payload_is_ref) .ref else .none }; + const optional = try expr(&continue_scope, &continue_scope.base, cond_ri, while_full.ast.cond_expr); const tag: Zir.Inst.Tag = if (payload_is_ref) .is_non_null_ptr else .is_non_null; break :c .{ .inst = optional, .bool_bit = try continue_scope.addUnNode(tag, optional, while_full.ast.then_expr), }; } else { - const cond = try expr(&continue_scope, &continue_scope.base, bool_rl, while_full.ast.cond_expr); + const cond = try expr(&continue_scope, &continue_scope.base, bool_ri, while_full.ast.cond_expr); break :c .{ .inst = cond, .bool_bit = cond, @@ -6069,7 +6081,7 @@ fn whileExpr( if (dbg_var_name) |some| { try 
then_scope.addDbgVar(.dbg_var_val, some, dbg_var_inst); } - const then_result = try expr(&then_scope, then_sub_scope, .none, while_full.ast.then_expr); + const then_result = try expr(&then_scope, then_sub_scope, .{ .rl = .none }, while_full.ast.then_expr); _ = try addEnsureResult(&then_scope, then_result, while_full.ast.then_expr); try checkUsed(parent_gz, &then_scope.base, then_sub_scope); @@ -6114,7 +6126,7 @@ fn whileExpr( // control flow apply to outer loops; not this one. loop_scope.continue_block = 0; loop_scope.break_block = 0; - const else_result = try expr(&else_scope, sub_scope, loop_scope.break_result_loc, else_node); + const else_result = try expr(&else_scope, sub_scope, loop_scope.break_result_info, else_node); if (is_statement) { _ = try addEnsureResult(&else_scope, else_result, else_node); } @@ -6141,7 +6153,7 @@ fn whileExpr( const break_tag: Zir.Inst.Tag = if (is_inline) .break_inline else .@"break"; const result = try finishThenElseBlock( parent_gz, - rl, + ri, node, &loop_scope, &then_scope, @@ -6163,7 +6175,7 @@ fn whileExpr( fn forExpr( parent_gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, for_full: Ast.full.While, is_statement: bool, @@ -6186,8 +6198,8 @@ fn forExpr( try emitDbgNode(parent_gz, for_full.ast.cond_expr); - const cond_rl: ResultLoc = if (payload_is_ref) .ref else .none; - const array_ptr = try expr(parent_gz, scope, cond_rl, for_full.ast.cond_expr); + const cond_ri: ResultInfo = .{ .rl = if (payload_is_ref) .ref else .none }; + const array_ptr = try expr(parent_gz, scope, cond_ri, for_full.ast.cond_expr); const len = try parent_gz.addUnNode(.indexable_ptr_len, array_ptr, for_full.ast.cond_expr); const index_ptr = blk: { @@ -6204,7 +6216,7 @@ fn forExpr( var loop_scope = parent_gz.makeSubBlock(scope); loop_scope.is_inline = is_inline; - loop_scope.setBreakResultLoc(rl); + loop_scope.setBreakResultInfo(ri); defer loop_scope.unstack(); defer loop_scope.labeled_breaks.deinit(astgen.gpa); @@ -6308,7 +6320,7 @@ fn forExpr( break :blk &index_scope.base; }; - const then_result = try expr(&then_scope, then_sub_scope, .none, for_full.ast.then_expr); + const then_result = try expr(&then_scope, then_sub_scope, .{ .rl = .none }, for_full.ast.then_expr); _ = try addEnsureResult(&then_scope, then_result, for_full.ast.then_expr); try checkUsed(parent_gz, &then_scope.base, then_sub_scope); @@ -6327,7 +6339,7 @@ fn forExpr( // control flow apply to outer loops; not this one. 
loop_scope.continue_block = 0; loop_scope.break_block = 0; - const else_result = try expr(&else_scope, sub_scope, loop_scope.break_result_loc, else_node); + const else_result = try expr(&else_scope, sub_scope, loop_scope.break_result_info, else_node); if (is_statement) { _ = try addEnsureResult(&else_scope, else_result, else_node); } @@ -6352,7 +6364,7 @@ fn forExpr( const break_tag: Zir.Inst.Tag = if (is_inline) .break_inline else .@"break"; const result = try finishThenElseBlock( parent_gz, - rl, + ri, node, &loop_scope, &then_scope, @@ -6374,7 +6386,7 @@ fn forExpr( fn switchExpr( parent_gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, switch_node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = parent_gz.astgen; @@ -6505,13 +6517,13 @@ fn switchExpr( } } - const operand_rl: ResultLoc = if (any_payload_is_ref) .ref else .none; - const raw_operand = try expr(parent_gz, scope, operand_rl, operand_node); + const operand_ri: ResultInfo = .{ .rl = if (any_payload_is_ref) .ref else .none }; + const raw_operand = try expr(parent_gz, scope, operand_ri, operand_node); const cond_tag: Zir.Inst.Tag = if (any_payload_is_ref) .switch_cond_ref else .switch_cond; const cond = try parent_gz.addUnNode(cond_tag, raw_operand, operand_node); // We need the type of the operand to use as the result location for all the prong items. const cond_ty_inst = try parent_gz.addUnNode(.typeof, cond, operand_node); - const item_rl: ResultLoc = .{ .ty = cond_ty_inst }; + const item_ri: ResultInfo = .{ .rl = .{ .ty = cond_ty_inst } }; // This contains the data that goes into the `extra` array for the SwitchBlock/SwitchBlockMulti, // except the first cases_nodes.len slots are a table that indexes payloads later in the array, with @@ -6528,7 +6540,7 @@ fn switchExpr( var block_scope = parent_gz.makeSubBlock(scope); // block_scope not used for collecting instructions block_scope.instructions_top = GenZir.unstacked_top; - block_scope.setBreakResultLoc(rl); + block_scope.setBreakResultInfo(ri); // This gets added to the parent block later, after the item expressions. 
const switch_block = try parent_gz.makeBlockInst(.switch_block, switch_node); @@ -6669,7 +6681,7 @@ fn switchExpr( if (node_tags[item_node] == .switch_range) continue; items_len += 1; - const item_inst = try comptimeExpr(parent_gz, scope, item_rl, item_node); + const item_inst = try comptimeExpr(parent_gz, scope, item_ri, item_node); try payloads.append(gpa, @enumToInt(item_inst)); } @@ -6679,8 +6691,8 @@ fn switchExpr( if (node_tags[range] != .switch_range) continue; ranges_len += 1; - const first = try comptimeExpr(parent_gz, scope, item_rl, node_datas[range].lhs); - const last = try comptimeExpr(parent_gz, scope, item_rl, node_datas[range].rhs); + const first = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].lhs); + const last = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].rhs); try payloads.appendSlice(gpa, &[_]u32{ @enumToInt(first), @enumToInt(last), }); @@ -6698,7 +6710,7 @@ fn switchExpr( scalar_case_index += 1; try payloads.resize(gpa, header_index + 2); // item, body_len const item_node = case.ast.values[0]; - const item_inst = try comptimeExpr(parent_gz, scope, item_rl, item_node); + const item_inst = try comptimeExpr(parent_gz, scope, item_ri, item_node); payloads.items[header_index] = @enumToInt(item_inst); break :blk header_index + 1; }; @@ -6717,7 +6729,7 @@ fn switchExpr( if (dbg_var_tag_name) |some| { try case_scope.addDbgVar(.dbg_var_val, some, dbg_var_tag_inst); } - const case_result = try expr(&case_scope, sub_scope, block_scope.break_result_loc, case.ast.target_expr); + const case_result = try expr(&case_scope, sub_scope, block_scope.break_result_info, case.ast.target_expr); try checkUsed(parent_gz, &case_scope.base, sub_scope); try case_scope.addDbgBlockEnd(); if (!parent_gz.refIsNoReturn(case_result)) { @@ -6759,7 +6771,7 @@ fn switchExpr( zir_datas[switch_block].pl_node.payload_index = payload_index; - const strat = rl.strategy(&block_scope); + const strat = ri.rl.strategy(&block_scope); for (payloads.items[case_table_start..case_table_end]) |start_index, i| { var body_len_index = start_index; var end_index = start_index; @@ -6831,8 +6843,8 @@ fn switchExpr( } const block_ref = indexToRef(switch_block); - if (strat.tag == .break_operand and strat.elide_store_to_block_ptr_instructions and rl != .ref and rl != .catch_ref) - return rvalue(parent_gz, rl, block_ref, switch_node); + if (strat.tag == .break_operand and strat.elide_store_to_block_ptr_instructions and ri.rl != .ref) + return rvalue(parent_gz, ri, block_ref, switch_node); return block_ref; } @@ -6895,14 +6907,16 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref return Zir.Inst.Ref.unreachable_value; } - const rl: ResultLoc = if (nodeMayNeedMemoryLocation(tree, operand_node, true)) .{ - .ptr = .{ .inst = try gz.addNode(.ret_ptr, node) }, + const ri: ResultInfo = if (nodeMayNeedMemoryLocation(tree, operand_node, true)) .{ + .rl = .{ .ptr = .{ .inst = try gz.addNode(.ret_ptr, node) } }, + .ctx = .@"return", } else .{ - .ty = try gz.addNode(.ret_type, node), + .rl = .{ .ty = try gz.addNode(.ret_type, node) }, + .ctx = .@"return", }; const prev_anon_name_strategy = gz.anon_name_strategy; gz.anon_name_strategy = .func; - const operand = try reachableExpr(gz, scope, rl, operand_node, node); + const operand = try reachableExpr(gz, scope, ri, operand_node, node); gz.anon_name_strategy = prev_anon_name_strategy; // TODO: This should be almost identical for every break/ret @@ -6916,15 +6930,15 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) 
InnerError!Zir.Inst.Ref _ = try gz.addUnNode(.restore_err_ret_index, gz.outermost_err_trace_index, node); try emitDbgStmt(gz, ret_line, ret_column); - try gz.addRet(rl, operand, node); + try gz.addRet(ri, operand, node); return Zir.Inst.Ref.unreachable_value; }, .always => { // Value is always an error. Emit both error defers and regular defers. - const err_code = if (rl == .ptr) try gz.addUnNode(.load, rl.ptr.inst, node) else operand; + const err_code = if (ri.rl == .ptr) try gz.addUnNode(.load, ri.rl.ptr.inst, node) else operand; try genDefers(gz, defer_outer, scope, .{ .both = err_code }); try emitDbgStmt(gz, ret_line, ret_column); - try gz.addRet(rl, operand, node); + try gz.addRet(ri, operand, node); return Zir.Inst.Ref.unreachable_value; }, .maybe => { @@ -6933,12 +6947,12 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref // Only regular defers; no branch needed. try genDefers(gz, defer_outer, scope, .normal_only); try emitDbgStmt(gz, ret_line, ret_column); - try gz.addRet(rl, operand, node); + try gz.addRet(ri, operand, node); return Zir.Inst.Ref.unreachable_value; } // Emit conditional branch for generating errdefers. - const result = if (rl == .ptr) try gz.addUnNode(.load, rl.ptr.inst, node) else operand; + const result = if (ri.rl == .ptr) try gz.addUnNode(.load, ri.rl.ptr.inst, node) else operand; const is_non_err = try gz.addUnNode(.is_non_err, result, node); const condbr = try gz.addCondBr(.condbr, node); @@ -6952,7 +6966,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref _ = try then_scope.addUnNode(.restore_err_ret_index, then_scope.outermost_err_trace_index, node); try emitDbgStmt(&then_scope, ret_line, ret_column); - try then_scope.addRet(rl, operand, node); + try then_scope.addRet(ri, operand, node); var else_scope = gz.makeSubBlock(scope); defer else_scope.unstack(); @@ -6962,7 +6976,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref }; try genDefers(&else_scope, defer_outer, scope, which_ones); try emitDbgStmt(&else_scope, ret_line, ret_column); - try else_scope.addRet(rl, operand, node); + try else_scope.addRet(ri, operand, node); try setCondBrPayload(condbr, is_non_err, &then_scope, 0, &else_scope, 0); @@ -6995,7 +7009,7 @@ fn parseBitCount(buf: []const u8) std.fmt.ParseIntError!u16 { fn identifier( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, ident: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const tracy = trace(@src()); @@ -7014,7 +7028,7 @@ fn identifier( // if not @"" syntax, just use raw token slice if (ident_name_raw[0] != '@') { if (primitives.get(ident_name_raw)) |zir_const_ref| { - return rvalue(gz, rl, zir_const_ref, ident); + return rvalue(gz, ri, zir_const_ref, ident); } if (ident_name_raw.len >= 2) integer: { @@ -7047,19 +7061,19 @@ fn identifier( .bit_count = bit_count, } }, }); - return rvalue(gz, rl, result, ident); + return rvalue(gz, ri, result, ident); } } } // Local variables, including function parameters. - return localVarRef(gz, scope, rl, ident, ident_token); + return localVarRef(gz, scope, ri, ident, ident_token); } fn localVarRef( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, ident: Ast.Node.Index, ident_token: Ast.TokenIndex, ) InnerError!Zir.Inst.Ref { @@ -7077,7 +7091,7 @@ fn localVarRef( if (local_val.name == name_str_index) { // Locals cannot shadow anything, so we do not need to look for ambiguous // references in this case. 
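
(One consequence visible in the localVarRef hunks below: the old combined
locations `.catch_none`/`.catch_ref` are gone, because "where the result
goes" and "who consumes it" are now separate fields. A hypothetical caller
migration, with the new spelling assumed from the literals used throughout
this patch:)

    // old: operand of a `catch`, evaluated by reference
    //     _ = try expr(gz, scope, .catch_ref, node);
    // new: same location, with the consumer carried in `ctx`
    //     _ = try expr(gz, scope, .{ .rl = .ref, .ctx = .error_handling_expr }, node);
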
- if (rl == .discard) { + if (ri.rl == .discard) { local_val.discarded = ident_token; } else { local_val.used = ident_token; @@ -7093,14 +7107,14 @@ fn localVarRef( gpa, ); - return rvalue(gz, rl, value_inst, ident); + return rvalue(gz, ri, value_inst, ident); } s = local_val.parent; }, .local_ptr => { const local_ptr = s.cast(Scope.LocalPtr).?; if (local_ptr.name == name_str_index) { - if (rl == .discard) { + if (ri.rl == .discard) { local_ptr.discarded = ident_token; } else { local_ptr.used = ident_token; @@ -7125,11 +7139,11 @@ fn localVarRef( gpa, ); - switch (rl) { - .ref, .catch_ref => return ptr_inst, + switch (ri.rl) { + .ref => return ptr_inst, else => { const loaded = try gz.addUnNode(.load, ptr_inst, ident); - return rvalue(gz, rl, loaded, ident); + return rvalue(gz, ri, loaded, ident); }, } } @@ -7162,11 +7176,11 @@ fn localVarRef( // Decl references happen by name rather than ZIR index so that when unrelated // decls are modified, ZIR code containing references to them can be unmodified. - switch (rl) { - .ref, .catch_ref => return gz.addStrTok(.decl_ref, name_str_index, ident_token), + switch (ri.rl) { + .ref => return gz.addStrTok(.decl_ref, name_str_index, ident_token), else => { const result = try gz.addStrTok(.decl_val, name_str_index, ident_token); - return rvalue(gz, rl, result, ident); + return rvalue(gz, ri, result, ident); }, } } @@ -7210,7 +7224,7 @@ fn tunnelThroughClosure( fn stringLiteral( gz: *GenZir, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; @@ -7225,12 +7239,12 @@ fn stringLiteral( .len = str.len, } }, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn multilineStringLiteral( gz: *GenZir, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; @@ -7242,10 +7256,10 @@ fn multilineStringLiteral( .len = str.len, } }, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } -fn charLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref { +fn charLiteral(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; const tree = astgen.tree; const main_tokens = tree.nodes.items(.main_token); @@ -7255,7 +7269,7 @@ fn charLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir. switch (std.zig.parseCharLiteral(slice)) { .success => |codepoint| { const result = try gz.addInt(codepoint); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .failure => |err| return astgen.failWithStrLitError(err, main_token, slice, 0), } @@ -7263,7 +7277,7 @@ fn charLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir. const Sign = enum { negative, positive }; -fn numberLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index, source_node: Ast.Node.Index, sign: Sign) InnerError!Zir.Inst.Ref { +fn numberLiteral(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index, source_node: Ast.Node.Index, sign: Sign) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; const tree = astgen.tree; const main_tokens = tree.nodes.items(.main_token); @@ -7305,7 +7319,7 @@ fn numberLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index, source_node: const bigger_again: f128 = smaller_float; if (bigger_again == float_number) { const result = try gz.addFloat(smaller_float); - return rvalue(gz, rl, result, source_node); + return rvalue(gz, ri, result, source_node); } // We need to use 128 bits. 
Break the float into 4 u32 values so we can // put it into the `extra` array. @@ -7316,16 +7330,16 @@ fn numberLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index, source_node: .piece2 = @truncate(u32, int_bits >> 64), .piece3 = @truncate(u32, int_bits >> 96), }); - return rvalue(gz, rl, result, source_node); + return rvalue(gz, ri, result, source_node); }, .failure => |err| return astgen.failWithNumberError(err, num_token, bytes), }; if (sign == .positive) { - return rvalue(gz, rl, result, source_node); + return rvalue(gz, ri, result, source_node); } else { const negated = try gz.addUnNode(.negate, result, source_node); - return rvalue(gz, rl, negated, source_node); + return rvalue(gz, ri, negated, source_node); } } @@ -7361,7 +7375,7 @@ fn failWithNumberError(astgen: *AstGen, err: std.zig.number_literal.Error, token fn asmExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, full: Ast.full.Asm, ) InnerError!Zir.Inst.Ref { @@ -7384,7 +7398,7 @@ fn asmExpr( }, else => .{ .tag = .asm_expr, - .tmpl = @enumToInt(try comptimeExpr(gz, scope, .none, full.ast.template)), + .tmpl = @enumToInt(try comptimeExpr(gz, scope, .{ .rl = .none }, full.ast.template)), }, }; @@ -7436,7 +7450,7 @@ fn asmExpr( outputs[i] = .{ .name = name, .constraint = constraint, - .operand = try localVarRef(gz, scope, .ref, node, ident_token), + .operand = try localVarRef(gz, scope, .{ .rl = .ref }, node, ident_token), }; } } @@ -7452,7 +7466,7 @@ fn asmExpr( const name = try astgen.identAsString(symbolic_name); const constraint_token = symbolic_name + 2; const constraint = (try astgen.strLitAsString(constraint_token)).index; - const operand = try expr(gz, scope, .none, node_datas[input_node].lhs); + const operand = try expr(gz, scope, .{ .rl = .none }, node_datas[input_node].lhs); inputs[i] = .{ .name = name, .constraint = constraint, @@ -7497,31 +7511,31 @@ fn asmExpr( .inputs = inputs, .clobbers = clobbers_buffer[0..clobber_i], }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn as( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs: Ast.Node.Index, rhs: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const dest_type = try typeExpr(gz, scope, lhs); - switch (rl) { - .none, .catch_none, .discard, .ref, .catch_ref, .ty, .ty_shift_operand, .coerced_ty => { - const result = try reachableExpr(gz, scope, .{ .ty = dest_type }, rhs, node); - return rvalue(gz, rl, result, node); + switch (ri.rl) { + .none, .discard, .ref, .ty, .coerced_ty => { + const result = try reachableExpr(gz, scope, .{ .rl = .{ .ty = dest_type } }, rhs, node); + return rvalue(gz, ri, result, node); }, .ptr => |result_ptr| { - return asRlPtr(gz, scope, rl, node, result_ptr.inst, rhs, dest_type); + return asRlPtr(gz, scope, ri, node, result_ptr.inst, rhs, dest_type); }, .inferred_ptr => |result_ptr| { - return asRlPtr(gz, scope, rl, node, result_ptr, rhs, dest_type); + return asRlPtr(gz, scope, ri, node, result_ptr, rhs, dest_type); }, .block_ptr => |block_scope| { - return asRlPtr(gz, scope, rl, node, block_scope.rl_ptr, rhs, dest_type); + return asRlPtr(gz, scope, ri, node, block_scope.rl_ptr, rhs, dest_type); }, } } @@ -7529,29 +7543,29 @@ fn as( fn unionInit( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, params: []const Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const union_type = try typeExpr(gz, scope, params[0]); - const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, 
params[1]); + const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]); const field_type = try gz.addPlNode(.field_type_ref, params[1], Zir.Inst.FieldTypeRef{ .container_type = union_type, .field_name = field_name, }); - const init = try reachableExpr(gz, scope, .{ .ty = field_type }, params[2], node); + const init = try reachableExpr(gz, scope, .{ .rl = .{ .ty = field_type } }, params[2], node); const result = try gz.addPlNode(.union_init, node, Zir.Inst.UnionInit{ .union_type = union_type, .init = init, .field_name = field_name, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn asRlPtr( parent_gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, src_node: Ast.Node.Index, result_ptr: Zir.Inst.Ref, operand_node: Ast.Node.Index, @@ -7560,31 +7574,31 @@ fn asRlPtr( var as_scope = try parent_gz.makeCoercionScope(scope, dest_type, result_ptr, src_node); defer as_scope.unstack(); - const result = try reachableExpr(&as_scope, &as_scope.base, .{ .block_ptr = &as_scope }, operand_node, src_node); - return as_scope.finishCoercion(parent_gz, rl, operand_node, result, dest_type); + const result = try reachableExpr(&as_scope, &as_scope.base, .{ .rl = .{ .block_ptr = &as_scope } }, operand_node, src_node); + return as_scope.finishCoercion(parent_gz, ri, operand_node, result, dest_type); } fn bitCast( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs: Ast.Node.Index, rhs: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const dest_type = try reachableTypeExpr(gz, scope, lhs, node); - const operand = try reachableExpr(gz, scope, .none, rhs, node); + const operand = try reachableExpr(gz, scope, .{ .rl = .none }, rhs, node); const result = try gz.addPlNode(.bitcast, node, Zir.Inst.Bin{ .lhs = dest_type, .rhs = operand, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn typeOf( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, args: []const Ast.Node.Index, ) InnerError!Zir.Inst.Ref { @@ -7600,7 +7614,7 @@ fn typeOf( typeof_scope.force_comptime = false; defer typeof_scope.unstack(); - const ty_expr = try reachableExpr(&typeof_scope, &typeof_scope.base, .none, args[0], node); + const ty_expr = try reachableExpr(&typeof_scope, &typeof_scope.base, .{ .rl = .none }, args[0], node); if (!gz.refIsNoReturn(ty_expr)) { _ = try typeof_scope.addBreak(.break_inline, typeof_inst, ty_expr); } @@ -7608,7 +7622,7 @@ fn typeOf( // typeof_scope unstacked now, can add new instructions to gz try gz.instructions.append(gpa, typeof_inst); - return rvalue(gz, rl, indexToRef(typeof_inst), node); + return rvalue(gz, ri, indexToRef(typeof_inst), node); } const payload_size: u32 = std.meta.fields(Zir.Inst.TypeOfPeer).len; const payload_index = try reserveExtra(astgen, payload_size + args.len); @@ -7620,7 +7634,7 @@ fn typeOf( typeof_scope.force_comptime = false; for (args) |arg, i| { - const param_ref = try reachableExpr(&typeof_scope, &typeof_scope.base, .none, arg, node); + const param_ref = try reachableExpr(&typeof_scope, &typeof_scope.base, .{ .rl = .none }, arg, node); astgen.extra.items[args_index + i] = @enumToInt(param_ref); } _ = try typeof_scope.addBreak(.break_inline, refToIndex(typeof_inst).?, .void_value); @@ -7636,13 +7650,13 @@ fn typeOf( astgen.appendBodyWithFixups(body); typeof_scope.unstack(); - return rvalue(gz, rl, typeof_inst, node); + return rvalue(gz, ri, typeof_inst, node); } fn builtinCall( gz: *GenZir, scope: 
*Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, params: []const Ast.Node.Index, ) InnerError!Zir.Inst.Ref { @@ -7694,7 +7708,7 @@ fn builtinCall( if (!gop.found_existing) { gop.value_ptr.* = str_lit_token; } - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .compile_log => { const payload_index = try addExtra(gz.astgen, Zir.Inst.NodeMultiOp{ @@ -7702,32 +7716,32 @@ fn builtinCall( }); var extra_index = try reserveExtra(gz.astgen, params.len); for (params) |param| { - const param_ref = try expr(gz, scope, .none, param); + const param_ref = try expr(gz, scope, .{ .rl = .none }, param); astgen.extra.items[extra_index] = @enumToInt(param_ref); extra_index += 1; } const result = try gz.addExtendedMultiOpPayloadIndex(.compile_log, payload_index, params.len); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .field => { - if (rl == .ref or rl == .catch_ref) { + if (ri.rl == .ref) { return gz.addPlNode(.field_ptr_named, node, Zir.Inst.FieldNamed{ - .lhs = try expr(gz, scope, .ref, params[0]), - .field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]), + .lhs = try expr(gz, scope, .{ .rl = .ref }, params[0]), + .field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]), }); } const result = try gz.addPlNode(.field_val_named, node, Zir.Inst.FieldNamed{ - .lhs = try expr(gz, scope, .none, params[0]), - .field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]), + .lhs = try expr(gz, scope, .{ .rl = .none }, params[0]), + .field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, // zig fmt: off - .as => return as( gz, scope, rl, node, params[0], params[1]), - .bit_cast => return bitCast( gz, scope, rl, node, params[0], params[1]), - .TypeOf => return typeOf( gz, scope, rl, node, params), - .union_init => return unionInit(gz, scope, rl, node, params), + .as => return as( gz, scope, ri, node, params[0], params[1]), + .bit_cast => return bitCast( gz, scope, ri, node, params[0], params[1]), + .TypeOf => return typeOf( gz, scope, ri, node, params), + .union_init => return unionInit(gz, scope, ri, node, params), .c_import => return cImport( gz, scope, node, params[0]), // zig fmt: on @@ -7752,9 +7766,9 @@ fn builtinCall( local_val.used = ident_token; _ = try gz.addPlNode(.export_value, node, Zir.Inst.ExportValue{ .operand = local_val.inst, - .options = try comptimeExpr(gz, scope, .{ .coerced_ty = .export_options_type }, params[1]), + .options = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .export_options_type } }, params[1]), }); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); } s = local_val.parent; }, @@ -7767,9 +7781,9 @@ fn builtinCall( const loaded = try gz.addUnNode(.load, local_ptr.ptr, node); _ = try gz.addPlNode(.export_value, node, Zir.Inst.ExportValue{ .operand = loaded, - .options = try comptimeExpr(gz, scope, .{ .coerced_ty = .export_options_type }, params[1]), + .options = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .export_options_type } }, params[1]), }); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); } s = local_ptr.parent; }, @@ -7801,47 +7815,47 @@ fn builtinCall( }, else => return astgen.failNode(params[0], "symbol to export must identify a declaration", .{}), } - const options = try 
comptimeExpr(gz, scope, .{ .ty = .export_options_type }, params[1]); + const options = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .export_options_type } }, params[1]); _ = try gz.addPlNode(.@"export", node, Zir.Inst.Export{ .namespace = namespace, .decl_name = decl_name, .options = options, }); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); }, .@"extern" => { const type_inst = try typeExpr(gz, scope, params[0]); - const options = try comptimeExpr(gz, scope, .{ .ty = .extern_options_type }, params[1]); + const options = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .extern_options_type } }, params[1]); const result = try gz.addExtendedPayload(.builtin_extern, Zir.Inst.BinNode{ .node = gz.nodeIndexToRelative(node), .lhs = type_inst, .rhs = options, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .fence => { - const order = try expr(gz, scope, .{ .coerced_ty = .atomic_order_type }, params[0]); + const order = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .atomic_order_type } }, params[0]); const result = try gz.addExtendedPayload(.fence, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), .operand = order, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .set_float_mode => { - const order = try expr(gz, scope, .{ .coerced_ty = .float_mode_type }, params[0]); + const order = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .float_mode_type } }, params[0]); const result = try gz.addExtendedPayload(.set_float_mode, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), .operand = order, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .set_align_stack => { - const order = try expr(gz, scope, align_rl, params[0]); + const order = try expr(gz, scope, align_ri, params[0]); const result = try gz.addExtendedPayload(.set_align_stack, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), .operand = order, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .src => { @@ -7853,62 +7867,62 @@ fn builtinCall( .line = astgen.source_line, .column = astgen.source_column, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, // zig fmt: off - .This => return rvalue(gz, rl, try gz.addNodeExtended(.this, node), node), - .return_address => return rvalue(gz, rl, try gz.addNodeExtended(.ret_addr, node), node), - .error_return_trace => return rvalue(gz, rl, try gz.addNodeExtended(.error_return_trace, node), node), - .frame => return rvalue(gz, rl, try gz.addNodeExtended(.frame, node), node), - .frame_address => return rvalue(gz, rl, try gz.addNodeExtended(.frame_address, node), node), - .breakpoint => return rvalue(gz, rl, try gz.addNodeExtended(.breakpoint, node), node), + .This => return rvalue(gz, ri, try gz.addNodeExtended(.this, node), node), + .return_address => return rvalue(gz, ri, try gz.addNodeExtended(.ret_addr, node), node), + .error_return_trace => return rvalue(gz, ri, try gz.addNodeExtended(.error_return_trace, node), node), + .frame => return rvalue(gz, ri, try gz.addNodeExtended(.frame, node), node), + .frame_address => return rvalue(gz, ri, try gz.addNodeExtended(.frame_address, node), node), + .breakpoint => return rvalue(gz, ri, try gz.addNodeExtended(.breakpoint, node), node), - .type_info => return simpleUnOpType(gz, scope, rl, node, params[0], .type_info), - .size_of => return simpleUnOpType(gz, scope, rl, node, params[0], .size_of), - .bit_size_of => return simpleUnOpType(gz, 
scope, rl, node, params[0], .bit_size_of), - .align_of => return simpleUnOpType(gz, scope, rl, node, params[0], .align_of), + .type_info => return simpleUnOpType(gz, scope, ri, node, params[0], .type_info), + .size_of => return simpleUnOpType(gz, scope, ri, node, params[0], .size_of), + .bit_size_of => return simpleUnOpType(gz, scope, ri, node, params[0], .bit_size_of), + .align_of => return simpleUnOpType(gz, scope, ri, node, params[0], .align_of), - .ptr_to_int => return simpleUnOp(gz, scope, rl, node, .none, params[0], .ptr_to_int), - .compile_error => return simpleUnOp(gz, scope, rl, node, .{ .ty = .const_slice_u8_type }, params[0], .compile_error), - .set_eval_branch_quota => return simpleUnOp(gz, scope, rl, node, .{ .coerced_ty = .u32_type }, params[0], .set_eval_branch_quota), - .enum_to_int => return simpleUnOp(gz, scope, rl, node, .none, params[0], .enum_to_int), - .bool_to_int => return simpleUnOp(gz, scope, rl, node, bool_rl, params[0], .bool_to_int), - .embed_file => return simpleUnOp(gz, scope, rl, node, .{ .ty = .const_slice_u8_type }, params[0], .embed_file), - .error_name => return simpleUnOp(gz, scope, rl, node, .{ .ty = .anyerror_type }, params[0], .error_name), - .set_cold => return simpleUnOp(gz, scope, rl, node, bool_rl, params[0], .set_cold), - .set_runtime_safety => return simpleUnOp(gz, scope, rl, node, bool_rl, params[0], .set_runtime_safety), - .sqrt => return simpleUnOp(gz, scope, rl, node, .none, params[0], .sqrt), - .sin => return simpleUnOp(gz, scope, rl, node, .none, params[0], .sin), - .cos => return simpleUnOp(gz, scope, rl, node, .none, params[0], .cos), - .tan => return simpleUnOp(gz, scope, rl, node, .none, params[0], .tan), - .exp => return simpleUnOp(gz, scope, rl, node, .none, params[0], .exp), - .exp2 => return simpleUnOp(gz, scope, rl, node, .none, params[0], .exp2), - .log => return simpleUnOp(gz, scope, rl, node, .none, params[0], .log), - .log2 => return simpleUnOp(gz, scope, rl, node, .none, params[0], .log2), - .log10 => return simpleUnOp(gz, scope, rl, node, .none, params[0], .log10), - .fabs => return simpleUnOp(gz, scope, rl, node, .none, params[0], .fabs), - .floor => return simpleUnOp(gz, scope, rl, node, .none, params[0], .floor), - .ceil => return simpleUnOp(gz, scope, rl, node, .none, params[0], .ceil), - .trunc => return simpleUnOp(gz, scope, rl, node, .none, params[0], .trunc), - .round => return simpleUnOp(gz, scope, rl, node, .none, params[0], .round), - .tag_name => return simpleUnOp(gz, scope, rl, node, .none, params[0], .tag_name), - .type_name => return simpleUnOp(gz, scope, rl, node, .none, params[0], .type_name), - .Frame => return simpleUnOp(gz, scope, rl, node, .none, params[0], .frame_type), - .frame_size => return simpleUnOp(gz, scope, rl, node, .none, params[0], .frame_size), + .ptr_to_int => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .ptr_to_int), + .compile_error => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .compile_error), + .set_eval_branch_quota => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .coerced_ty = .u32_type } }, params[0], .set_eval_branch_quota), + .enum_to_int => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .enum_to_int), + .bool_to_int => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .bool_to_int), + .embed_file => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .embed_file), + .error_name => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ 
.ty = .anyerror_type } }, params[0], .error_name), + .set_cold => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .set_cold), + .set_runtime_safety => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .set_runtime_safety), + .sqrt => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .sqrt), + .sin => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .sin), + .cos => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .cos), + .tan => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .tan), + .exp => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .exp), + .exp2 => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .exp2), + .log => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .log), + .log2 => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .log2), + .log10 => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .log10), + .fabs => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .fabs), + .floor => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .floor), + .ceil => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .ceil), + .trunc => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .trunc), + .round => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .round), + .tag_name => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .tag_name), + .type_name => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .type_name), + .Frame => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .frame_type), + .frame_size => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .frame_size), - .float_to_int => return typeCast(gz, scope, rl, node, params[0], params[1], .float_to_int), - .int_to_float => return typeCast(gz, scope, rl, node, params[0], params[1], .int_to_float), - .int_to_ptr => return typeCast(gz, scope, rl, node, params[0], params[1], .int_to_ptr), - .int_to_enum => return typeCast(gz, scope, rl, node, params[0], params[1], .int_to_enum), - .float_cast => return typeCast(gz, scope, rl, node, params[0], params[1], .float_cast), - .int_cast => return typeCast(gz, scope, rl, node, params[0], params[1], .int_cast), - .ptr_cast => return typeCast(gz, scope, rl, node, params[0], params[1], .ptr_cast), - .truncate => return typeCast(gz, scope, rl, node, params[0], params[1], .truncate), + .float_to_int => return typeCast(gz, scope, ri, node, params[0], params[1], .float_to_int), + .int_to_float => return typeCast(gz, scope, ri, node, params[0], params[1], .int_to_float), + .int_to_ptr => return typeCast(gz, scope, ri, node, params[0], params[1], .int_to_ptr), + .int_to_enum => return typeCast(gz, scope, ri, node, params[0], params[1], .int_to_enum), + .float_cast => return typeCast(gz, scope, ri, node, params[0], params[1], .float_cast), + .int_cast => return typeCast(gz, scope, ri, node, params[0], params[1], .int_cast), + .ptr_cast => return typeCast(gz, scope, ri, node, params[0], params[1], .ptr_cast), + .truncate => return typeCast(gz, scope, ri, node, params[0], params[1], .truncate), // zig fmt: on .Type => { - const operand = try expr(gz, scope, .{ .coerced_ty = .type_info_type }, params[0]); + const operand = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .type_info_type } }, params[0]); const gpa = gz.astgen.gpa; @@ -7930,219 +7944,219 @@ fn 
builtinCall( }); gz.instructions.appendAssumeCapacity(new_index); const result = indexToRef(new_index); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .panic => { try emitDbgNode(gz, node); - return simpleUnOp(gz, scope, rl, node, .{ .ty = .const_slice_u8_type }, params[0], if (gz.force_comptime) .panic_comptime else .panic); + return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], if (gz.force_comptime) .panic_comptime else .panic); }, .error_to_int => { - const operand = try expr(gz, scope, .none, params[0]); + const operand = try expr(gz, scope, .{ .rl = .none }, params[0]); const result = try gz.addExtendedPayload(.error_to_int, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), .operand = operand, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .int_to_error => { - const operand = try expr(gz, scope, .{ .coerced_ty = .u16_type }, params[0]); + const operand = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, params[0]); const result = try gz.addExtendedPayload(.int_to_error, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), .operand = operand, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .align_cast => { - const dest_align = try comptimeExpr(gz, scope, align_rl, params[0]); - const rhs = try expr(gz, scope, .none, params[1]); + const dest_align = try comptimeExpr(gz, scope, align_ri, params[0]); + const rhs = try expr(gz, scope, .{ .rl = .none }, params[1]); const result = try gz.addPlNode(.align_cast, node, Zir.Inst.Bin{ .lhs = dest_align, .rhs = rhs, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .err_set_cast => { const result = try gz.addExtendedPayload(.err_set_cast, Zir.Inst.BinNode{ .lhs = try typeExpr(gz, scope, params[0]), - .rhs = try expr(gz, scope, .none, params[1]), + .rhs = try expr(gz, scope, .{ .rl = .none }, params[1]), .node = gz.nodeIndexToRelative(node), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .addrspace_cast => { const result = try gz.addExtendedPayload(.addrspace_cast, Zir.Inst.BinNode{ - .lhs = try comptimeExpr(gz, scope, .{ .ty = .address_space_type }, params[0]), - .rhs = try expr(gz, scope, .none, params[1]), + .lhs = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .address_space_type } }, params[0]), + .rhs = try expr(gz, scope, .{ .rl = .none }, params[1]), .node = gz.nodeIndexToRelative(node), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, // zig fmt: off - .has_decl => return hasDeclOrField(gz, scope, rl, node, params[0], params[1], .has_decl), - .has_field => return hasDeclOrField(gz, scope, rl, node, params[0], params[1], .has_field), + .has_decl => return hasDeclOrField(gz, scope, ri, node, params[0], params[1], .has_decl), + .has_field => return hasDeclOrField(gz, scope, ri, node, params[0], params[1], .has_field), - .clz => return bitBuiltin(gz, scope, rl, node, params[0], .clz), - .ctz => return bitBuiltin(gz, scope, rl, node, params[0], .ctz), - .pop_count => return bitBuiltin(gz, scope, rl, node, params[0], .pop_count), - .byte_swap => return bitBuiltin(gz, scope, rl, node, params[0], .byte_swap), - .bit_reverse => return bitBuiltin(gz, scope, rl, node, params[0], .bit_reverse), + .clz => return bitBuiltin(gz, scope, ri, node, params[0], .clz), + .ctz => return bitBuiltin(gz, scope, ri, node, params[0], .ctz), + .pop_count => return bitBuiltin(gz, scope, ri, node, 
params[0], .pop_count), + .byte_swap => return bitBuiltin(gz, scope, ri, node, params[0], .byte_swap), + .bit_reverse => return bitBuiltin(gz, scope, ri, node, params[0], .bit_reverse), - .div_exact => return divBuiltin(gz, scope, rl, node, params[0], params[1], .div_exact), - .div_floor => return divBuiltin(gz, scope, rl, node, params[0], params[1], .div_floor), - .div_trunc => return divBuiltin(gz, scope, rl, node, params[0], params[1], .div_trunc), - .mod => return divBuiltin(gz, scope, rl, node, params[0], params[1], .mod), - .rem => return divBuiltin(gz, scope, rl, node, params[0], params[1], .rem), + .div_exact => return divBuiltin(gz, scope, ri, node, params[0], params[1], .div_exact), + .div_floor => return divBuiltin(gz, scope, ri, node, params[0], params[1], .div_floor), + .div_trunc => return divBuiltin(gz, scope, ri, node, params[0], params[1], .div_trunc), + .mod => return divBuiltin(gz, scope, ri, node, params[0], params[1], .mod), + .rem => return divBuiltin(gz, scope, ri, node, params[0], params[1], .rem), - .shl_exact => return shiftOp(gz, scope, rl, node, params[0], params[1], .shl_exact), - .shr_exact => return shiftOp(gz, scope, rl, node, params[0], params[1], .shr_exact), + .shl_exact => return shiftOp(gz, scope, ri, node, params[0], params[1], .shl_exact), + .shr_exact => return shiftOp(gz, scope, ri, node, params[0], params[1], .shr_exact), - .bit_offset_of => return offsetOf(gz, scope, rl, node, params[0], params[1], .bit_offset_of), - .offset_of => return offsetOf(gz, scope, rl, node, params[0], params[1], .offset_of), + .bit_offset_of => return offsetOf(gz, scope, ri, node, params[0], params[1], .bit_offset_of), + .offset_of => return offsetOf(gz, scope, ri, node, params[0], params[1], .offset_of), - .c_undef => return simpleCBuiltin(gz, scope, rl, node, params[0], .c_undef), - .c_include => return simpleCBuiltin(gz, scope, rl, node, params[0], .c_include), + .c_undef => return simpleCBuiltin(gz, scope, ri, node, params[0], .c_undef), + .c_include => return simpleCBuiltin(gz, scope, ri, node, params[0], .c_include), - .cmpxchg_strong => return cmpxchg(gz, scope, rl, node, params, 1), - .cmpxchg_weak => return cmpxchg(gz, scope, rl, node, params, 0), + .cmpxchg_strong => return cmpxchg(gz, scope, ri, node, params, 1), + .cmpxchg_weak => return cmpxchg(gz, scope, ri, node, params, 0), // zig fmt: on .wasm_memory_size => { - const operand = try comptimeExpr(gz, scope, .{ .coerced_ty = .u32_type }, params[0]); + const operand = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u32_type } }, params[0]); const result = try gz.addExtendedPayload(.wasm_memory_size, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), .operand = operand, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .wasm_memory_grow => { - const index_arg = try comptimeExpr(gz, scope, .{ .coerced_ty = .u32_type }, params[0]); - const delta_arg = try expr(gz, scope, .{ .coerced_ty = .u32_type }, params[1]); + const index_arg = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u32_type } }, params[0]); + const delta_arg = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .u32_type } }, params[1]); const result = try gz.addExtendedPayload(.wasm_memory_grow, Zir.Inst.BinNode{ .node = gz.nodeIndexToRelative(node), .lhs = index_arg, .rhs = delta_arg, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .c_define => { if (!gz.c_import) return gz.astgen.failNode(node, "C define valid only inside C import block", .{}); - const name = 
try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[0]); - const value = try comptimeExpr(gz, scope, .none, params[1]); + const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0]); + const value = try comptimeExpr(gz, scope, .{ .rl = .none }, params[1]); const result = try gz.addExtendedPayload(.c_define, Zir.Inst.BinNode{ .node = gz.nodeIndexToRelative(node), .lhs = name, .rhs = value, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .splat => { - const len = try expr(gz, scope, .{ .coerced_ty = .u32_type }, params[0]); - const scalar = try expr(gz, scope, .none, params[1]); + const len = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .u32_type } }, params[0]); + const scalar = try expr(gz, scope, .{ .rl = .none }, params[1]); const result = try gz.addPlNode(.splat, node, Zir.Inst.Bin{ .lhs = len, .rhs = scalar, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .reduce => { - const op = try expr(gz, scope, .{ .ty = .reduce_op_type }, params[0]); - const scalar = try expr(gz, scope, .none, params[1]); + const op = try expr(gz, scope, .{ .rl = .{ .ty = .reduce_op_type } }, params[0]); + const scalar = try expr(gz, scope, .{ .rl = .none }, params[1]); const result = try gz.addPlNode(.reduce, node, Zir.Inst.Bin{ .lhs = op, .rhs = scalar, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .max => { - const a = try expr(gz, scope, .none, params[0]); - const b = try expr(gz, scope, .none, params[1]); + const a = try expr(gz, scope, .{ .rl = .none }, params[0]); + const b = try expr(gz, scope, .{ .rl = .none }, params[1]); const result = try gz.addPlNode(.max, node, Zir.Inst.Bin{ .lhs = a, .rhs = b, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .min => { - const a = try expr(gz, scope, .none, params[0]); - const b = try expr(gz, scope, .none, params[1]); + const a = try expr(gz, scope, .{ .rl = .none }, params[0]); + const b = try expr(gz, scope, .{ .rl = .none }, params[1]); const result = try gz.addPlNode(.min, node, Zir.Inst.Bin{ .lhs = a, .rhs = b, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, - .add_with_overflow => return overflowArithmetic(gz, scope, rl, node, params, .add_with_overflow), - .sub_with_overflow => return overflowArithmetic(gz, scope, rl, node, params, .sub_with_overflow), - .mul_with_overflow => return overflowArithmetic(gz, scope, rl, node, params, .mul_with_overflow), + .add_with_overflow => return overflowArithmetic(gz, scope, ri, node, params, .add_with_overflow), + .sub_with_overflow => return overflowArithmetic(gz, scope, ri, node, params, .sub_with_overflow), + .mul_with_overflow => return overflowArithmetic(gz, scope, ri, node, params, .mul_with_overflow), .shl_with_overflow => { const int_type = try typeExpr(gz, scope, params[0]); const log2_int_type = try gz.addUnNode(.log2_int_type, int_type, params[0]); const ptr_type = try gz.addUnNode(.overflow_arithmetic_ptr, int_type, params[0]); - const lhs = try expr(gz, scope, .{ .ty = int_type }, params[1]); - const rhs = try expr(gz, scope, .{ .ty = log2_int_type }, params[2]); - const ptr = try expr(gz, scope, .{ .ty = ptr_type }, params[3]); + const lhs = try expr(gz, scope, .{ .rl = .{ .ty = int_type } }, params[1]); + const rhs = try expr(gz, scope, .{ .rl = .{ .ty = log2_int_type } }, params[2]); + const ptr = try expr(gz, scope, .{ .rl = .{ .ty = ptr_type } }, params[3]); const 
result = try gz.addExtendedPayload(.shl_with_overflow, Zir.Inst.OverflowArithmetic{ .node = gz.nodeIndexToRelative(node), .lhs = lhs, .rhs = rhs, .ptr = ptr, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .atomic_load => { const result = try gz.addPlNode(.atomic_load, node, Zir.Inst.AtomicLoad{ // zig fmt: off - .elem_type = try typeExpr(gz, scope, params[0]), - .ptr = try expr (gz, scope, .none, params[1]), - .ordering = try expr (gz, scope, .{ .coerced_ty = .atomic_order_type }, params[2]), + .elem_type = try typeExpr(gz, scope, params[0]), + .ptr = try expr (gz, scope, .{ .rl = .none }, params[1]), + .ordering = try expr (gz, scope, .{ .rl = .{ .coerced_ty = .atomic_order_type } }, params[2]), // zig fmt: on }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .atomic_rmw => { const int_type = try typeExpr(gz, scope, params[0]); const result = try gz.addPlNode(.atomic_rmw, node, Zir.Inst.AtomicRmw{ // zig fmt: off - .ptr = try expr(gz, scope, .none, params[1]), - .operation = try expr(gz, scope, .{ .coerced_ty = .atomic_rmw_op_type }, params[2]), - .operand = try expr(gz, scope, .{ .ty = int_type }, params[3]), - .ordering = try expr(gz, scope, .{ .coerced_ty = .atomic_order_type }, params[4]), + .ptr = try expr(gz, scope, .{ .rl = .none }, params[1]), + .operation = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .atomic_rmw_op_type } }, params[2]), + .operand = try expr(gz, scope, .{ .rl = .{ .ty = int_type } }, params[3]), + .ordering = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .atomic_order_type } }, params[4]), // zig fmt: on }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .atomic_store => { const int_type = try typeExpr(gz, scope, params[0]); const result = try gz.addPlNode(.atomic_store, node, Zir.Inst.AtomicStore{ // zig fmt: off - .ptr = try expr(gz, scope, .none, params[1]), - .operand = try expr(gz, scope, .{ .ty = int_type }, params[2]), - .ordering = try expr(gz, scope, .{ .coerced_ty = .atomic_order_type }, params[3]), + .ptr = try expr(gz, scope, .{ .rl = .none }, params[1]), + .operand = try expr(gz, scope, .{ .rl = .{ .ty = int_type } }, params[2]), + .ordering = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .atomic_order_type } }, params[3]), // zig fmt: on }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .mul_add => { const float_type = try typeExpr(gz, scope, params[0]); - const mulend1 = try expr(gz, scope, .{ .coerced_ty = float_type }, params[1]); - const mulend2 = try expr(gz, scope, .{ .coerced_ty = float_type }, params[2]); - const addend = try expr(gz, scope, .{ .ty = float_type }, params[3]); + const mulend1 = try expr(gz, scope, .{ .rl = .{ .coerced_ty = float_type } }, params[1]); + const mulend2 = try expr(gz, scope, .{ .rl = .{ .coerced_ty = float_type } }, params[2]); + const addend = try expr(gz, scope, .{ .rl = .{ .ty = float_type } }, params[3]); const result = try gz.addPlNode(.mul_add, node, Zir.Inst.MulAdd{ .mulend1 = mulend1, .mulend2 = mulend2, .addend = addend, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .call => { - const options = try comptimeExpr(gz, scope, .{ .ty = .call_options_type }, params[0]); + const options = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .call_options_type } }, params[0]); const callee = try calleeExpr(gz, scope, params[1]); - const args = try expr(gz, scope, .none, params[2]); + const args = try expr(gz, scope, .{ .rl = .none }, 
params[2]); const result = try gz.addPlNode(.builtin_call, node, Zir.Inst.BuiltinCall{ .options = options, .callee = callee, @@ -8153,115 +8167,115 @@ fn builtinCall( .ensure_result_used = false, }, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .field_parent_ptr => { const parent_type = try typeExpr(gz, scope, params[0]); - const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]); + const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]); const result = try gz.addPlNode(.field_parent_ptr, node, Zir.Inst.FieldParentPtr{ .parent_type = parent_type, .field_name = field_name, - .field_ptr = try expr(gz, scope, .none, params[2]), + .field_ptr = try expr(gz, scope, .{ .rl = .none }, params[2]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .memcpy => { const result = try gz.addPlNode(.memcpy, node, Zir.Inst.Memcpy{ - .dest = try expr(gz, scope, .{ .coerced_ty = .manyptr_u8_type }, params[0]), - .source = try expr(gz, scope, .{ .coerced_ty = .manyptr_const_u8_type }, params[1]), - .byte_count = try expr(gz, scope, .{ .coerced_ty = .usize_type }, params[2]), + .dest = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .manyptr_u8_type } }, params[0]), + .source = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .manyptr_const_u8_type } }, params[1]), + .byte_count = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, params[2]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .memset => { const result = try gz.addPlNode(.memset, node, Zir.Inst.Memset{ - .dest = try expr(gz, scope, .{ .coerced_ty = .manyptr_u8_type }, params[0]), - .byte = try expr(gz, scope, .{ .coerced_ty = .u8_type }, params[1]), - .byte_count = try expr(gz, scope, .{ .coerced_ty = .usize_type }, params[2]), + .dest = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .manyptr_u8_type } }, params[0]), + .byte = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .u8_type } }, params[1]), + .byte_count = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, params[2]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .shuffle => { const result = try gz.addPlNode(.shuffle, node, Zir.Inst.Shuffle{ .elem_type = try typeExpr(gz, scope, params[0]), - .a = try expr(gz, scope, .none, params[1]), - .b = try expr(gz, scope, .none, params[2]), - .mask = try comptimeExpr(gz, scope, .none, params[3]), + .a = try expr(gz, scope, .{ .rl = .none }, params[1]), + .b = try expr(gz, scope, .{ .rl = .none }, params[2]), + .mask = try comptimeExpr(gz, scope, .{ .rl = .none }, params[3]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .select => { const result = try gz.addExtendedPayload(.select, Zir.Inst.Select{ .node = gz.nodeIndexToRelative(node), .elem_type = try typeExpr(gz, scope, params[0]), - .pred = try expr(gz, scope, .none, params[1]), - .a = try expr(gz, scope, .none, params[2]), - .b = try expr(gz, scope, .none, params[3]), + .pred = try expr(gz, scope, .{ .rl = .none }, params[1]), + .a = try expr(gz, scope, .{ .rl = .none }, params[2]), + .b = try expr(gz, scope, .{ .rl = .none }, params[3]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .async_call => { const result = try gz.addExtendedPayload(.builtin_async_call, Zir.Inst.AsyncCall{ .node = gz.nodeIndexToRelative(node), - .frame_buffer = try expr(gz, scope, .none, params[0]), - 
.result_ptr = try expr(gz, scope, .none, params[1]), - .fn_ptr = try expr(gz, scope, .none, params[2]), - .args = try expr(gz, scope, .none, params[3]), + .frame_buffer = try expr(gz, scope, .{ .rl = .none }, params[0]), + .result_ptr = try expr(gz, scope, .{ .rl = .none }, params[1]), + .fn_ptr = try expr(gz, scope, .{ .rl = .none }, params[2]), + .args = try expr(gz, scope, .{ .rl = .none }, params[3]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .Vector => { const result = try gz.addPlNode(.vector_type, node, Zir.Inst.Bin{ - .lhs = try comptimeExpr(gz, scope, .{ .coerced_ty = .u32_type }, params[0]), + .lhs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u32_type } }, params[0]), .rhs = try typeExpr(gz, scope, params[1]), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, .prefetch => { - const ptr = try expr(gz, scope, .none, params[0]); - const options = try comptimeExpr(gz, scope, .{ .ty = .prefetch_options_type }, params[1]); + const ptr = try expr(gz, scope, .{ .rl = .none }, params[0]); + const options = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .prefetch_options_type } }, params[1]); const result = try gz.addExtendedPayload(.prefetch, Zir.Inst.BinNode{ .node = gz.nodeIndexToRelative(node), .lhs = ptr, .rhs = options, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); }, } } fn simpleNoOpVoid( gz: *GenZir, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { _ = try gz.addNode(tag, node); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); } fn hasDeclOrField( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs_node: Ast.Node.Index, rhs_node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { const container_type = try typeExpr(gz, scope, lhs_node); - const name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, rhs_node); + const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, rhs_node); const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{ .lhs = container_type, .rhs = name, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn typeCast( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs_node: Ast.Node.Index, rhs_node: Ast.Node.Index, @@ -8269,42 +8283,42 @@ fn typeCast( ) InnerError!Zir.Inst.Ref { const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{ .lhs = try typeExpr(gz, scope, lhs_node), - .rhs = try expr(gz, scope, .none, rhs_node), + .rhs = try expr(gz, scope, .{ .rl = .none }, rhs_node), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn simpleUnOpType( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, operand_node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { const operand = try typeExpr(gz, scope, operand_node); const result = try gz.addUnNode(tag, operand, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn simpleUnOp( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, - operand_rl: ResultLoc, + operand_ri: ResultInfo, operand_node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { - const operand = try expr(gz, scope, operand_rl, operand_node); + const operand = try expr(gz, scope, operand_ri, operand_node); const result = try 
gz.addUnNode(tag, operand, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn negation( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { const astgen = gz.astgen; @@ -8316,18 +8330,18 @@ fn negation( // its negativity rather than having it go through comptime subtraction. const operand_node = node_datas[node].lhs; if (node_tags[operand_node] == .number_literal) { - return numberLiteral(gz, rl, operand_node, node, .negative); + return numberLiteral(gz, ri, operand_node, node, .negative); } - const operand = try expr(gz, scope, .none, operand_node); + const operand = try expr(gz, scope, .{ .rl = .none }, operand_node); const result = try gz.addUnNode(.negate, operand, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn cmpxchg( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, params: []const Ast.Node.Index, small: u16, @@ -8336,98 +8350,98 @@ fn cmpxchg( const result = try gz.addExtendedPayloadSmall(.cmpxchg, small, Zir.Inst.Cmpxchg{ // zig fmt: off .node = gz.nodeIndexToRelative(node), - .ptr = try expr(gz, scope, .none, params[1]), - .expected_value = try expr(gz, scope, .{ .ty = int_type }, params[2]), - .new_value = try expr(gz, scope, .{ .coerced_ty = int_type }, params[3]), - .success_order = try expr(gz, scope, .{ .coerced_ty = .atomic_order_type }, params[4]), - .failure_order = try expr(gz, scope, .{ .coerced_ty = .atomic_order_type }, params[5]), + .ptr = try expr(gz, scope, .{ .rl = .none }, params[1]), + .expected_value = try expr(gz, scope, .{ .rl = .{ .ty = int_type } }, params[2]), + .new_value = try expr(gz, scope, .{ .rl = .{ .coerced_ty = int_type } }, params[3]), + .success_order = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .atomic_order_type } }, params[4]), + .failure_order = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .atomic_order_type } }, params[5]), // zig fmt: on }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn bitBuiltin( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, operand_node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { - const operand = try expr(gz, scope, .none, operand_node); + const operand = try expr(gz, scope, .{ .rl = .none }, operand_node); const result = try gz.addUnNode(tag, operand, node); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn divBuiltin( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs_node: Ast.Node.Index, rhs_node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{ - .lhs = try expr(gz, scope, .none, lhs_node), - .rhs = try expr(gz, scope, .none, rhs_node), + .lhs = try expr(gz, scope, .{ .rl = .none }, lhs_node), + .rhs = try expr(gz, scope, .{ .rl = .none }, rhs_node), }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn simpleCBuiltin( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, operand_node: Ast.Node.Index, tag: Zir.Inst.Extended, ) InnerError!Zir.Inst.Ref { const name: []const u8 = if (tag == .c_undef) "C undef" else "C include"; if (!gz.c_import) return gz.astgen.failNode(node, "{s} valid only inside C import block", .{name}); - const operand = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, operand_node); + const operand = try 
comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, operand_node); _ = try gz.addExtendedPayload(tag, Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(node), .operand = operand, }); - return rvalue(gz, rl, .void_value, node); + return rvalue(gz, ri, .void_value, node); } fn offsetOf( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs_node: Ast.Node.Index, rhs_node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { const type_inst = try typeExpr(gz, scope, lhs_node); - const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, rhs_node); + const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, rhs_node); const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{ .lhs = type_inst, .rhs = field_name, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn shiftOp( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, lhs_node: Ast.Node.Index, rhs_node: Ast.Node.Index, tag: Zir.Inst.Tag, ) InnerError!Zir.Inst.Ref { - const lhs = try expr(gz, scope, .none, lhs_node); + const lhs = try expr(gz, scope, .{ .rl = .none }, lhs_node); const log2_int_type = try gz.addUnNode(.typeof_log2_int_type, lhs, lhs_node); - const rhs = try expr(gz, scope, .{ .ty_shift_operand = log2_int_type }, rhs_node); + const rhs = try expr(gz, scope, .{ .rl = .{ .ty = log2_int_type }, .ctx = .shift_op }, rhs_node); const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{ .lhs = lhs, .rhs = rhs, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn cImport( @@ -8445,7 +8459,7 @@ fn cImport( defer block_scope.unstack(); const block_inst = try gz.makeBlockInst(.c_import, node); - const block_result = try expr(&block_scope, &block_scope.base, .none, body_node); + const block_result = try expr(&block_scope, &block_scope.base, .{ .rl = .none }, body_node); _ = try gz.addUnNode(.ensure_result_used, block_result, node); if (!gz.refIsNoReturn(block_result)) { _ = try block_scope.addBreak(.break_inline, block_inst, .void_value); @@ -8460,29 +8474,29 @@ fn cImport( fn overflowArithmetic( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, params: []const Ast.Node.Index, tag: Zir.Inst.Extended, ) InnerError!Zir.Inst.Ref { const int_type = try typeExpr(gz, scope, params[0]); const ptr_type = try gz.addUnNode(.overflow_arithmetic_ptr, int_type, params[0]); - const lhs = try expr(gz, scope, .{ .ty = int_type }, params[1]); - const rhs = try expr(gz, scope, .{ .ty = int_type }, params[2]); - const ptr = try expr(gz, scope, .{ .ty = ptr_type }, params[3]); + const lhs = try expr(gz, scope, .{ .rl = .{ .ty = int_type } }, params[1]); + const rhs = try expr(gz, scope, .{ .rl = .{ .ty = int_type } }, params[2]); + const ptr = try expr(gz, scope, .{ .rl = .{ .ty = ptr_type } }, params[3]); const result = try gz.addExtendedPayload(tag, Zir.Inst.OverflowArithmetic{ .node = gz.nodeIndexToRelative(node), .lhs = lhs, .rhs = rhs, .ptr = ptr, }); - return rvalue(gz, rl, result, node); + return rvalue(gz, ri, result, node); } fn callExpr( gz: *GenZir, scope: *Scope, - rl: ResultLoc, + ri: ResultInfo, node: Ast.Node.Index, call: Ast.full.Call, ) InnerError!Zir.Inst.Ref { @@ -8534,7 +8548,7 @@ fn callExpr( defer arg_block.unstack(); // `call_inst` is reused to provide the param type. 
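
(The propagate_error_trace hunk a few lines below swaps tag-sniffing on the
result pointer for a direct check of `ri.ctx`. Illustrative user-level Zig
matching the comment in that hunk — an assumption about intended behavior,
not code from this change:)

    fn fail() !u32 {
        return error.Boom;
    }

    fn demo() !u32 {
        const a = fail() catch 0; // error-handling operator: trace propagates
        const b = try fail(); // likewise; the call sees ctx == .error_handling_expr
        _ = a;
        _ = b;
        return fail(); // return operand: ctx == .@"return", trace propagates
    }
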
- const arg_ref = try expr(&arg_block, &arg_block.base, .{ .coerced_ty = call_inst }, param_node); + const arg_ref = try expr(&arg_block, &arg_block.base, .{ .rl = .{ .coerced_ty = call_inst } }, param_node); _ = try arg_block.addBreak(.break_inline, call_index, arg_ref); const body = arg_block.instructionsSlice(); @@ -8547,13 +8561,8 @@ fn callExpr( // If our result location is a try/catch/error-union-if/return, the error trace propagates. // Otherwise, it should always be popped (handled in Sema). - const propagate_error_trace = switch (rl) { - .catch_none, .catch_ref => true, // Propagate to try/catch/error-union-if - .ptr, .ty => |ref| b: { // Otherwise, propagate if result loc is a return - const inst = refToIndex(ref) orelse break :b false; - const zir_tags = astgen.instructions.items(.tag); - break :b zir_tags[inst] == .ret_ptr or zir_tags[inst] == .ret_type; - }, + const propagate_error_trace = switch (ri.ctx) { + .error_handling_expr, .@"return" => true, // Propagate to try/catch/error-union-if and return else => false, }; @@ -8575,7 +8584,7 @@ fn callExpr( .payload_index = payload_index, } }, }); - return rvalue(gz, rl, call_inst, node); // TODO function call with result location + return rvalue(gz, ri, call_inst, node); // TODO function call with result location } /// calleeExpr generates the function part of a call expression (f in f(x)), or the @@ -8596,7 +8605,7 @@ fn calleeExpr( const tag = tree.nodes.items(.tag)[node]; switch (tag) { - .field_access => return addFieldAccess(.field_call_bind, gz, scope, .ref, node), + .field_access => return addFieldAccess(.field_call_bind, gz, scope, .{ .rl = .ref }, node), .builtin_call_two, .builtin_call_two_comma, @@ -8628,8 +8637,8 @@ fn calleeExpr( // If anything is wrong, fall back to builtinCall. // It will emit any necessary compile errors and notes. if (std.mem.eql(u8, builtin_name, "@field") and params.len == 2) { - const lhs = try expr(gz, scope, .ref, params[0]); - const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]); + const lhs = try expr(gz, scope, .{ .rl = .ref }, params[0]); + const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]); return gz.addExtendedPayload(.field_call_bind_named, Zir.Inst.FieldNamedNode{ .node = gz.nodeIndexToRelative(node), .lhs = lhs, @@ -8637,9 +8646,9 @@ fn calleeExpr( }); } - return builtinCall(gz, scope, .none, node, params); + return builtinCall(gz, scope, .{ .rl = .none }, node, params); }, - else => return expr(gz, scope, .none, node), + else => return expr(gz, scope, .{ .rl = .none }, node), } } @@ -9655,7 +9664,7 @@ fn nodeUsesAnonNameStrategy(tree: *const Ast, node: Ast.Node.Index) bool { /// Assumes nothing stacked on `gz`. fn rvalue( gz: *GenZir, - rl: ResultLoc, + ri: ResultInfo, raw_result: Zir.Inst.Ref, src_node: Ast.Node.Index, ) InnerError!Zir.Inst.Ref { @@ -9670,14 +9679,14 @@ fn rvalue( break :r raw_result; }; if (gz.endsWithNoReturn()) return result; - switch (rl) { - .none, .catch_none, .coerced_ty => return result, + switch (ri.rl) { + .none, .coerced_ty => return result, .discard => { // Emit a compile error for discarding error values. _ = try gz.addUnNode(.ensure_result_non_error, result, src_node); return result; }, - .ref, .catch_ref => { + .ref => { // We need a pointer but we have a value. 
// Unfortunately it's not quite as simple as directly emitting a ref // instruction here because we need subsequent address-of operator on @@ -9696,7 +9705,7 @@ fn rvalue( } return indexToRef(gop.value_ptr.*); }, - .ty, .ty_shift_operand => |ty_inst| { + .ty => |ty_inst| { // Quickly eliminate some common, unnecessary type coercion. const as_ty = @as(u64, @enumToInt(Zir.Inst.Ref.type_type)) << 32; const as_comptime_int = @as(u64, @enumToInt(Zir.Inst.Ref.comptime_int_type)) << 32; @@ -9757,7 +9766,7 @@ fn rvalue( => return result, // type of result is already correct // Need an explicit type coercion instruction. - else => return gz.addPlNode(rl.zirTag(), src_node, Zir.Inst.As{ + else => return gz.addPlNode(ri.zirTag(), src_node, Zir.Inst.As{ .dest_type = ty_inst, .operand = result, }), @@ -10451,8 +10460,8 @@ const GenZir = struct { label: ?Label = null, break_block: Zir.Inst.Index = 0, continue_block: Zir.Inst.Index = 0, - /// Only valid when setBreakResultLoc is called. - break_result_loc: AstGen.ResultLoc = undefined, + /// Only valid when setBreakResultInfo is called. + break_result_info: AstGen.ResultInfo = undefined, /// When a block has a pointer result location, here it is. rl_ptr: Zir.Inst.Ref = .none, /// When a block has a type result location, here it is. @@ -10562,7 +10571,7 @@ const GenZir = struct { fn finishCoercion( as_scope: *GenZir, parent_gz: *GenZir, - rl: ResultLoc, + ri: ResultInfo, src_node: Ast.Node.Index, result: Zir.Inst.Ref, dest_type: Zir.Inst.Ref, @@ -10588,7 +10597,7 @@ const GenZir = struct { as_scope.instructions_top = GenZir.unstacked_top; // as_scope now unstacked, can add new instructions to parent_gz const casted_result = try parent_gz.addBin(.as, dest_type, result); - return rvalue(parent_gz, rl, casted_result, src_node); + return rvalue(parent_gz, ri, casted_result, src_node); } else { // implicitly move all as_scope instructions to parent_gz as_scope.instructions_top = GenZir.unstacked_top; @@ -10631,7 +10640,7 @@ const GenZir = struct { return gz.astgen.tree.firstToken(gz.decl_node_index); } - fn setBreakResultLoc(gz: *GenZir, parent_rl: AstGen.ResultLoc) void { + fn setBreakResultInfo(gz: *GenZir, parent_ri: AstGen.ResultInfo) void { // Depending on whether the result location is a pointer or value, different // ZIR needs to be generated. In the former case we rely on storing to the // pointer to communicate the result, and use breakvoid; in the latter case @@ -10640,32 +10649,32 @@ const GenZir = struct { // the scenario where the result location is not consumed. In this case // we emit ZIR for the block break instructions to have the result values, // and then rvalue() on that to pass the value to the result location. 
- switch (parent_rl) { - .ty, .ty_shift_operand, .coerced_ty => |ty_inst| { + switch (parent_ri.rl) { + .ty, .coerced_ty => |ty_inst| { gz.rl_ty_inst = ty_inst; - gz.break_result_loc = parent_rl; + gz.break_result_info = parent_ri; }, - .discard, .none, .catch_none, .ref, .catch_ref => { + .discard, .none, .ref => { gz.rl_ty_inst = .none; - gz.break_result_loc = parent_rl; + gz.break_result_info = parent_ri; }, .ptr => |ptr_res| { gz.rl_ty_inst = .none; - gz.break_result_loc = .{ .ptr = .{ .inst = ptr_res.inst } }; + gz.break_result_info = .{ .rl = .{ .ptr = .{ .inst = ptr_res.inst } } }; }, .inferred_ptr => |ptr| { gz.rl_ty_inst = .none; gz.rl_ptr = ptr; - gz.break_result_loc = .{ .block_ptr = gz }; + gz.break_result_info = .{ .rl = .{ .block_ptr = gz }, .ctx = parent_ri.ctx }; }, .block_ptr => |parent_block_scope| { gz.rl_ty_inst = parent_block_scope.rl_ty_inst; gz.rl_ptr = parent_block_scope.rl_ptr; - gz.break_result_loc = .{ .block_ptr = gz }; + gz.break_result_info = .{ .rl = .{ .block_ptr = gz }, .ctx = parent_ri.ctx }; }, } } @@ -11815,10 +11824,10 @@ const GenZir = struct { return new_index; } - fn addRet(gz: *GenZir, rl: ResultLoc, operand: Zir.Inst.Ref, node: Ast.Node.Index) !void { - switch (rl) { + fn addRet(gz: *GenZir, ri: ResultInfo, operand: Zir.Inst.Ref, node: Ast.Node.Index) !void { + switch (ri.rl) { .ptr => |ptr_res| _ = try gz.addUnNode(.ret_load, ptr_res.inst, node), - .ty, .ty_shift_operand => _ = try gz.addUnNode(.ret_node, operand, node), + .ty => _ = try gz.addUnNode(.ret_node, operand, node), else => unreachable, } } diff --git a/test/behavior/bugs/12891.zig b/test/behavior/bugs/12891.zig index 97126bde4a..78947d1776 100644 --- a/test/behavior/bugs/12891.zig +++ b/test/behavior/bugs/12891.zig @@ -7,6 +7,7 @@ test "issue12891" { try std.testing.expect(i < f); } test "nan" { + if (builtin.zig_backend == .stage1) return error.SkipZigTest; // TODO if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO const f = comptime std.math.nan(f64); From b529d8e48f2082c4e8df10d0ff26e2c7702bb693 Mon Sep 17 00:00:00 2001 From: Cody Tapscott Date: Fri, 23 Sep 2022 11:50:55 -0700 Subject: [PATCH 07/12] stage2: Propagate error return trace into fn call This change extends the "lifetime" of the error return trace associated with an error to include the duration of a function call it is passed to. This means that if a function returns an error, its return trace will include the error return trace for any error inputs. This is needed to support `testing.expectError` and similar functions. If a function returns a non-error, we have to clean up any error return traces created by error-able call arguments. --- src/AstGen.zig | 10 +- src/Sema.zig | 237 ++++++++++++++++++++++++++++++------------ test/stack_traces.zig | 69 ++++++++++++ 3 files changed, 247 insertions(+), 69 deletions(-) diff --git a/src/AstGen.zig b/src/AstGen.zig index 65818d98df..0250abfdbc 100644 --- a/src/AstGen.zig +++ b/src/AstGen.zig @@ -335,6 +335,8 @@ pub const ResultInfo = struct { error_handling_expr, /// The expression is the right-hand side of a shift operation. shift_op, + /// The expression is an argument in a function call. + fn_arg, /// No specific operator in particular. none, }; @@ -5217,9 +5219,9 @@ fn popErrorReturnTrace( const result_is_err = nodeMayEvalToError(tree, node); - // If we are breaking to a try/catch/error-union-if/return, the error trace propagates. + // If we are breaking to a try/catch/error-union-if/return or a function call, the error trace propagates. 
const propagate_error_trace = switch (ri.ctx) { - .error_handling_expr, .@"return" => true, + .error_handling_expr, .@"return", .fn_arg => true, else => false, }; @@ -8548,7 +8550,7 @@ fn callExpr( defer arg_block.unstack(); // `call_inst` is reused to provide the param type. - const arg_ref = try expr(&arg_block, &arg_block.base, .{ .rl = .{ .coerced_ty = call_inst } }, param_node); + const arg_ref = try expr(&arg_block, &arg_block.base, .{ .rl = .{ .coerced_ty = call_inst }, .ctx = .fn_arg }, param_node); _ = try arg_block.addBreak(.break_inline, call_index, arg_ref); const body = arg_block.instructionsSlice(); @@ -8562,7 +8564,7 @@ fn callExpr( // If our result location is a try/catch/error-union-if/return, the error trace propagates. // Otherwise, it should always be popped (handled in Sema). const propagate_error_trace = switch (ri.ctx) { - .error_handling_expr, .@"return" => true, // Propagate to try/catch/error-union-if and return + .error_handling_expr, .@"return", .fn_arg => true, // Propagate to try/catch/error-union-if, return, and other function calls else => false, }; diff --git a/src/Sema.zig b/src/Sema.zig index 791519c12a..70cff08e29 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -499,6 +499,25 @@ pub const Block = struct { return result_index; } + /// Insert an instruction into the block at `index`. Moves all following + /// instructions forward in the block to make room. Operation is O(N). + pub fn insertInst(block: *Block, index: Air.Inst.Index, inst: Air.Inst) error{OutOfMemory}!Air.Inst.Ref { + return Air.indexToRef(try block.insertInstAsIndex(index, inst)); + } + + pub fn insertInstAsIndex(block: *Block, index: Air.Inst.Index, inst: Air.Inst) error{OutOfMemory}!Air.Inst.Index { + const sema = block.sema; + const gpa = sema.gpa; + + try sema.air_instructions.ensureUnusedCapacity(gpa, 1); + + const result_index = @intCast(Air.Inst.Index, sema.air_instructions.len); + sema.air_instructions.appendAssumeCapacity(inst); + + try block.instructions.insert(gpa, index, result_index); + return result_index; + } + fn addUnreachable(block: *Block, src: LazySrcLoc, safety_check: bool) !void { if (safety_check and block.wantSafety()) { _ = try block.sema.safetyPanic(block, src, .unreach); @@ -5648,6 +5667,85 @@ fn funcDeclSrc(sema: *Sema, block: *Block, src: LazySrcLoc, func_inst: Air.Inst. return owner_decl.srcLoc(); } +/// Add instructions to block to "pop" the error return trace. +/// If `operand` is provided, only pops if operand is non-error. +fn popErrorReturnTrace( + sema: *Sema, + block: *Block, + src: LazySrcLoc, + operand: ?Air.Inst.Ref, + saved_error_trace_index: Air.Inst.Ref, +) CompileError!void { + var is_non_error: ?bool = null; + var is_non_error_inst: Air.Inst.Ref = undefined; + if (operand) |op| { + is_non_error_inst = try sema.analyzeIsNonErr(block, src, op); + if (try sema.resolveDefinedValue(block, src, is_non_error_inst)) |cond_val| + is_non_error = cond_val.toBool(); + } else is_non_error = true; // no operand means pop unconditionally + + if (is_non_error == true) { + // AstGen determined this result does not go to an error-handling expr (try/catch/return etc.), or + // the result is comptime-known to be a non-error. Either way, pop unconditionally. 
+ + const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace"); + const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty); + const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty); + const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty); + const field_ptr = try sema.structFieldPtr(block, src, err_return_trace, "index", src, stack_trace_ty, true); + try sema.storePtr2(block, src, field_ptr, src, saved_error_trace_index, src, .store); + } else if (is_non_error == null) { + // The result might be an error. If it is, we leave the error trace alone. If it isn't, we need + // to pop any error trace that may have been propagated from our arguments. + + try sema.air_extra.ensureUnusedCapacity(sema.gpa, @typeInfo(Air.Block).Struct.fields.len); + const cond_block_inst = try block.addInstAsIndex(.{ + .tag = .block, + .data = .{ + .ty_pl = .{ + .ty = Air.Inst.Ref.void_type, + .payload = undefined, // updated below + }, + }, + }); + + var then_block = block.makeSubBlock(); + defer then_block.instructions.deinit(sema.gpa); + + // If non-error, then pop the error return trace by restoring the index. + const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace"); + const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty); + const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty); + const err_return_trace = try then_block.addTy(.err_return_trace, ptr_stack_trace_ty); + const field_ptr = try sema.structFieldPtr(&then_block, src, err_return_trace, "index", src, stack_trace_ty, true); + try sema.storePtr2(&then_block, src, field_ptr, src, saved_error_trace_index, src, .store); + _ = try then_block.addBr(cond_block_inst, Air.Inst.Ref.void_value); + + // Otherwise, do nothing + var else_block = block.makeSubBlock(); + defer else_block.instructions.deinit(sema.gpa); + _ = try else_block.addBr(cond_block_inst, Air.Inst.Ref.void_value); + + try sema.air_extra.ensureUnusedCapacity(sema.gpa, @typeInfo(Air.CondBr).Struct.fields.len + + then_block.instructions.items.len + else_block.instructions.items.len + + @typeInfo(Air.Block).Struct.fields.len + 1); // +1 for the sole .cond_br instruction in the .block + + const cond_br_inst = @intCast(Air.Inst.Index, sema.air_instructions.len); + try sema.air_instructions.append(sema.gpa, .{ .tag = .cond_br, .data = .{ .pl_op = .{ + .operand = is_non_error_inst, + .payload = sema.addExtraAssumeCapacity(Air.CondBr{ + .then_body_len = @intCast(u32, then_block.instructions.items.len), + .else_body_len = @intCast(u32, else_block.instructions.items.len), + }), + } } }); + sema.air_extra.appendSliceAssumeCapacity(then_block.instructions.items); + sema.air_extra.appendSliceAssumeCapacity(else_block.instructions.items); + + sema.air_instructions.items(.data)[cond_block_inst].ty_pl.payload = sema.addExtraAssumeCapacity(Air.Block{ .body_len = 1 }); + sema.air_extra.appendAssumeCapacity(cond_br_inst); + } +} + fn zirCall( sema: *Sema, block: *Block, @@ -5737,6 +5835,9 @@ fn zirCall( const args_body = sema.code.extra[extra.end..]; + var input_is_error = false; + const block_index = @intCast(Air.Inst.Index, block.instructions.items.len); + const parent_comptime = block.is_comptime; // `extra_index` and `arg_index` are separate since the bound function is passed as the first argument. 
var extra_index: usize = 0; @@ -5754,10 +5855,8 @@ fn zirCall( else func_ty_info.param_types[arg_index]; - const old_comptime = block.is_comptime; - defer block.is_comptime = old_comptime; // Generate args to comptime params in comptime block. - block.is_comptime = parent_comptime; + defer block.is_comptime = parent_comptime; if (arg_index < fn_params_len and func_ty_info.comptime_params[arg_index]) { block.is_comptime = true; } @@ -5766,13 +5865,58 @@ fn zirCall( try sema.inst_map.put(sema.gpa, inst, param_ty_inst); const resolved = try sema.resolveBody(block, args_body[arg_start..arg_end], inst); - if (sema.typeOf(resolved).zigTypeTag() == .NoReturn) { + const resolved_ty = sema.typeOf(resolved); + if (resolved_ty.zigTypeTag() == .NoReturn) { return resolved; } + if (resolved_ty.isError()) { + input_is_error = true; + } resolved_args[arg_index] = resolved; } + if (sema.owner_func == null or !sema.owner_func.?.calls_or_awaits_errorable_fn) + input_is_error = false; // input was an error type, but no errorable fn's were actually called - return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, pop_error_return_trace, resolved_args, bound_arg_src); + const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; + if (backend_supports_error_return_tracing and sema.mod.comp.bin_file.options.error_return_tracing and + !block.is_comptime and (input_is_error or pop_error_return_trace)) + { + const call_inst: Air.Inst.Ref = if (modifier == .always_tail) undefined else b: { + break :b try sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, resolved_args, bound_arg_src); + }; + + const return_ty = sema.typeOf(call_inst); + if (modifier != .always_tail and return_ty.isNoReturn()) + return call_inst; // call to "fn(...) noreturn", don't pop + + // If any input is an error-type, we might need to pop any trace it generated. Otherwise, we only + // need to clean-up our own trace if we were passed to a non-error-handling expression. + if (input_is_error or (pop_error_return_trace and modifier != .always_tail and return_ty.isError())) { + const unresolved_stack_trace_ty = try sema.getBuiltinType(block, call_src, "StackTrace"); + const stack_trace_ty = try sema.resolveTypeFields(block, call_src, unresolved_stack_trace_ty); + const field_index = try sema.structFieldIndex(block, stack_trace_ty, "index", call_src); + + // Insert a save instruction before the arg resolution + call instructions we just generated + const save_inst = try block.insertInst(block_index, .{ + .tag = .save_err_return_trace_index, + .data = .{ .ty_pl = .{ + .ty = try sema.addType(stack_trace_ty), + .payload = @intCast(u32, field_index), + } }, + }); + + // Pop the error return trace, testing the result for non-error if necessary + const operand = if (pop_error_return_trace or modifier == .always_tail) null else call_inst; + try sema.popErrorReturnTrace(block, call_src, operand, save_inst); + } + + if (modifier == .always_tail) // Perform the call *after* the restore, so that a tail call is possible. 
+ return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, resolved_args, bound_arg_src); + + return call_inst; + } else { + return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, resolved_args, bound_arg_src); + } } const GenericCallAdapter = struct { @@ -5884,7 +6028,6 @@ fn analyzeCall( call_src: LazySrcLoc, modifier: std.builtin.CallOptions.Modifier, ensure_result_used: bool, - pop_error_return_trace: bool, uncasted_args: []const Air.Inst.Ref, bound_arg_src: ?LazySrcLoc, ) CompileError!Air.Inst.Ref { @@ -6335,55 +6478,19 @@ fn analyzeCall( sema.owner_func.?.calls_or_awaits_errorable_fn = true; } - const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; - const emit_error_trace_save_restore = sema.mod.comp.bin_file.options.error_return_tracing and - backend_supports_error_return_tracing and - pop_error_return_trace and func_ty_info.return_type.isError(); - - if (emit_error_trace_save_restore) { - // This function call is error-able (and so can generate an error trace), but AstGen determined - // that its result does not go to an error-handling operator (try/catch/return etc.). We need to - // save and restore the error trace index here, effectively "popping" the new entries immediately. - - const unresolved_stack_trace_ty = try sema.getBuiltinType(block, call_src, "StackTrace"); - const stack_trace_ty = try sema.resolveTypeFields(block, call_src, unresolved_stack_trace_ty); - const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty); - const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty); - const field_ptr = try sema.structFieldPtr(block, call_src, err_return_trace, "index", call_src, stack_trace_ty, true); - - const saved_index = try sema.analyzeLoad(block, call_src, field_ptr, call_src); - - try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len + - args.len); - const func_inst = try block.addInst(.{ - .tag = call_tag, - .data = .{ .pl_op = .{ - .operand = func, - .payload = sema.addExtraAssumeCapacity(Air.Call{ - .args_len = @intCast(u32, args.len), - }), - } }, - }); - sema.appendRefsAssumeCapacity(args); - - try sema.storePtr2(block, call_src, field_ptr, call_src, saved_index, call_src, .store); - - break :res func_inst; - } else { - try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len + - args.len); - const func_inst = try block.addInst(.{ - .tag = call_tag, - .data = .{ .pl_op = .{ - .operand = func, - .payload = sema.addExtraAssumeCapacity(Air.Call{ - .args_len = @intCast(u32, args.len), - }), - } }, - }); - sema.appendRefsAssumeCapacity(args); - break :res func_inst; - } + try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len + + args.len); + const func_inst = try block.addInst(.{ + .tag = call_tag, + .data = .{ .pl_op = .{ + .operand = func, + .payload = sema.addExtraAssumeCapacity(Air.Call{ + .args_len = @intCast(u32, args.len), + }), + } }, + }); + sema.appendRefsAssumeCapacity(args); + break :res func_inst; }; if (ensure_result_used) { @@ -10965,7 +11072,7 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op const panic_fn = try sema.getBuiltin(block, src, "panicUnwrapError"); const err_return_trace = try sema.getErrorReturnTrace(block, src); const args: [2]Air.Inst.Ref = .{ err_return_trace, operand }; - _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, false, &args, null); + _ = try 
sema.analyzeCall(block, panic_fn, src, src, .auto, false, &args, null); return true; }, .panic => { @@ -10976,7 +11083,7 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op const panic_fn = try sema.getBuiltin(block, src, "panic"); const err_return_trace = try sema.getErrorReturnTrace(block, src); const args: [3]Air.Inst.Ref = .{ msg_inst, err_return_trace, .null_value }; - _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, false, &args, null); + _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, &args, null); return true; }, else => unreachable, @@ -16179,7 +16286,7 @@ fn retWithErrTracing( const args: [1]Air.Inst.Ref = .{err_return_trace}; if (!need_check) { - _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, false, &args, null); + _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args, null); _ = try block.addUnOp(ret_tag, operand); return always_noreturn; } @@ -16190,7 +16297,7 @@ fn retWithErrTracing( var else_block = block.makeSubBlock(); defer else_block.instructions.deinit(gpa); - _ = try sema.analyzeCall(&else_block, return_err_fn, src, src, .never_inline, false, false, &args, null); + _ = try sema.analyzeCall(&else_block, return_err_fn, src, src, .never_inline, false, &args, null); _ = try else_block.addUnOp(ret_tag, operand); try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.CondBr).Struct.fields.len + @@ -20414,7 +20521,7 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError } } const ensure_result_used = extra.flags.ensure_result_used; - return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, false, resolved_args, bound_arg_src); + return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, resolved_args, bound_arg_src); } fn zirFieldParentPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { @@ -21848,7 +21955,7 @@ fn panicWithMsg( Value.@"null", ); const args: [3]Air.Inst.Ref = .{ msg_inst, null_stack_trace, .null_value }; - _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, false, &args, null); + _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, &args, null); return always_noreturn; } @@ -21889,7 +21996,7 @@ fn panicUnwrapError( const err = try fail_block.addTyOp(unwrap_err_tag, Type.anyerror, operand); const err_return_trace = try sema.getErrorReturnTrace(&fail_block, src); const args: [2]Air.Inst.Ref = .{ err_return_trace, err }; - _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, false, &args, null); + _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, &args, null); } } try sema.addSafetyCheckExtra(parent_block, ok, &fail_block); @@ -21930,7 +22037,7 @@ fn panicIndexOutOfBounds( } else { const panic_fn = try sema.getBuiltin(&fail_block, src, "panicOutOfBounds"); const args: [2]Air.Inst.Ref = .{ index, len }; - _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, false, &args, null); + _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, &args, null); } } try sema.addSafetyCheckExtra(parent_block, ok, &fail_block); @@ -21972,7 +22079,7 @@ fn panicSentinelMismatch( else { const panic_fn = try sema.getBuiltin(parent_block, src, "checkNonScalarSentinel"); const args: [2]Air.Inst.Ref = .{ expected_sentinel, actual_sentinel }; - _ = try sema.analyzeCall(parent_block, panic_fn, src, src, .auto, false, false, &args, null); + _ 
= try sema.analyzeCall(parent_block, panic_fn, src, src, .auto, false, &args, null); return; }; const gpa = sema.gpa; @@ -22001,7 +22108,7 @@ fn panicSentinelMismatch( } else { const panic_fn = try sema.getBuiltin(&fail_block, src, "panicSentinelMismatch"); const args: [2]Air.Inst.Ref = .{ expected_sentinel, actual_sentinel }; - _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, false, &args, null); + _ = try sema.analyzeCall(&fail_block, panic_fn, src, src, .auto, false, &args, null); } } try sema.addSafetyCheckExtra(parent_block, ok, &fail_block); diff --git a/test/stack_traces.zig b/test/stack_traces.zig index 1494f86104..24c2b16373 100644 --- a/test/stack_traces.zig +++ b/test/stack_traces.zig @@ -260,6 +260,75 @@ pub fn addCases(cases: *tests.StackTracesContext) void { }, }); + cases.addCase(.{ + .name = "error passed to function has its trace preserved for duration of the call", + .source = + \\pub fn expectError(expected_error: anyerror, actual_error: anyerror!void) !void { + \\ actual_error catch |err| { + \\ if (err == expected_error) return {}; + \\ }; + \\ return error.TestExpectedError; + \\} + \\ + \\fn alwaysErrors() !void { return error.ThisErrorShouldNotAppearInAnyTrace; } + \\fn foo() !void { return error.Foo; } + \\ + \\pub fn main() !void { + \\ try expectError(error.ThisErrorShouldNotAppearInAnyTrace, alwaysErrors()); + \\ try expectError(error.ThisErrorShouldNotAppearInAnyTrace, alwaysErrors()); + \\ try expectError(error.Foo, foo()); + \\ + \\ // Only the error trace for this failing check should appear: + \\ try expectError(error.Bar, foo()); + \\} + , + .Debug = .{ + .expect = + \\error: TestExpectedError + \\source.zig:9:18: [address] in foo (test) + \\fn foo() !void { return error.Foo; } + \\ ^ + \\source.zig:5:5: [address] in expectError (test) + \\ return error.TestExpectedError; + \\ ^ + \\source.zig:17:5: [address] in main (test) + \\ try expectError(error.Bar, foo()); + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + }, + .expect = + \\error: TestExpectedError + \\source.zig:9:18: [address] in [function] + \\fn foo() !void { return error.Foo; } + \\ ^ + \\source.zig:5:5: [address] in [function] + \\ return error.TestExpectedError; + \\ ^ + \\source.zig:17:5: [address] in [function] + \\ try expectError(error.Bar, foo()); + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: TestExpectedError + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: TestExpectedError + \\ + , + }, + }); + cases.addCase(.{ .name = "try return from within catch", .source = From 597ead5318421befba3619fed389820d241ecc78 Mon Sep 17 00:00:00 2001 From: Cody Tapscott Date: Fri, 23 Sep 2022 14:40:55 -0700 Subject: [PATCH 08/12] stage2: Fix usage of getError() Despite the old doc-comment, this function cannot be valid for all types since it operates with only a value and Error (Union) types have overlapping Value representations with other Types. 
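To make the hazard concrete, here is a minimal standalone sketch of why a
value-only query cannot be trusted (the `Value`/`Type` shapes below are
invented for illustration and are not the compiler's actual representations):
two unrelated types can share one payload encoding, so the query has to be
gated on the type first, which is what the new
`if (!operand_ty.isError()) return;` check in `maybeErrorUnwrapCondbr` below
does before consulting `val.getError()`.

```zig
const std = @import("std");

// Invented stand-ins for this sketch only -- not the compiler's Value/Type.
const Value = union(enum) {
    int: u64,
    // In this sketch the same payload encoding is shared by error values
    // and enum literals, mirroring how real Value tags overlap across types.
    name: []const u8,
};

const Type = enum { int, error_set, enum_literal };

fn getError(ty: Type, val: Value) ?[]const u8 {
    // The guard mirrored from the patch: without knowing the type, a
    // `.name` payload is ambiguous, so bail out for non-error types.
    if (ty != .error_set) return null;
    return switch (val) {
        .name => |n| n,
        else => null,
    };
}

pub fn main() void {
    const err_val: Value = .{ .name = "OutOfMemory" };
    const lit_val: Value = .{ .name = "blue" }; // identical encoding, not an error
    if (getError(.error_set, err_val)) |name| {
        std.debug.print("is error: {s}\n", .{name});
    }
    if (getError(.enum_literal, lit_val) == null) {
        std.debug.print("not an error, despite identical payload\n", .{});
    }
}
```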
--- src/Sema.zig | 1 + src/value.zig | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/Sema.zig b/src/Sema.zig index 70cff08e29..6daedd12cc 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -11107,6 +11107,7 @@ fn maybeErrorUnwrapCondbr(sema: *Sema, block: *Block, body: []const Zir.Inst.Ind return; } if (try sema.resolveDefinedValue(block, cond_src, err_operand)) |val| { + if (!operand_ty.isError()) return; if (val.getError() == null) return; try sema.maybeErrorUnwrapComptime(block, body, err_operand); } diff --git a/src/value.zig b/src/value.zig index ee5b357a70..d24c5a1c17 100644 --- a/src/value.zig +++ b/src/value.zig @@ -2971,9 +2971,10 @@ pub const Value = extern union { }; } - /// Valid for all types. Asserts the value is not undefined and not unreachable. - /// Prefer `errorUnionIsPayload` to find out whether something is an error or not - /// because it works without having to figure out the string. + /// Valid only for error (union) types. Asserts the value is not undefined and not + /// unreachable. For error unions, prefer `errorUnionIsPayload` to find out whether + /// something is an error or not because it works without having to figure out the + /// string. pub fn getError(self: Value) ?[]const u8 { return switch (self.tag()) { .@"error" => self.castTag(.@"error").?.data.name, From d060cbbec75ac7b0204c706e4dfdfb38f1b24dfd Mon Sep 17 00:00:00 2001 From: Cody Tapscott Date: Sun, 25 Sep 2022 19:51:38 -0700 Subject: [PATCH 09/12] stage2: Keep error return traces alive when storing to `const` This change extends the "lifetime" of the error return trace associated with an error to continue throughout the block of a `const` variable that it is assigned to. This is necessary to support patterns like this one in test_runner.zig: ```zig const result = foo(); if (result) |_| { // ... success logic } else |err| { // `foo()` should be included in the error trace here return error.TestFailed; } ``` To make this happen, the majority of the error return trace popping logic needed to move into Sema, since `const x = foo();` cannot be examined syntactically to determine whether it modifies the error return trace. We also have to make sure not to delete pertinent block information before it makes it to Sema, so that Sema can pop/restore around blocks correctly. * Why do this only for `const` and not `var`? * There is room to relax things for `var`, but only a little bit. We could do the same thing we do for const and keep the error trace alive for the remainder of the block where the *assignment* happens. Any wider scope would violate the stack discipline for traces, so it's not viable. In the end, I decided the most consistent behavior for the user is just to kill all error return traces assigned to a mutable `var`. --- lib/test_runner.zig | 35 ++-- src/Air.zig | 2 +- src/AstGen.zig | 346 ++++++++++++++++++++++------------------ src/Module.zig | 7 + src/Sema.zig | 225 +++++++++++++++++--------- src/Zir.zig | 23 ++- src/print_zir.zig | 21 ++- test/behavior/error.zig | 13 ++ test/stack_traces.zig | 184 ++++++++++++++++++++- 9 files changed, 601 insertions(+), 255 deletions(-) diff --git a/lib/test_runner.zig b/lib/test_runner.zig index 8cde13f9a6..aafaf1b073 100644 --- a/lib/test_runner.zig +++ b/lib/test_runner.zig @@ -44,24 +44,23 @@ pub fn main() void { if (!have_tty) { std.debug.print("{d}/{d} {s}... 
", .{ i + 1, test_fn_list.len, test_fn.name }); } - if (result: { - if (test_fn.async_frame_size) |size| switch (io_mode) { - .evented => { - if (async_frame_buffer.len < size) { - std.heap.page_allocator.free(async_frame_buffer); - async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory"); - } - const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func); - break :result await @asyncCall(async_frame_buffer, {}, casted_fn, .{}); - }, - .blocking => { - skip_count += 1; - test_node.end(); - progress.log("SKIP (async test)\n", .{}); - continue; - }, - } else break :result test_fn.func(); - }) |_| { + const result = if (test_fn.async_frame_size) |size| switch (io_mode) { + .evented => blk: { + if (async_frame_buffer.len < size) { + std.heap.page_allocator.free(async_frame_buffer); + async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory"); + } + const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func); + break :blk await @asyncCall(async_frame_buffer, {}, casted_fn, .{}); + }, + .blocking => { + skip_count += 1; + test_node.end(); + progress.log("SKIP (async test)\n", .{}); + continue; + }, + } else test_fn.func(); + if (result) |_| { ok_count += 1; test_node.end(); if (!have_tty) std.debug.print("OK\n", .{}); diff --git a/src/Air.zig b/src/Air.zig index 1cd5d85d75..3bcbdb8e98 100644 --- a/src/Air.zig +++ b/src/Air.zig @@ -734,7 +734,7 @@ pub const Inst = struct { addrspace_cast, /// Saves the error return trace index, if any. Otherwise, returns 0. - /// Uses the `ty_op` field. + /// Uses the `ty_pl` field. save_err_return_trace_index, pub fn fromCmpOp(op: std.math.CompareOperator, optimized: bool) Tag { diff --git a/src/AstGen.zig b/src/AstGen.zig index 0250abfdbc..07a972eaab 100644 --- a/src/AstGen.zig +++ b/src/AstGen.zig @@ -337,6 +337,8 @@ pub const ResultInfo = struct { shift_op, /// The expression is an argument in a function call. fn_arg, + /// The expression is the right-hand side of an initializer for a `const` variable + const_init, /// No specific operator in particular. none, }; @@ -1850,6 +1852,45 @@ fn comptimeExprAst( return result; } +/// Restore the error return trace index. Performs the restore only if the result is a non-error or +/// if the result location is a non-error-handling expression. +fn restoreErrRetIndex( + gz: *GenZir, + bt: GenZir.BranchTarget, + ri: ResultInfo, + node: Ast.Node.Index, + result: Zir.Inst.Ref, +) !void { + const op = switch (nodeMayEvalToError(gz.astgen.tree, node)) { + .always => return, // never restore/pop + .never => .none, // always restore/pop + .maybe => switch (ri.ctx) { + .error_handling_expr, .@"return", .fn_arg, .const_init => switch (ri.rl) { + .ptr => |ptr_res| try gz.addUnNode(.load, ptr_res.inst, node), + .inferred_ptr => |ptr| try gz.addUnNode(.load, ptr, node), + .block_ptr => |block_scope| if (block_scope.rvalue_rl_count != block_scope.break_count) b: { + // The result location may have been used by this expression, in which case + // the operand is not the result and we need to load the rl ptr. + switch (gz.astgen.instructions.items(.tag)[Zir.refToIndex(block_scope.rl_ptr).?]) { + .alloc_inferred, .alloc_inferred_mut => { + // This is a terrible workaround for Sema's inability to load from a .alloc_inferred ptr + // before its type has been resolved. 
The operand we use here instead is not guaranteed + // to be valid, and when it's not, we will pop error traces prematurely. + // + // TODO: Update this to do a proper load from the rl_ptr, once Sema can support it. + break :b result; + }, + else => break :b try gz.addUnNode(.load, block_scope.rl_ptr, node), + } + } else result, + else => result, + }, + else => .none, // always restore/pop + }, + }; + _ = try gz.addRestoreErrRetIndex(bt, .{ .if_non_error = op }); +} + fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref { const astgen = parent_gz.astgen; const tree = astgen.tree; @@ -1857,13 +1898,6 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn const break_label = node_datas[node].lhs; const rhs = node_datas[node].rhs; - // Breaking out of a `catch { ... }` or `else |err| { ... }` block with a non-error value - // means that the corresponding error was correctly handled, and the error trace index - // needs to be restored so that any entries from the caught error are effectively "popped" - // - // Note: We only restore for the outermost block, since that will "pop" any nested blocks. - var err_trace_index_to_restore: Zir.Inst.Ref = .none; - // Look for the label in the scope. var scope = parent_scope; while (true) { @@ -1882,11 +1916,6 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn }); } - if (block_gz.saved_err_trace_index != .none) { - // We are breaking out of a `catch { ... }` or `else |err| { ... }`. - err_trace_index_to_restore = block_gz.saved_err_trace_index; - } - const block_inst = blk: { if (break_label != 0) { if (block_gz.label) |*label| { @@ -1913,10 +1942,8 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn try genDefers(parent_gz, scope, parent_scope, .normal_only); // As our last action before the break, "pop" the error trace if needed - if (err_trace_index_to_restore != .none) { - // void is a non-error so we always pop - no need to call `popErrorReturnTrace` - _ = try parent_gz.addUnNode(.restore_err_ret_index, err_trace_index_to_restore, node); - } + if (!block_gz.force_comptime) + _ = try parent_gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always); _ = try parent_gz.addBreak(break_tag, block_inst, .void_value); return Zir.Inst.Ref.unreachable_value; @@ -1929,17 +1956,8 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn try genDefers(parent_gz, scope, parent_scope, .normal_only); // As our last action before the break, "pop" the error trace if needed - if (err_trace_index_to_restore != .none) { - // Pop the error trace, unless the operand is an error and breaking to an error-handling expr. - try popErrorReturnTrace( - parent_gz, - scope, - block_gz.break_result_info, - rhs, - operand, - err_trace_index_to_restore, - ); - } + if (!block_gz.force_comptime) + try restoreErrRetIndex(parent_gz, .{ .block = block_inst }, block_gz.break_result_info, rhs, operand); switch (block_gz.break_result_info.rl) { .block_ptr => { @@ -2066,8 +2084,34 @@ fn blockExpr( return labeledBlockExpr(gz, scope, ri, block_node, statements); } - var sub_gz = gz.makeSubBlock(scope); - try blockExprStmts(&sub_gz, &sub_gz.base, statements); + if (!gz.force_comptime) { + // Since this block is unlabeled, its control flow is effectively linear and we + // can *almost* get away with inlining the block here. However, we actually need + // to preserve the .block for Sema, to properly pop the error return trace. 
+ + const block_tag: Zir.Inst.Tag = .block; + const block_inst = try gz.makeBlockInst(block_tag, block_node); + try gz.instructions.append(astgen.gpa, block_inst); + + var block_scope = gz.makeSubBlock(scope); + defer block_scope.unstack(); + + try blockExprStmts(&block_scope, &block_scope.base, statements); + + if (!block_scope.endsWithNoReturn()) { + // As our last action before the break, "pop" the error trace if needed + _ = try gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always); + + const break_tag: Zir.Inst.Tag = if (block_scope.force_comptime) .break_inline else .@"break"; + _ = try block_scope.addBreak(break_tag, block_inst, .void_value); + } + + try block_scope.setBlockBody(block_inst); + } else { + var sub_gz = gz.makeSubBlock(scope); + try blockExprStmts(&sub_gz, &sub_gz.base, statements); + } + return rvalue(gz, ri, .void_value, block_node); } @@ -2141,6 +2185,9 @@ fn labeledBlockExpr( try blockExprStmts(&block_scope, &block_scope.base, statements); if (!block_scope.endsWithNoReturn()) { + // As our last action before the return, "pop" the error trace if needed + _ = try gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always); + const break_tag: Zir.Inst.Tag = if (block_scope.force_comptime) .break_inline else .@"break"; _ = try block_scope.addBreak(break_tag, block_inst, .void_value); } @@ -2164,7 +2211,8 @@ fn labeledBlockExpr( return indexToRef(block_inst); }, .break_operand => { - // All break operands are values that did not use the result location pointer. + // All break operands are values that did not use the result location pointer + // (except for a single .store_to_block_ptr inst which we re-write here). // The break instructions need to have their operands coerced if the // block's result location is a `ty`. In this case we overwrite the // `store_to_block_ptr` instruction with an `as` instruction and repurpose @@ -2528,7 +2576,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As .try_ptr, //.try_inline, //.try_ptr_inline, - .save_err_ret_index, => break :b false, .extended => switch (gz.astgen.instructions.items(.data)[inst].extended.opcode) { @@ -2591,6 +2638,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As .validate_array_init_ty, .validate_struct_init_ty, .validate_deref, + .save_err_ret_index, .restore_err_ret_index, => break :b true, @@ -2877,7 +2925,8 @@ fn varDecl( { const result_info: ResultInfo = if (type_node != 0) .{ .rl = .{ .ty = try typeExpr(gz, scope, type_node) }, - } else .{ .rl = .none }; + .ctx = .const_init, + } else .{ .rl = .none, .ctx = .const_init }; const prev_anon_name_strategy = gz.anon_name_strategy; gz.anon_name_strategy = .dbg_var; const init_inst = try reachableExpr(gz, scope, result_info, var_decl.ast.init_node, node); @@ -2885,6 +2934,11 @@ fn varDecl( try gz.addDbgVar(.dbg_var_val, ident_name, init_inst); + // The const init expression may have modified the error return trace, so signal + // to Sema that it should save the new index for restoring later. 
+ if (nodeMayAppendToErrorTrace(tree, var_decl.ast.init_node)) + _ = try gz.addSaveErrRetIndex(.{ .if_of_error_type = init_inst }); + const sub_scope = try block_arena.create(Scope.LocalVal); sub_scope.* = .{ .parent = scope, @@ -2950,9 +3004,14 @@ fn varDecl( init_scope.rl_ptr = alloc; init_scope.rl_ty_inst = .none; } - const init_result_info: ResultInfo = .{ .rl = .{ .block_ptr = &init_scope } }; + const init_result_info: ResultInfo = .{ .rl = .{ .block_ptr = &init_scope }, .ctx = .const_init }; const init_inst = try reachableExpr(&init_scope, &init_scope.base, init_result_info, var_decl.ast.init_node, node); + // The const init expression may have modified the error return trace, so signal + // to Sema that it should save the new index for restoring later. + if (nodeMayAppendToErrorTrace(tree, var_decl.ast.init_node)) + _ = try init_scope.addSaveErrRetIndex(.{ .if_of_error_type = init_inst }); + const zir_tags = astgen.instructions.items(.tag); const zir_datas = astgen.instructions.items(.data); @@ -3775,6 +3834,9 @@ fn fnDecl( try checkUsed(gz, &fn_gz.base, params_scope); if (!fn_gz.endsWithNoReturn()) { + // As our last action before the return, "pop" the error trace if needed + _ = try gz.addRestoreErrRetIndex(.ret, .always); + // Since we are adding the return instruction here, we must handle the coercion. // We do this by using the `ret_tok` instruction. _ = try fn_gz.addUnTok(.ret_tok, .void_value, tree.lastToken(body_node)); @@ -4217,6 +4279,10 @@ fn testDecl( const block_result = try expr(&fn_block, &fn_block.base, .{ .rl = .none }, body_node); if (fn_block.isEmpty() or !fn_block.refIsNoReturn(block_result)) { + + // As our last action before the return, "pop" the error trace if needed + _ = try gz.addRestoreErrRetIndex(.ret, .always); + // Since we are adding the return instruction here, we must handle the coercion. // We do this by using the `ret_tok` instruction. _ = try fn_block.addUnTok(.ret_tok, .void_value, tree.lastToken(body_node)); @@ -5196,76 +5262,6 @@ fn tryExpr( } } -/// Pops the error return trace, unless: -/// 1. the result is a non-error, AND -/// 2. the result location corresponds to an error-handling expression -/// -/// For reference, the full list of error-handling expressions is: -/// - try X -/// - X catch ... -/// - if (X) |_| { ... } |_| { ... } -/// - return X -/// -fn popErrorReturnTrace( - gz: *GenZir, - scope: *Scope, - ri: ResultInfo, - node: Ast.Node.Index, - result_inst: Zir.Inst.Ref, - error_trace_index: Zir.Inst.Ref, -) InnerError!void { - const astgen = gz.astgen; - const tree = astgen.tree; - - const result_is_err = nodeMayEvalToError(tree, node); - - // If we are breaking to a try/catch/error-union-if/return or a function call, the error trace propagates. - const propagate_error_trace = switch (ri.ctx) { - .error_handling_expr, .@"return", .fn_arg => true, - else => false, - }; - - if (result_is_err == .never or !propagate_error_trace) { - // We are returning a non-error, or returning to a non-error-handling operator. - // In either case, we need to pop the error trace. - _ = try gz.addUnNode(.restore_err_ret_index, error_trace_index, node); - } else if (result_is_err == .maybe) { - // We are returning to an error-handling operator with a maybe-error. - // Restore only if it's a non-error, implying the catch was successfully handled. 
- var block_scope = gz.makeSubBlock(scope); - block_scope.setBreakResultInfo(.{ .rl = .discard }); - defer block_scope.unstack(); - - // Emit conditional branch for restoring error trace index - const is_non_err = switch (ri.rl) { - .ref => try block_scope.addUnNode(.is_non_err_ptr, result_inst, node), - .ptr => |ptr| try block_scope.addUnNode(.is_non_err_ptr, ptr.inst, node), - .ty, .none => try block_scope.addUnNode(.is_non_err, result_inst, node), - else => unreachable, // Error-handling operators only generate the above result locations - }; - const condbr = try block_scope.addCondBr(.condbr, node); - - const block = try gz.makeBlockInst(.block, node); - try block_scope.setBlockBody(block); - // block_scope unstacked now, can add new instructions to gz - - try gz.instructions.append(astgen.gpa, block); - - var then_scope = block_scope.makeSubBlock(scope); - defer then_scope.unstack(); - - _ = try then_scope.addUnNode(.restore_err_ret_index, error_trace_index, node); - const then_break = try then_scope.makeBreak(.@"break", block, .void_value); - - var else_scope = block_scope.makeSubBlock(scope); - defer else_scope.unstack(); - - const else_break = try else_scope.makeBreak(.@"break", block, .void_value); - - try setCondBrPayload(condbr, is_non_err, &then_scope, then_break, &else_scope, else_break); - } -} - fn orelseCatchExpr( parent_gz: *GenZir, scope: *Scope, @@ -5287,8 +5283,6 @@ fn orelseCatchExpr( block_scope.setBreakResultInfo(ri); defer block_scope.unstack(); - const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none; - const operand_ri: ResultInfo = switch (block_scope.break_result_info.rl) { .ref => .{ .rl = .ref, .ctx = if (do_err_trace) .error_handling_expr else .none }, else => .{ .rl = .none, .ctx = if (do_err_trace) .error_handling_expr else .none }, @@ -5320,11 +5314,10 @@ fn orelseCatchExpr( var else_scope = block_scope.makeSubBlock(scope); defer else_scope.unstack(); - // Any break (of a non-error value) that navigates out of this scope means - // that the error was handled successfully, so this index will be restored. - else_scope.saved_err_trace_index = saved_err_trace_index; - if (else_scope.outermost_err_trace_index == .none) - else_scope.outermost_err_trace_index = saved_err_trace_index; + // We know that the operand (almost certainly) modified the error return trace, + // so signal to Sema that it should save the new index for restoring later. 
+ if (do_err_trace and nodeMayAppendToErrorTrace(tree, lhs)) + _ = try else_scope.addSaveErrRetIndex(.always); var err_val_scope: Scope.LocalVal = undefined; const else_sub_scope = blk: { @@ -5352,16 +5345,9 @@ fn orelseCatchExpr( if (!else_scope.endsWithNoReturn()) { block_scope.break_count += 1; - if (do_err_trace) { - try popErrorReturnTrace( - &else_scope, - else_sub_scope, - block_scope.break_result_info, - rhs, - else_result, - saved_err_trace_index, - ); - } + // As our last action before the break, "pop" the error trace if needed + if (do_err_trace) + try restoreErrRetIndex(&else_scope, .{ .block = block }, block_scope.break_result_info, rhs, else_result); } try checkUsed(parent_gz, &else_scope.base, else_sub_scope); @@ -5587,8 +5573,6 @@ fn ifExpr( block_scope.setBreakResultInfo(ri); defer block_scope.unstack(); - const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none; - const payload_is_ref = if (if_full.payload_token) |payload_token| token_tags[payload_token] == .asterisk else @@ -5705,11 +5689,10 @@ fn ifExpr( var else_scope = parent_gz.makeSubBlock(scope); defer else_scope.unstack(); - // Any break (of a non-error value) that navigates out of this scope means - // that the error was handled successfully, so this index will be restored. - else_scope.saved_err_trace_index = saved_err_trace_index; - if (else_scope.outermost_err_trace_index == .none) - else_scope.outermost_err_trace_index = saved_err_trace_index; + // We know that the operand (almost certainly) modified the error return trace, + // so signal to Sema that it should save the new index for restoring later. + if (do_err_trace and nodeMayAppendToErrorTrace(tree, if_full.ast.cond_expr)) + _ = try else_scope.addSaveErrRetIndex(.always); const else_node = if_full.ast.else_expr; const else_info: struct { @@ -5747,16 +5730,9 @@ fn ifExpr( if (!else_scope.endsWithNoReturn()) { block_scope.break_count += 1; - if (do_err_trace) { - try popErrorReturnTrace( - &else_scope, - sub_scope, - block_scope.break_result_info, - else_node, - e, - saved_err_trace_index, - ); - } + // As our last action before the break, "pop" the error trace if needed + if (do_err_trace) + try restoreErrRetIndex(&else_scope, .{ .block = block }, block_scope.break_result_info, else_node, e); } try checkUsed(parent_gz, &else_scope.base, sub_scope); try else_scope.addDbgBlockEnd(); @@ -6886,6 +6862,10 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref if (operand_node == 0) { // Returning a void value; skip error defers. try genDefers(gz, defer_outer, scope, .normal_only); + + // As our last action before the return, "pop" the error trace if needed + _ = try gz.addRestoreErrRetIndex(.ret, .always); + _ = try gz.addUnNode(.ret_node, .void_value, node); return Zir.Inst.Ref.unreachable_value; } @@ -6921,15 +6901,13 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref const operand = try reachableExpr(gz, scope, ri, operand_node, node); gz.anon_name_strategy = prev_anon_name_strategy; - // TODO: This should be almost identical for every break/ret switch (nodeMayEvalToError(tree, operand_node)) { .never => { // Returning a value that cannot be an error; skip error defers. 
try genDefers(gz, defer_outer, scope, .normal_only); // As our last action before the return, "pop" the error trace if needed - if (gz.outermost_err_trace_index != .none) - _ = try gz.addUnNode(.restore_err_ret_index, gz.outermost_err_trace_index, node); + _ = try gz.addRestoreErrRetIndex(.ret, .always); try emitDbgStmt(gz, ret_line, ret_column); try gz.addRet(ri, operand, node); @@ -6949,6 +6927,11 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref // Only regular defers; no branch needed. try genDefers(gz, defer_outer, scope, .normal_only); try emitDbgStmt(gz, ret_line, ret_column); + + // As our last action before the return, "pop" the error trace if needed + const result = if (ri.rl == .ptr) try gz.addUnNode(.load, ri.rl.ptr.inst, node) else operand; + _ = try gz.addRestoreErrRetIndex(.ret, .{ .if_non_error = result }); + try gz.addRet(ri, operand, node); return Zir.Inst.Ref.unreachable_value; } @@ -6964,8 +6947,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref try genDefers(&then_scope, defer_outer, scope, .normal_only); // As our last action before the return, "pop" the error trace if needed - if (then_scope.outermost_err_trace_index != .none) - _ = try then_scope.addUnNode(.restore_err_ret_index, then_scope.outermost_err_trace_index, node); + _ = try then_scope.addRestoreErrRetIndex(.ret, .always); try emitDbgStmt(&then_scope, ret_line, ret_column); try then_scope.addRet(ri, operand, node); @@ -8561,10 +8543,11 @@ fn callExpr( scratch_index += 1; } - // If our result location is a try/catch/error-union-if/return, the error trace propagates. + // If our result location is a try/catch/error-union-if/return, a function argument, + // or an initializer for a `const` variable, the error trace propagates. // Otherwise, it should always be popped (handled in Sema). const propagate_error_trace = switch (ri.ctx) { - .error_handling_expr, .@"return", .fn_arg => true, // Propagate to try/catch/error-union-if, return, and other function calls + .error_handling_expr, .@"return", .fn_arg, .const_init => true, else => false, }; @@ -8932,6 +8915,33 @@ fn nodeMayNeedMemoryLocation(tree: *const Ast, start_node: Ast.Node.Index, have_ } } +fn nodeMayAppendToErrorTrace(tree: *const Ast, start_node: Ast.Node.Index) bool { + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + + var node = start_node; + while (true) { + switch (node_tags[node]) { + // These don't have the opportunity to call any runtime functions. + .error_value, + .identifier, + .@"comptime", + => return false, + + // Forward the question to the LHS sub-expression. + .grouped_expression, + .@"try", + .@"nosuspend", + .unwrap_optional, + => node = node_datas[node].lhs, + + // Anything that does not eval to an error is guaranteed to pop any + // additions to the error trace, so it effectively does not append. + else => return nodeMayEvalToError(tree, start_node) != .never, + } + } +} + fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.EvalToError { const node_tags = tree.nodes.items(.tag); const node_datas = tree.nodes.items(.data); @@ -10494,13 +10504,6 @@ const GenZir = struct { /// Keys are the raw instruction index, values are the closure_capture instruction. captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{}, - /// If this GenZir corresponds to a `catch { ... }` or `else |err| { ... }` block, - /// this err_trace_index can be restored to "pop" the trace entries for the block. 
- saved_err_trace_index: Zir.Inst.Ref = .none, - /// When returning from a function with a non-error, we must pop all trace entries - /// from any containing `catch { ... }` or `else |err| { ... }` blocks. - outermost_err_trace_index: Zir.Inst.Ref = .none, - const unstacked_top = std.math.maxInt(usize); /// Call unstack before adding any new instructions to containing GenZir. fn unstack(self: *GenZir) void { @@ -10545,7 +10548,6 @@ const GenZir = struct { .any_defer_node = gz.any_defer_node, .instructions = gz.instructions, .instructions_top = gz.instructions.items.len, - .outermost_err_trace_index = gz.outermost_err_trace_index, }; } @@ -11359,6 +11361,46 @@ const GenZir = struct { }); } + fn addSaveErrRetIndex( + gz: *GenZir, + cond: union(enum) { + always: void, + if_of_error_type: Zir.Inst.Ref, + }, + ) !Zir.Inst.Index { + return gz.addAsIndex(.{ + .tag = .save_err_ret_index, + .data = .{ .save_err_ret_index = .{ + .operand = if (cond == .if_of_error_type) cond.if_of_error_type else .none, + } }, + }); + } + + const BranchTarget = union(enum) { + ret, + block: Zir.Inst.Index, + }; + + fn addRestoreErrRetIndex( + gz: *GenZir, + bt: BranchTarget, + cond: union(enum) { + always: void, + if_non_error: Zir.Inst.Ref, + }, + ) !Zir.Inst.Index { + return gz.addAsIndex(.{ + .tag = .restore_err_ret_index, + .data = .{ .restore_err_ret_index = .{ + .block = switch (bt) { + .ret => .none, + .block => |b| Zir.indexToRef(b), + }, + .operand = if (cond == .if_non_error) cond.if_non_error else .none, + } }, + }); + } + fn addBreak( gz: *GenZir, tag: Zir.Inst.Tag, diff --git a/src/Module.zig b/src/Module.zig index 8483c41ae8..c23014f92a 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -5633,6 +5633,13 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air { const last_arg_index = inner_block.instructions.items.len; + // Save the error trace as our first action in the function. + // If this is unnecessary after all, Liveness will clean it up for us. + const err_ret_trace_index = try sema.analyzeSaveErrRetIndex(&inner_block); + inner_block.error_return_trace_index = err_ret_trace_index; + inner_block.error_return_trace_index_on_block_entry = err_ret_trace_index; + inner_block.error_return_trace_index_on_function_entry = err_ret_trace_index; + sema.analyzeBody(&inner_block, fn_info.body) catch |err| switch (err) { // TODO make these unreachable instead of @panic error.NeededSourceLocation => @panic("zig compiler bug: NeededSourceLocation"), diff --git a/src/Sema.zig b/src/Sema.zig index 6daedd12cc..12bf8d0404 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -153,6 +153,12 @@ pub const Block = struct { is_typeof: bool = false, is_coerce_result_ptr: bool = false, + /// Keep track of the active error return trace index around blocks so that we can correctly + /// pop the error trace upon block exit. 
+ error_return_trace_index: Air.Inst.Ref = .none, + error_return_trace_index_on_block_entry: Air.Inst.Ref = .none, + error_return_trace_index_on_function_entry: Air.Inst.Ref = .none, + /// when null, it is determined by build mode, changed by @setRuntimeSafety want_safety: ?bool = null, @@ -226,6 +232,9 @@ pub const Block = struct { .float_mode = parent.float_mode, .c_import_buf = parent.c_import_buf, .switch_else_err_ty = parent.switch_else_err_ty, + .error_return_trace_index = parent.error_return_trace_index, + .error_return_trace_index_on_block_entry = parent.error_return_trace_index, + .error_return_trace_index_on_function_entry = parent.error_return_trace_index_on_function_entry, }; } @@ -945,8 +954,6 @@ fn analyzeBodyInner( .ret_ptr => try sema.zirRetPtr(block, inst), .ret_type => try sema.addType(sema.fn_ret_ty), - .save_err_ret_index => try sema.zirSaveErrRetIndex(block, inst), - // Instructions that we know to *always* be noreturn based solely on their tag. // These functions match the return type of analyzeBody so that we can // tail call them here. @@ -1229,6 +1236,11 @@ fn analyzeBodyInner( i += 1; continue; }, + .save_err_ret_index => { + try sema.zirSaveErrRetIndex(block, inst); + i += 1; + continue; + }, .restore_err_ret_index => { try sema.zirRestoreErrRetIndex(block, inst); i += 1; @@ -1326,31 +1338,32 @@ fn analyzeBodyInner( const extra = sema.code.extraData(Zir.Inst.Block, inst_data.payload_index); const inline_body = sema.code.extra[extra.end..][0..extra.data.body_len]; const gpa = sema.gpa; - // If this block contains a function prototype, we need to reset the - // current list of parameters and restore it later. - // Note: this probably needs to be resolved in a more general manner. - const prev_params = block.params; - const need_sub_block = tags[inline_body[inline_body.len - 1]] == .repeat_inline; - var sub_block = block; - var block_space: Block = undefined; - // NOTE: this has to be done like this because branching in - // defers here breaks stage1. - block_space.instructions = .{}; - if (need_sub_block) { - block_space = block.makeSubBlock(); - block_space.inline_block = inline_body[0]; - sub_block = &block_space; - } - block.params = .{}; - defer { - block.params.deinit(gpa); - block.params = prev_params; - block_space.instructions.deinit(gpa); - } - const opt_break_data = try sema.analyzeBodyBreak(sub_block, inline_body); - if (need_sub_block) { - try block.instructions.appendSlice(gpa, block_space.instructions.items); - } + + const opt_break_data = b: { + // Create a temporary child block so that this inline block is properly + // labeled for any .restore_err_ret_index instructions + var child_block = block.makeSubBlock(); + + // If this block contains a function prototype, we need to reset the + // current list of parameters and restore it later. + // Note: this probably needs to be resolved in a more general manner. 
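// A minimal sketch (simplified types, not part of the diff) of the invariant
// the three `error_return_trace_index*` fields above maintain when a child
// block is created via `makeSubBlock`:
const TraceIndices = struct {
    current: u32, // live index, updated by each `save_err_ret_index`
    on_block_entry: u32, // snapshot used to pop when exiting this block
    on_function_entry: u32, // snapshot used to pop on a non-error return

    fn makeChild(parent: TraceIndices) TraceIndices {
        return .{
            .current = parent.current, // the child starts where the parent is,
            .on_block_entry = parent.current, // which is its entry snapshot,
            .on_function_entry = parent.on_function_entry, // inherited as-is.
        };
    }
};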
+ if (tags[inline_body[inline_body.len - 1]] == .repeat_inline) { + child_block.inline_block = inline_body[0]; + } else child_block.inline_block = block.inline_block; + + var label: Block.Label = .{ + .zir_block = inst, + .merges = undefined, + }; + child_block.label = &label; + defer child_block.params.deinit(gpa); + + // Write these instructions directly into the parent block + child_block.instructions = block.instructions; + defer block.instructions = child_block.instructions; + + break :b try sema.analyzeBodyBreak(&child_block, inline_body); + }; // A runtime conditional branch that needs a post-hoc block to be // emitted communicates this by mapping the block index into the inst map. @@ -4994,7 +5007,7 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErro // Reserve space for a Block instruction so that generated Break instructions can // point to it, even if it doesn't end up getting used because the code ends up being - // comptime evaluated. + // comptime evaluated or is an unlabeled block. const block_inst = @intCast(Air.Inst.Index, sema.air_instructions.len); try sema.air_instructions.append(gpa, .{ .tag = .block, @@ -5025,6 +5038,9 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErro .runtime_cond = parent_block.runtime_cond, .runtime_loop = parent_block.runtime_loop, .runtime_index = parent_block.runtime_index, + .error_return_trace_index = parent_block.error_return_trace_index, + .error_return_trace_index_on_block_entry = parent_block.error_return_trace_index, + .error_return_trace_index_on_function_entry = parent_block.error_return_trace_index_on_function_entry, }; defer child_block.instructions.deinit(gpa); @@ -5667,19 +5683,51 @@ fn funcDeclSrc(sema: *Sema, block: *Block, src: LazySrcLoc, func_inst: Air.Inst. return owner_decl.srcLoc(); } +pub fn analyzeSaveErrRetIndex(sema: *Sema, block: *Block) SemaError!Air.Inst.Ref { + const src = sema.src; + + const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; + if (!backend_supports_error_return_tracing or !sema.mod.comp.bin_file.options.error_return_tracing) + return .none; + + if (block.is_comptime) + return .none; + + const unresolved_stack_trace_ty = sema.getBuiltinType(block, src, "StackTrace") catch |err| switch (err) { + error.NeededSourceLocation, error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable, + else => |e| return e, + }; + const stack_trace_ty = sema.resolveTypeFields(block, src, unresolved_stack_trace_ty) catch |err| switch (err) { + error.NeededSourceLocation, error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable, + else => |e| return e, + }; + const field_index = sema.structFieldIndex(block, stack_trace_ty, "index", src) catch |err| switch (err) { + error.NeededSourceLocation, error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable, + else => |e| return e, + }; + + return try block.addInst(.{ + .tag = .save_err_return_trace_index, + .data = .{ .ty_pl = .{ + .ty = try sema.addType(stack_trace_ty), + .payload = @intCast(u32, field_index), + } }, + }); +} + /// Add instructions to block to "pop" the error return trace. /// If `operand` is provided, only pops if operand is non-error. 
fn popErrorReturnTrace( sema: *Sema, block: *Block, src: LazySrcLoc, - operand: ?Air.Inst.Ref, + operand: Air.Inst.Ref, saved_error_trace_index: Air.Inst.Ref, ) CompileError!void { var is_non_error: ?bool = null; var is_non_error_inst: Air.Inst.Ref = undefined; - if (operand) |op| { - is_non_error_inst = try sema.analyzeIsNonErr(block, src, op); + if (operand != .none) { + is_non_error_inst = try sema.analyzeIsNonErr(block, src, operand); if (try sema.resolveDefinedValue(block, src, is_non_error_inst)) |cond_val| is_non_error = cond_val.toBool(); } else is_non_error = true; // no operand means pop unconditionally @@ -5906,7 +5954,7 @@ fn zirCall( }); // Pop the error return trace, testing the result for non-error if necessary - const operand = if (pop_error_return_trace or modifier == .always_tail) null else call_inst; + const operand = if (pop_error_return_trace or modifier == .always_tail) .none else call_inst; try sema.popErrorReturnTrace(block, call_src, operand, save_inst); } @@ -6221,6 +6269,9 @@ fn analyzeCall( .label = null, .inlining = &inlining, .is_comptime = is_comptime_call, + .error_return_trace_index = block.error_return_trace_index, + .error_return_trace_index_on_block_entry = block.error_return_trace_index, + .error_return_trace_index_on_function_entry = block.error_return_trace_index, }; const merges = &child_block.inlining.?.merges; @@ -6966,6 +7017,14 @@ fn instantiateGenericCall( } arg_i += 1; } + + // Save the error trace as our first action in the function. + // If this is unnecessary after all, Liveness will clean it up for us. + const err_ret_trace_index = try sema.analyzeSaveErrRetIndex(&child_block); + child_block.error_return_trace_index = err_ret_trace_index; + child_block.error_return_trace_index_on_block_entry = err_ret_trace_index; + child_block.error_return_trace_index_on_function_entry = err_ret_trace_index; + const new_func_inst = child_sema.resolveBody(&child_block, fn_info.param_body, fn_info.param_body_inst) catch |err| { // TODO look up the compile error that happened here and attach a note to it // pointing here, at the generic instantiation callsite. 
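For orientation, the runtime semantics of popErrorReturnTrace can be sketched
as follows. This is a hedged simplification: the real code stores through a
pointer to the StackTrace `index` field and emits a runtime branch when the
non-error status is not comptime-known.

fn popSketch(trace_index: *usize, saved_index: usize, is_non_error: bool) void {
    // A non-error result means every frame appended since `saved_index` has
    // been handled, so the trace is truncated back to that depth. An error
    // result keeps the frames so a caller's handler can report them.
    if (is_non_error) trace_index.* = saved_index;
}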
@@ -9855,6 +9914,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError .defer_err_code, .err_union_code, .ret_err_value_code, + .restore_err_ret_index, .is_non_err, .condbr, => {}, @@ -10157,6 +10217,9 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError .runtime_cond = block.runtime_cond, .runtime_loop = block.runtime_loop, .runtime_index = block.runtime_index, + .error_return_trace_index = block.error_return_trace_index, + .error_return_trace_index_on_block_entry = block.error_return_trace_index, + .error_return_trace_index_on_function_entry = block.error_return_trace_index_on_function_entry, }; const merges = &child_block.label.?.merges; defer child_block.instructions.deinit(gpa); @@ -11040,6 +11103,7 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op const tags = sema.code.instructions.items(.tag); for (body) |inst| { switch (tags[inst]) { + .save_err_ret_index, .dbg_block_begin, .dbg_block_end, .dbg_stmt, @@ -11062,6 +11126,10 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op try sema.zirDbgStmt(block, inst); continue; }, + .save_err_ret_index => { + try sema.zirSaveErrRetIndex(block, inst); + continue; + }, .str => try sema.zirStr(block, inst), .as_node => try sema.zirAsNode(block, inst), .field_val => try sema.zirFieldVal(block, inst), @@ -15672,6 +15740,9 @@ fn zirTypeofBuiltin(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr .is_comptime = false, .is_typeof = true, .want_safety = false, + .error_return_trace_index = block.error_return_trace_index, + .error_return_trace_index_on_block_entry = block.error_return_trace_index, + .error_return_trace_index_on_function_entry = block.error_return_trace_index_on_function_entry, }; defer child_block.instructions.deinit(sema.gpa); @@ -16329,61 +16400,67 @@ fn wantErrorReturnTracing(sema: *Sema, fn_ret_ty: Type) bool { backend_supports_error_return_tracing; } -fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { - const inst_data = sema.code.instructions.items(.data)[inst].node; - const src = LazySrcLoc.nodeOffset(inst_data); - - // This is only relevant at runtime. - if (block.is_comptime) return Air.Inst.Ref.zero_usize; +fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void { + const inst_data = sema.code.instructions.items(.data)[inst].save_err_ret_index; const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; - const ok = sema.mod.comp.bin_file.options.error_return_tracing and - backend_supports_error_return_tracing; - if (!ok) return Air.Inst.Ref.zero_usize; - - // This is encoded as a primitive AIR instruction to resolve one corner case: A function - // may include a `catch { ... }` or `else |err| { ... }` block but not call any errorable - // fn. In that case, there is no error return trace to save the index of and codegen needs - // to avoid interacting with the non-existing error trace. - // - // By using a primitive AIR op, we can depend on Liveness to mark this unused in this corner case. 
- - const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace"); - const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty); - const field_index = try sema.structFieldIndex(block, stack_trace_ty, "index", src); - return block.addInst(.{ - .tag = .save_err_return_trace_index, - .data = .{ .ty_pl = .{ - .ty = try sema.addType(stack_trace_ty), - .payload = @intCast(u32, field_index), - } }, - }); -} - -fn zirRestoreErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void { - const inst_data = sema.code.instructions.items(.data)[inst].un_node; - const src = inst_data.src(); + const ok = backend_supports_error_return_tracing and sema.mod.comp.bin_file.options.error_return_tracing; + if (!ok) return; // This is only relevant at runtime. if (block.is_comptime) return; + // This is only relevant within functions. + if (sema.func == null) return; + + const save_index = inst_data.operand == .none or b: { + const operand = try sema.resolveInst(inst_data.operand); + const operand_ty = sema.typeOf(operand); + break :b operand_ty.isError(); + }; + + if (save_index) + block.error_return_trace_index = try sema.analyzeSaveErrRetIndex(block); +} + +fn zirRestoreErrRetIndex(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) CompileError!void { + const inst_data = sema.code.instructions.items(.data)[inst].restore_err_ret_index; + const src = sema.src; // TODO + + // This is only relevant at runtime. + if (start_block.is_comptime) return; + const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm; const ok = sema.owner_func.?.calls_or_awaits_errorable_fn and sema.mod.comp.bin_file.options.error_return_tracing and backend_supports_error_return_tracing; if (!ok) return; - const operand = if (inst_data.operand != .none) - try sema.resolveInst(inst_data.operand) - else - .zero_usize; + const tracy = trace(@src()); + defer tracy.end(); - const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace"); - const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty); - const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty); - const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty); - const field_ptr = try sema.structFieldPtr(block, src, err_return_trace, "index", src, stack_trace_ty, true); - try sema.storePtr2(block, src, field_ptr, src, operand, src, .store); + const saved_index = if (Zir.refToIndex(inst_data.block)) |zir_block| b: { + var block = start_block; + while (true) { + if (block.label) |label| { + if (label.zir_block == zir_block) { + if (start_block.error_return_trace_index != block.error_return_trace_index_on_block_entry) + break :b block.error_return_trace_index_on_block_entry; + return; // No need to restore + } + } + block = block.parent.?; + } + } else b: { + if (start_block.error_return_trace_index != start_block.error_return_trace_index_on_function_entry) + break :b start_block.error_return_trace_index_on_function_entry; + return; // No need to restore + }; + + assert(saved_index != .none); // The .error_return_trace_index field was dropped somewhere + + const operand = try sema.resolveInst(inst_data.operand); + return sema.popErrorReturnTrace(start_block, src, operand, saved_index); } fn addToInferredErrorSet(sema: *Sema, uncasted_operand: Air.Inst.Ref) !void { diff --git a/src/Zir.zig b/src/Zir.zig index d616ea6d14..b17d6d7fec 100644 --- a/src/Zir.zig +++ b/src/Zir.zig @@ 
-988,13 +988,13 @@ pub const Inst = struct { /// Uses the `err_defer_code` union field. defer_err_code, - /// Saves the current error return case if it exists, - /// otherwise just returns zero. - /// Uses the `node` union field. + /// Requests that Sema update the saved error return trace index for the enclosing + /// block, if the operand is .none or of an error/error-union type. + /// Uses the `save_err_ret_index` field. save_err_ret_index, /// Sets error return trace to zero if no operand is given, /// otherwise sets the value to the given amount. - /// Uses the `un_node` union field. + /// Uses the `restore_err_ret_index` union field. restore_err_ret_index, /// The ZIR instruction tag is one of the `Extended` ones. @@ -1317,6 +1317,7 @@ pub const Inst = struct { .@"defer", .defer_err_code, .restore_err_ret_index, + .save_err_ret_index, => true, .param, @@ -1542,7 +1543,6 @@ pub const Inst = struct { .try_ptr, //.try_inline, //.try_ptr_inline, - .save_err_ret_index, => false, .extended => switch (data.extended.opcode) { @@ -1823,8 +1823,8 @@ pub const Inst = struct { .@"defer" = .@"defer", .defer_err_code = .defer_err_code, - .save_err_ret_index = .node, - .restore_err_ret_index = .un_node, + .save_err_ret_index = .save_err_ret_index, + .restore_err_ret_index = .restore_err_ret_index, .extended = .extended, }); @@ -2602,6 +2602,13 @@ pub const Inst = struct { err_code: Ref, payload_index: u32, }, + save_err_ret_index: struct { + operand: Ref, // If error type (or .none), save new trace index + }, + restore_err_ret_index: struct { + block: Ref, // If restored, the index is from this block's entrypoint + operand: Ref, // If non-error (or .none), then restore the index + }, // Make sure we don't accidentally add a field to make this union // bigger than expected. 
Note that in Debug builds, Zig is allowed @@ -2640,6 +2647,8 @@ pub const Inst = struct { str_op, @"defer", defer_err_code, + save_err_ret_index, + restore_err_ret_index, }; }; diff --git a/src/print_zir.zig b/src/print_zir.zig index 5d336b5a73..f1b1068920 100644 --- a/src/print_zir.zig +++ b/src/print_zir.zig @@ -232,7 +232,6 @@ const Writer = struct { .validate_deref, .overflow_arithmetic_ptr, .check_comptime_control_flow, - .restore_err_ret_index, => try self.writeUnNode(stream, inst), .ref, @@ -255,6 +254,9 @@ const Writer = struct { .str => try self.writeStr(stream, inst), .int_type => try self.writeIntType(stream, inst), + .save_err_ret_index => try self.writeSaveErrRetIndex(stream, inst), + .restore_err_ret_index => try self.writeRestoreErrRetIndex(stream, inst), + .@"break", .break_inline, => try self.writeBreak(stream, inst), @@ -406,7 +408,6 @@ const Writer = struct { .alloc_inferred_comptime_mut, .ret_ptr, .ret_type, - .save_err_ret_index, => try self.writeNode(stream, inst), .error_value, @@ -2274,6 +2275,22 @@ const Writer = struct { try self.writeSrc(stream, int_type.src()); } + fn writeSaveErrRetIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + const inst_data = self.code.instructions.items(.data)[inst].save_err_ret_index; + + try self.writeInstRef(stream, inst_data.operand); + try stream.writeAll(")"); + } + + fn writeRestoreErrRetIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + const inst_data = self.code.instructions.items(.data)[inst].restore_err_ret_index; + + try self.writeInstRef(stream, inst_data.block); + try stream.writeAll(", "); + try self.writeInstRef(stream, inst_data.operand); + try stream.writeAll(")"); + } + fn writeBreak(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[inst].@"break"; diff --git a/test/behavior/error.zig b/test/behavior/error.zig index c93d7aebf7..6b64a0dc01 100644 --- a/test/behavior/error.zig +++ b/test/behavior/error.zig @@ -830,3 +830,16 @@ test "compare error union and error set" { try expect(a != b); try expect(b != a); } + +fn non_errorable() void { + // Make sure catch works even in a function that does not call any errorable functions. + // + // This test is needed because stage 2's fix for #1923 means that catch blocks interact + // with the error return trace index. 
+ var x: error{Foo}!void = {}; + return x catch {}; +} + +test "catch within a function that calls no errorable functions" { + non_errorable(); +} diff --git a/test/stack_traces.zig b/test/stack_traces.zig index 24c2b16373..ebd910563b 100644 --- a/test/stack_traces.zig +++ b/test/stack_traces.zig @@ -97,6 +97,59 @@ pub fn addCases(cases: *tests.StackTracesContext) void { , }, }); + cases.addCase(.{ + .name = "non-error return pops error trace", + .source = + \\fn bar() !void { + \\ return error.UhOh; + \\} + \\ + \\fn foo() !void { + \\ bar() catch { + \\ return; // non-error result: success + \\ }; + \\} + \\ + \\pub fn main() !void { + \\ try foo(); + \\ return error.UnrelatedError; + \\} + , + .Debug = .{ + .expect = + \\error: UnrelatedError + \\source.zig:13:5: [address] in main (test) + \\ return error.UnrelatedError; + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + .linux, // defeated by aggressive inlining + }, + .expect = + \\error: UnrelatedError + \\source.zig:13:5: [address] in [function] + \\ return error.UnrelatedError; + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: UnrelatedError + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: UnrelatedError + \\ + , + }, + }); cases.addCase(.{ .name = "try return + handled catch/if-else", @@ -155,6 +208,59 @@ pub fn addCases(cases: *tests.StackTracesContext) void { }, }); + cases.addCase(.{ + .name = "break from inline loop pops error return trace", + .source = + \\fn foo() !void { return error.FooBar; } + \\ + \\pub fn main() !void { + \\ comptime var i: usize = 0; + \\ b: inline while (i < 5) : (i += 1) { + \\ foo() catch { + \\ break :b; // non-error break, success + \\ }; + \\ } + \\ // foo() was successfully handled, should not appear in trace + \\ + \\ return error.BadTime; + \\} + , + .Debug = .{ + .expect = + \\error: BadTime + \\source.zig:12:5: [address] in main (test) + \\ return error.BadTime; + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + .linux, // defeated by aggressive inlining + }, + .expect = + \\error: BadTime + \\source.zig:12:5: [address] in [function] + \\ return error.BadTime; + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: BadTime + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: BadTime + \\ + , + }, + }); + cases.addCase(.{ .name = "catch and re-throw error", .source = @@ -209,7 +315,7 @@ pub fn addCases(cases: *tests.StackTracesContext) void { }); cases.addCase(.{ - .name = "stored errors do not contribute to error trace", + .name = "errors stored in var do not contribute to error trace", .source = \\fn foo() !void { \\ return error.TheSkyIsFalling; @@ -260,6 +366,82 @@ pub fn addCases(cases: *tests.StackTracesContext) void { }, }); + cases.addCase(.{ + .name = "error stored in const has trace preserved for duration of block", + .source = + \\fn foo() !void { return error.TheSkyIsFalling; } + \\fn bar() !void { return error.InternalError; } + \\fn baz() !void { return error.UnexpectedReality; } + \\ + \\pub fn main() !void { + \\ const x = foo(); + \\ const y = b: { + \\ if (true) + \\ break :b bar(); + \\ + \\ break :b {}; + \\ }; + \\ x catch {}; + \\ y catch {}; + \\ // foo()/bar() error traces not popped until end of block + \\ + \\ { + \\ const z = baz(); + \\ z catch {}; + \\ // baz() error trace still alive here + \\ } + \\ // baz() error trace popped, foo(), bar() still alive + \\ return error.StillUnresolved; + \\} + , + .Debug = .{ + .expect = + \\error: StillUnresolved + 
\\source.zig:1:18: [address] in foo (test) + \\fn foo() !void { return error.TheSkyIsFalling; } + \\ ^ + \\source.zig:2:18: [address] in bar (test) + \\fn bar() !void { return error.InternalError; } + \\ ^ + \\source.zig:23:5: [address] in main (test) + \\ return error.StillUnresolved; + \\ ^ + \\ + , + }, + .ReleaseSafe = .{ + .exclude_os = .{ + .windows, // TODO + .linux, // defeated by aggressive inlining + }, + .expect = + \\error: StillUnresolved + \\source.zig:1:18: [address] in [function] + \\fn foo() !void { return error.TheSkyIsFalling; } + \\ ^ + \\source.zig:2:18: [address] in [function] + \\fn bar() !void { return error.InternalError; } + \\ ^ + \\source.zig:23:5: [address] in [function] + \\ return error.StillUnresolved; + \\ ^ + \\ + , + }, + .ReleaseFast = .{ + .expect = + \\error: StillUnresolved + \\ + , + }, + .ReleaseSmall = .{ + .expect = + \\error: StillUnresolved + \\ + , + }, + }); + cases.addCase(.{ .name = "error passed to function has its trace preserved for duration of the call", .source = From a4523a2d4a0fb2b5c660a11aa37718080eebe9d0 Mon Sep 17 00:00:00 2001 From: Cody Tapscott Date: Sun, 25 Sep 2022 20:18:15 -0700 Subject: [PATCH 10/12] builtin.zig: Do not overwrite error frames when trace full Previously, we'd overwrite the errors in a circular buffer. Now that error return traces are intended to follow a stack discipline, we no longer have to support the index rolling over. By treating the trace like a saturating stack, any pop/restore code still behaves correctly past-the-end of the trace. As a bonus, this adds a small blurb to let the user know when the trace saturated and x number of frames were dropped. --- lib/std/builtin.zig | 6 ++++-- lib/std/debug.zig | 8 ++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/lib/std/builtin.zig b/lib/std/builtin.zig index c772d8e6f9..430a29c9d7 100644 --- a/lib/std/builtin.zig +++ b/lib/std/builtin.zig @@ -869,8 +869,10 @@ pub noinline fn returnError(st: *StackTrace) void { } pub inline fn addErrRetTraceAddr(st: *StackTrace, addr: usize) void { - st.instruction_addresses[st.index & (st.instruction_addresses.len - 1)] = addr; - st.index +%= 1; + if (st.index < st.instruction_addresses.len) + st.instruction_addresses[st.index] = addr; + + st.index += 1; } const std = @import("std.zig"); diff --git a/lib/std/debug.zig b/lib/std/debug.zig index 93216f0058..21b05249a1 100644 --- a/lib/std/debug.zig +++ b/lib/std/debug.zig @@ -411,6 +411,14 @@ pub fn writeStackTrace( const return_address = stack_trace.instruction_addresses[frame_index]; try printSourceAtAddress(debug_info, out_stream, return_address - 1, tty_config); } + + if (stack_trace.index > stack_trace.instruction_addresses.len) { + const dropped_frames = stack_trace.index - stack_trace.instruction_addresses.len; + + tty_config.setColor(out_stream, .Bold); + try out_stream.print("({d} additional stack frames skipped...)\n", .{dropped_frames}); + tty_config.setColor(out_stream, .Reset); + } } pub const StackIterator = struct { From 74b9cbd8950f4752c5c80925a8baa5fb2492d99f Mon Sep 17 00:00:00 2001 From: Cody Tapscott Date: Sat, 15 Oct 2022 14:45:12 -0700 Subject: [PATCH 11/12] stage2: Skip test exposing #13175 This PR (#12873) in combination with this particular test exposed a pre-existing bug (#13175). 
This means that the test for #13038 has regressed.
---
 test/behavior/eval.zig | 38 ++++++++++++++++++++++++++++++--------
 1 file changed, 30 insertions(+), 8 deletions(-)

diff --git a/test/behavior/eval.zig b/test/behavior/eval.zig
index aa92f42e24..2fa07d0de7 100644
--- a/test/behavior/eval.zig
+++ b/test/behavior/eval.zig
@@ -1401,7 +1401,21 @@ test "continue in inline for inside a comptime switch" {
     try expect(count == 4);
 }
 
+test "length of global array is determinable at comptime" {
+    const S = struct {
+        var bytes: [1024]u8 = undefined;
+
+        fn foo() !void {
+            try std.testing.expect(bytes.len == 1024);
+        }
+    };
+    comptime try S.foo();
+}
+
 test "continue nested inline for loop" {
+    // TODO: https://github.com/ziglang/zig/issues/13175
+    if (builtin.zig_backend != .stage1) return error.SkipZigTest;
+
     var a: u8 = 0;
     loop: inline for ([_]u8{ 1, 2 }) |x| {
         inline for ([_]u8{1}) |y| {
@@ -1415,13 +1429,21 @@ test "continue nested inline for loop" {
     try expect(a == 2);
 }
 
-test "length of global array is determinable at comptime" {
-    const S = struct {
-        var bytes: [1024]u8 = undefined;
+test "continue nested inline for loop in named block expr" {
+    // TODO: https://github.com/ziglang/zig/issues/13175
+    if (builtin.zig_backend != .stage1) return error.SkipZigTest;
 
-        fn foo() !void {
-            try std.testing.expect(bytes.len == 1024);
-        }
-    };
-    comptime try S.foo();
+    var a: u8 = 0;
+    loop: inline for ([_]u8{ 1, 2 }) |x| {
+        a = b: {
+            inline for ([_]u8{1}) |y| {
+                if (x == y) {
+                    continue :loop;
+                }
+            }
+            break :b x;
+        };
+        try expect(x == 2);
+    }
+    try expect(a == 2);
 }

From c36a2c27a51039d486f4149018154687a300d1eb Mon Sep 17 00:00:00 2001
From: Cody Tapscott
Date: Fri, 21 Oct 2022 12:42:27 -0700
Subject: [PATCH 12/12] Change how `Block` propagates (error return) trace index

Instead of adding 3 fields to every `Block`, this adds just one. The
function-level information is saved in the `Sema` struct instead, which
is created/copied more rarely.
---
 src/Module.zig |  7 +++----
 src/Sema.zig   | 39 +++++++++++++++++++--------------------
 2 files changed, 22 insertions(+), 24 deletions(-)

diff --git a/src/Module.zig b/src/Module.zig
index c23014f92a..4f150b0148 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -5635,10 +5635,9 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
 
     // Save the error trace as our first action in the function.
     // If this is unnecessary after all, Liveness will clean it up for us.
-    const err_ret_trace_index = try sema.analyzeSaveErrRetIndex(&inner_block);
-    inner_block.error_return_trace_index = err_ret_trace_index;
-    inner_block.error_return_trace_index_on_block_entry = err_ret_trace_index;
-    inner_block.error_return_trace_index_on_function_entry = err_ret_trace_index;
+    const error_return_trace_index = try sema.analyzeSaveErrRetIndex(&inner_block);
+    sema.error_return_trace_index_on_fn_entry = error_return_trace_index;
+    inner_block.error_return_trace_index = error_return_trace_index;
 
     sema.analyzeBody(&inner_block, fn_info.body) catch |err| switch (err) {
         // TODO make these unreachable instead of @panic
diff --git a/src/Sema.zig b/src/Sema.zig
index 12bf8d0404..4c2f72034e 100644
--- a/src/Sema.zig
+++ b/src/Sema.zig
@@ -32,6 +32,8 @@ owner_func: ?*Module.Fn,
 /// This starts out the same as `owner_func` and then diverges in the case of
 /// an inline or comptime function call.
 func: ?*Module.Fn,
+/// Used to restore the error return trace when returning a non-error from a function.
+error_return_trace_index_on_fn_entry: Air.Inst.Ref = .none, /// When semantic analysis needs to know the return type of the function whose body /// is being analyzed, this `Type` should be used instead of going through `func`. /// This will correctly handle the case of a comptime/inline function call of a @@ -156,8 +158,6 @@ pub const Block = struct { /// Keep track of the active error return trace index around blocks so that we can correctly /// pop the error trace upon block exit. error_return_trace_index: Air.Inst.Ref = .none, - error_return_trace_index_on_block_entry: Air.Inst.Ref = .none, - error_return_trace_index_on_function_entry: Air.Inst.Ref = .none, /// when null, it is determined by build mode, changed by @setRuntimeSafety want_safety: ?bool = null, @@ -233,8 +233,6 @@ pub const Block = struct { .c_import_buf = parent.c_import_buf, .switch_else_err_ty = parent.switch_else_err_ty, .error_return_trace_index = parent.error_return_trace_index, - .error_return_trace_index_on_block_entry = parent.error_return_trace_index, - .error_return_trace_index_on_function_entry = parent.error_return_trace_index_on_function_entry, }; } @@ -5039,8 +5037,6 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErro .runtime_loop = parent_block.runtime_loop, .runtime_index = parent_block.runtime_index, .error_return_trace_index = parent_block.error_return_trace_index, - .error_return_trace_index_on_block_entry = parent_block.error_return_trace_index, - .error_return_trace_index_on_function_entry = parent_block.error_return_trace_index_on_function_entry, }; defer child_block.instructions.deinit(gpa); @@ -6256,6 +6252,10 @@ fn analyzeCall( sema.func = module_fn; defer sema.func = parent_func; + const parent_err_ret_index = sema.error_return_trace_index_on_fn_entry; + sema.error_return_trace_index_on_fn_entry = block.error_return_trace_index; + defer sema.error_return_trace_index_on_fn_entry = parent_err_ret_index; + var wip_captures = try WipCaptureScope.init(gpa, sema.perm_arena, fn_owner_decl.src_scope); defer wip_captures.deinit(); @@ -6270,8 +6270,6 @@ fn analyzeCall( .inlining = &inlining, .is_comptime = is_comptime_call, .error_return_trace_index = block.error_return_trace_index, - .error_return_trace_index_on_block_entry = block.error_return_trace_index, - .error_return_trace_index_on_function_entry = block.error_return_trace_index, }; const merges = &child_block.inlining.?.merges; @@ -7020,10 +7018,9 @@ fn instantiateGenericCall( // Save the error trace as our first action in the function. // If this is unnecessary after all, Liveness will clean it up for us. 
- const err_ret_trace_index = try sema.analyzeSaveErrRetIndex(&child_block); - child_block.error_return_trace_index = err_ret_trace_index; - child_block.error_return_trace_index_on_block_entry = err_ret_trace_index; - child_block.error_return_trace_index_on_function_entry = err_ret_trace_index; + const error_return_trace_index = try sema.analyzeSaveErrRetIndex(&child_block); + child_sema.error_return_trace_index_on_fn_entry = error_return_trace_index; + child_block.error_return_trace_index = error_return_trace_index; const new_func_inst = child_sema.resolveBody(&child_block, fn_info.param_body, fn_info.param_body_inst) catch |err| { // TODO look up the compile error that happened here and attach a note to it @@ -10218,8 +10215,6 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError .runtime_loop = block.runtime_loop, .runtime_index = block.runtime_index, .error_return_trace_index = block.error_return_trace_index, - .error_return_trace_index_on_block_entry = block.error_return_trace_index, - .error_return_trace_index_on_function_entry = block.error_return_trace_index_on_function_entry, }; const merges = &child_block.label.?.merges; defer child_block.instructions.deinit(gpa); @@ -15741,8 +15736,6 @@ fn zirTypeofBuiltin(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr .is_typeof = true, .want_safety = false, .error_return_trace_index = block.error_return_trace_index, - .error_return_trace_index_on_block_entry = block.error_return_trace_index, - .error_return_trace_index_on_function_entry = block.error_return_trace_index_on_function_entry, }; defer child_block.instructions.deinit(sema.gpa); @@ -16444,16 +16437,22 @@ fn zirRestoreErrRetIndex(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) while (true) { if (block.label) |label| { if (label.zir_block == zir_block) { - if (start_block.error_return_trace_index != block.error_return_trace_index_on_block_entry) - break :b block.error_return_trace_index_on_block_entry; + const target_trace_index = if (block.parent) |parent_block| tgt: { + break :tgt parent_block.error_return_trace_index; + } else sema.error_return_trace_index_on_fn_entry; + + if (start_block.error_return_trace_index != target_trace_index) + break :b target_trace_index; + return; // No need to restore } } block = block.parent.?; } } else b: { - if (start_block.error_return_trace_index != start_block.error_return_trace_index_on_function_entry) - break :b start_block.error_return_trace_index_on_function_entry; + if (start_block.error_return_trace_index != sema.error_return_trace_index_on_fn_entry) + break :b sema.error_return_trace_index_on_fn_entry; + return; // No need to restore };
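// Sketch of the invariant the lookup above relies on (it reuses the patch's
// field names, but this helper itself is illustrative and not part of the
// diff): the on-block-entry index that the earlier commit stored explicitly
// is always recoverable from the parent block, or from `Sema` for the
// outermost block of the function.
fn entryTraceIndexSketch(sema: *Sema, block: *Block) Air.Inst.Ref {
    return if (block.parent) |parent|
        parent.error_return_trace_index
    else
        sema.error_return_trace_index_on_fn_entry;
}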