introduce std.debug.Trace

And use it to debug a LazySrcLoc in stage2 that is set to a bogus value.

The actual fix in this commit is:

```diff
-        try sema.emitBackwardBranch(&child_block, call_src);
+        try sema.emitBackwardBranch(block, call_src);
```
This commit is contained in:
Andrew Kelley 2022-06-07 17:48:53 -07:00
parent f1cff4fa4a
commit bac132bc8f
9 changed files with 185 additions and 95 deletions

View File

@ -1943,3 +1943,60 @@ test "#4353: std.debug should manage resources correctly" {
/// Test helper: returns the address this function will return to.
/// `noinline` is required — if the call were inlined, @returnAddress() would
/// refer to the caller's caller and the test's expectation would break.
noinline fn showMyTrace() usize {
return @returnAddress();
}
/// Returns a type that records up to `size` stack traces, each capturing up to
/// `stack_frame_count` frames, tagged with a short note. Intended for debugging
/// where a value was set from; dump() prints all recorded traces to stderr.
pub fn Trace(comptime size: usize, comptime stack_frame_count: usize) type {
    return struct {
        addrs: [size][stack_frame_count]usize = undefined,
        notes: [size][]const u8 = undefined,
        index: usize = 0,

        // A zeroed frame row; zeros act as the sentinel that dump() uses
        // (via mem.sliceTo) to find the end of a captured trace.
        const frames_init = [1]usize{0} ** stack_frame_count;

        /// Record the caller's stack trace with `note`.
        /// `noinline` so @returnAddress() reliably refers to the caller's frame.
        pub noinline fn add(t: *@This(), note: []const u8) void {
            return addAddr(t, @returnAddress(), note);
        }

        /// Record a stack trace starting from `addr` with `note`.
        pub fn addAddr(t: *@This(), addr: usize, note: []const u8) void {
            if (t.index < size) {
                t.notes[t.index] = note;
                // Zero-fill first so unwritten tail entries terminate the trace.
                t.addrs[t.index] = frames_init;
                var stack_trace: std.builtin.StackTrace = .{
                    .index = 0,
                    .instruction_addresses = &t.addrs[t.index],
                };
                captureStackTrace(addr, &stack_trace);
            }
            // Keep counting even if the end is reached so that the
            // user can find out how much more size they need.
            t.index += 1;
        }

        /// Print all recorded traces to stderr. Best-effort: any print or
        /// debug-info failure aborts (or skips) quietly rather than erroring.
        pub fn dump(t: @This()) void {
            const tty_config = detectTTYConfig();
            const stderr = io.getStdErr().writer();
            // Only the first `size` traces are actually stored; `t.index` may
            // exceed that (addAddr keeps counting). Must clamp DOWN — with
            // @maximum this would slice t.addrs out of bounds when
            // t.index > size, iterate undefined rows when t.index < size,
            // and make the "more traces" message below unreachable.
            const end = @minimum(t.index, size);
            const debug_info = getSelfDebugInfo() catch |err| {
                stderr.print(
                    "Unable to dump stack trace: Unable to open debug info: {s}\n",
                    .{@errorName(err)},
                ) catch return;
                return;
            };
            for (t.addrs[0..end]) |frames_array, i| {
                stderr.print("{s}:\n", .{t.notes[i]}) catch return;
                // sliceTo needs a mutable copy to produce the right slice type.
                var frames_array_mutable = frames_array;
                const frames = mem.sliceTo(frames_array_mutable[0..], 0);
                const stack_trace: std.builtin.StackTrace = .{
                    .index = frames.len,
                    .instruction_addresses = frames,
                };
                writeStackTrace(stack_trace, stderr, getDebugInfoAllocator(), debug_info, tty_config) catch continue;
            }
            if (t.index > end) {
                stderr.print("{d} more traces not shown; consider increasing trace size\n", .{
                    t.index - end,
                }) catch return;
            }
        }
    };
}

View File

@ -659,7 +659,7 @@ pub const Decl = struct {
}
pub fn nodeSrcLoc(decl: Decl, node_index: Ast.Node.Index) LazySrcLoc {
return .{ .node_offset = decl.nodeIndexToRelative(node_index) };
return LazySrcLoc.nodeOffset(decl.nodeIndexToRelative(node_index));
}
pub fn srcLoc(decl: Decl) SrcLoc {
@ -670,7 +670,7 @@ pub const Decl = struct {
return .{
.file_scope = decl.getFileScope(),
.parent_decl_node = decl.src_node,
.lazy = .{ .node_offset = node_offset },
.lazy = LazySrcLoc.nodeOffset(node_offset),
};
}
@ -861,7 +861,7 @@ pub const ErrorSet = struct {
return .{
.file_scope = owner_decl.getFileScope(),
.parent_decl_node = owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
.lazy = LazySrcLoc.nodeOffset(self.node_offset),
};
}
@ -947,7 +947,7 @@ pub const Struct = struct {
return .{
.file_scope = owner_decl.getFileScope(),
.parent_decl_node = owner_decl.src_node,
.lazy = .{ .node_offset = s.node_offset },
.lazy = LazySrcLoc.nodeOffset(s.node_offset),
};
}
@ -1066,7 +1066,7 @@ pub const EnumSimple = struct {
return .{
.file_scope = owner_decl.getFileScope(),
.parent_decl_node = owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
.lazy = LazySrcLoc.nodeOffset(self.node_offset),
};
}
};
@ -1097,7 +1097,7 @@ pub const EnumNumbered = struct {
return .{
.file_scope = owner_decl.getFileScope(),
.parent_decl_node = owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
.lazy = LazySrcLoc.nodeOffset(self.node_offset),
};
}
};
@ -1131,7 +1131,7 @@ pub const EnumFull = struct {
return .{
.file_scope = owner_decl.getFileScope(),
.parent_decl_node = owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
.lazy = LazySrcLoc.nodeOffset(self.node_offset),
};
}
};
@ -1197,7 +1197,7 @@ pub const Union = struct {
return .{
.file_scope = owner_decl.getFileScope(),
.parent_decl_node = owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
.lazy = LazySrcLoc.nodeOffset(self.node_offset),
};
}
@ -1404,7 +1404,7 @@ pub const Opaque = struct {
return .{
.file_scope = owner_decl.getFileScope(),
.parent_decl_node = owner_decl.src_node,
.lazy = .{ .node_offset = self.node_offset },
.lazy = LazySrcLoc.nodeOffset(self.node_offset),
};
}
@ -2105,7 +2105,17 @@ pub const SrcLoc = struct {
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset, .node_offset_bin_op => |node_off| {
.node_offset => |traced_off| {
const node_off = traced_off.x;
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
assert(src_loc.file_scope.tree_loaded);
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[node];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset_bin_op => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
assert(src_loc.file_scope.tree_loaded);
@ -2515,6 +2525,17 @@ pub const SrcLoc = struct {
}
};
/// This wraps a simple integer in debug builds so that later on we can find out
/// where in semantic analysis the value got set.
// Wraps the i32 node offset together with a (debug-only) creation trace.
// In release modes `Trace` collapses to `void`, so the wrapper adds no size.
const TracedOffset = struct {
// The actual AST node offset value.
x: i32,
trace: Trace = trace_init,
// Tracing is only worth the cost in Debug builds of the compiler.
const want_tracing = builtin.mode == .Debug;
// 1 trace slot, 3 frames each; `else {}` yields a void value in release.
const trace_init = if (want_tracing) std.debug.Trace(1, 3){} else {};
// Derive the field type from the initializer so it is Trace(1,3) or void.
const Trace = @TypeOf(trace_init);
};
/// Resolving a source location into a byte offset may require doing work
/// that we would rather not do unless the error actually occurs.
/// Therefore we need a data structure that contains the information necessary
@ -2555,7 +2576,7 @@ pub const LazySrcLoc = union(enum) {
/// The source location points to an AST node, which is this value offset
/// from its containing Decl node AST index.
/// The Decl is determined contextually.
node_offset: i32,
node_offset: TracedOffset,
/// The source location points to two tokens left of the first token of an AST node,
/// which is this value offset from its containing Decl node AST index.
/// The Decl is determined contextually.
@ -2705,6 +2726,18 @@ pub const LazySrcLoc = union(enum) {
/// The Decl is determined contextually.
node_offset_array_type_elem: i32,
// Construct a `.node_offset` LazySrcLoc. Selected at comptime: the debug
// variant additionally records where the value was created.
pub const nodeOffset = if (TracedOffset.want_tracing) nodeOffsetDebug else nodeOffsetRelease;
// `noinline` so @returnAddress() points at the caller that set the offset,
// which is exactly what the trace is meant to reveal.
noinline fn nodeOffsetDebug(node_offset: i32) LazySrcLoc {
var result: LazySrcLoc = .{ .node_offset = .{ .x = node_offset } };
result.node_offset.trace.addAddr(@returnAddress(), "init");
return result;
}
// Release variant: no tracing, just wrap the offset.
fn nodeOffsetRelease(node_offset: i32) LazySrcLoc {
return .{ .node_offset = .{ .x = node_offset } };
}
/// Upgrade to a `SrcLoc` based on the `Decl` provided.
pub fn toSrcLoc(lazy: LazySrcLoc, decl: *Decl) SrcLoc {
return switch (lazy) {
@ -4014,7 +4047,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
const body = zir.extra[extra.end..][0..extra.data.body_len];
const result_ref = (try sema.analyzeBodyBreak(&block_scope, body)).?.operand;
try wip_captures.finalize();
const src: LazySrcLoc = .{ .node_offset = 0 };
const src = LazySrcLoc.nodeOffset(0);
const decl_tv = try sema.resolveInstValue(&block_scope, src, result_ref);
const decl_align: u32 = blk: {
const align_ref = decl.zirAlignRef();
@ -5044,7 +5077,7 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
// Crucially, this happens *after* we set the function state to success above,
// so that dependencies on the function body will now be satisfied rather than
// result in circular dependency errors.
const src: LazySrcLoc = .{ .node_offset = 0 };
const src = LazySrcLoc.nodeOffset(0);
sema.resolveFnTypes(&inner_block, src, fn_ty_info) catch |err| switch (err) {
error.NeededSourceLocation => unreachable,
error.GenericPoison => unreachable,
@ -5338,7 +5371,7 @@ pub const SwitchProngSrc = union(enum) {
log.warn("unable to load {s}: {s}", .{
decl.getFileScope().sub_file_path, @errorName(err),
});
return LazySrcLoc{ .node_offset = 0 };
return LazySrcLoc.nodeOffset(0);
};
const switch_node = decl.relativeToNodeIndex(switch_node_offset);
const main_tokens = tree.nodes.items(.main_token);
@ -5367,17 +5400,17 @@ pub const SwitchProngSrc = union(enum) {
node_tags[case.ast.values[0]] == .switch_range;
switch (prong_src) {
.scalar => |i| if (!is_multi and i == scalar_i) return LazySrcLoc{
.node_offset = decl.nodeIndexToRelative(case.ast.values[0]),
},
.scalar => |i| if (!is_multi and i == scalar_i) return LazySrcLoc.nodeOffset(
decl.nodeIndexToRelative(case.ast.values[0]),
),
.multi => |s| if (is_multi and s.prong == multi_i) {
var item_i: u32 = 0;
for (case.ast.values) |item_node| {
if (node_tags[item_node] == .switch_range) continue;
if (item_i == s.item) return LazySrcLoc{
.node_offset = decl.nodeIndexToRelative(item_node),
};
if (item_i == s.item) return LazySrcLoc.nodeOffset(
decl.nodeIndexToRelative(item_node),
);
item_i += 1;
} else unreachable;
},
@ -5387,15 +5420,15 @@ pub const SwitchProngSrc = union(enum) {
if (node_tags[range] != .switch_range) continue;
if (range_i == s.item) switch (range_expand) {
.none => return LazySrcLoc{
.node_offset = decl.nodeIndexToRelative(range),
},
.first => return LazySrcLoc{
.node_offset = decl.nodeIndexToRelative(node_datas[range].lhs),
},
.last => return LazySrcLoc{
.node_offset = decl.nodeIndexToRelative(node_datas[range].rhs),
},
.none => return LazySrcLoc.nodeOffset(
decl.nodeIndexToRelative(range),
),
.first => return LazySrcLoc.nodeOffset(
decl.nodeIndexToRelative(node_datas[range].lhs),
),
.last => return LazySrcLoc.nodeOffset(
decl.nodeIndexToRelative(node_datas[range].rhs),
),
};
range_i += 1;
} else unreachable;
@ -5450,7 +5483,7 @@ pub const PeerTypeCandidateSrc = union(enum) {
log.warn("unable to load {s}: {s}", .{
decl.getFileScope().sub_file_path, @errorName(err),
});
return LazySrcLoc{ .node_offset = 0 };
return LazySrcLoc.nodeOffset(0);
};
const node = decl.relativeToNodeIndex(node_offset);
const node_datas = tree.nodes.items(.data);

View File

@ -1154,7 +1154,7 @@ fn analyzeBodyInner(
.repeat => {
if (block.is_comptime) {
// Send comptime control flow back to the beginning of this block.
const src: LazySrcLoc = .{ .node_offset = datas[inst].node };
const src = LazySrcLoc.nodeOffset(datas[inst].node);
try sema.emitBackwardBranch(block, src);
if (wip_captures.scope.captures.count() != orig_captures) {
try wip_captures.reset(parent_capture_scope);
@ -1165,14 +1165,14 @@ fn analyzeBodyInner(
continue;
} else {
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
const src = LazySrcLoc.nodeOffset(src_node);
try sema.requireRuntimeBlock(block, src);
break always_noreturn;
}
},
.repeat_inline => {
// Send comptime control flow back to the beginning of this block.
const src: LazySrcLoc = .{ .node_offset = datas[inst].node };
const src = LazySrcLoc.nodeOffset(datas[inst].node);
try sema.emitBackwardBranch(block, src);
if (wip_captures.scope.captures.count() != orig_captures) {
try wip_captures.reset(parent_capture_scope);
@ -2087,7 +2087,7 @@ fn zirStructDecl(
const small = @bitCast(Zir.Inst.StructDecl.Small, extended.small);
const src: LazySrcLoc = if (small.has_src_node) blk: {
const node_offset = @bitCast(i32, sema.code.extra[extended.operand]);
break :blk .{ .node_offset = node_offset };
break :blk LazySrcLoc.nodeOffset(node_offset);
} else sema.src;
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
@ -2108,7 +2108,7 @@ fn zirStructDecl(
struct_obj.* = .{
.owner_decl = new_decl_index,
.fields = .{},
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.zir_index = inst,
.layout = small.layout,
.status = .none,
@ -2210,7 +2210,7 @@ fn zirEnumDecl(
const src: LazySrcLoc = if (small.has_src_node) blk: {
const node_offset = @bitCast(i32, sema.code.extra[extra_index]);
extra_index += 1;
break :blk .{ .node_offset = node_offset };
break :blk LazySrcLoc.nodeOffset(node_offset);
} else sema.src;
const tag_type_ref = if (small.has_tag_type) blk: {
@ -2263,7 +2263,7 @@ fn zirEnumDecl(
.tag_ty_inferred = true,
.fields = .{},
.values = .{},
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.namespace = .{
.parent = block.namespace,
.ty = enum_ty,
@ -2385,8 +2385,8 @@ fn zirEnumDecl(
const gop = enum_obj.fields.getOrPutAssumeCapacity(field_name);
if (gop.found_existing) {
const tree = try sema.getAstTree(block);
const field_src = enumFieldSrcLoc(sema.mod.declPtr(block.src_decl), tree.*, src.node_offset, field_i);
const other_tag_src = enumFieldSrcLoc(sema.mod.declPtr(block.src_decl), tree.*, src.node_offset, gop.index);
const field_src = enumFieldSrcLoc(sema.mod.declPtr(block.src_decl), tree.*, src.node_offset.x, field_i);
const other_tag_src = enumFieldSrcLoc(sema.mod.declPtr(block.src_decl), tree.*, src.node_offset.x, gop.index);
const msg = msg: {
const msg = try sema.errMsg(block, field_src, "duplicate enum tag", .{});
errdefer msg.destroy(gpa);
@ -2442,7 +2442,7 @@ fn zirUnionDecl(
const src: LazySrcLoc = if (small.has_src_node) blk: {
const node_offset = @bitCast(i32, sema.code.extra[extra_index]);
extra_index += 1;
break :blk .{ .node_offset = node_offset };
break :blk LazySrcLoc.nodeOffset(node_offset);
} else sema.src;
extra_index += @boolToInt(small.has_tag_type);
@ -2480,7 +2480,7 @@ fn zirUnionDecl(
.owner_decl = new_decl_index,
.tag_ty = Type.initTag(.@"null"),
.fields = .{},
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.zir_index = inst,
.layout = small.layout,
.status = .none,
@ -2516,7 +2516,7 @@ fn zirOpaqueDecl(
const src: LazySrcLoc = if (small.has_src_node) blk: {
const node_offset = @bitCast(i32, sema.code.extra[extra_index]);
extra_index += 1;
break :blk .{ .node_offset = node_offset };
break :blk LazySrcLoc.nodeOffset(node_offset);
} else sema.src;
const decls_len = if (small.has_decls_len) blk: {
@ -2547,7 +2547,7 @@ fn zirOpaqueDecl(
opaque_obj.* = .{
.owner_decl = new_decl_index,
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.namespace = .{
.parent = block.namespace,
.ty = opaque_ty,
@ -2623,7 +2623,7 @@ fn zirRetPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = inst_data };
const src = LazySrcLoc.nodeOffset(inst_data);
try sema.requireFunctionBlock(block, src);
if (block.is_comptime or try sema.typeRequiresComptime(block, src, sema.fn_ret_ty)) {
@ -2661,7 +2661,7 @@ fn zirRetType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = inst_data };
const src = LazySrcLoc.nodeOffset(inst_data);
try sema.requireFunctionBlock(block, src);
return sema.addType(sema.fn_ret_ty);
}
@ -2750,7 +2750,7 @@ fn zirAllocExtended(
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const extra = sema.code.extraData(Zir.Inst.AllocExtended, extended.operand);
const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
const src = LazySrcLoc.nodeOffset(extra.data.src_node);
const ty_src = src; // TODO better source location
const align_src = src; // TODO better source location
const small = @bitCast(Zir.Inst.AllocExtended.Small, extended.small);
@ -2903,7 +2903,7 @@ fn zirAllocInferredComptime(
inferred_alloc_ty: Type,
) CompileError!Air.Inst.Ref {
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
const src = LazySrcLoc.nodeOffset(src_node);
sema.src = src;
return sema.addConstant(
inferred_alloc_ty,
@ -2967,7 +2967,7 @@ fn zirAllocInferred(
defer tracy.end();
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
const src = LazySrcLoc.nodeOffset(src_node);
sema.src = src;
if (block.is_comptime) {
@ -3718,7 +3718,7 @@ fn zirValidateArrayInit(
outer: for (instrs) |elem_ptr, i| {
const elem_ptr_data = sema.code.instructions.items(.data)[elem_ptr].pl_node;
const elem_src: LazySrcLoc = .{ .node_offset = elem_ptr_data.src_node };
const elem_src = LazySrcLoc.nodeOffset(elem_ptr_data.src_node);
// Determine whether the value stored to this pointer is comptime-known.
@ -4203,7 +4203,7 @@ fn zirCompileLog(
const extra = sema.code.extraData(Zir.Inst.NodeMultiOp, extended.operand);
const src_node = extra.data.src_node;
const src: LazySrcLoc = .{ .node_offset = src_node };
const src = LazySrcLoc.nodeOffset(src_node);
const args = sema.code.refSlice(extra.end, extended.small);
for (args) |arg_ref, i| {
@ -4707,7 +4707,7 @@ pub fn analyzeExport(
fn zirSetAlignStack(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!void {
const extra = sema.code.extraData(Zir.Inst.UnNode, extended.operand).data;
const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.node };
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
const alignment = try sema.resolveAlign(block, operand_src, extra.operand);
if (alignment > 256) {
return sema.fail(block, src, "attempt to @setAlignStack({d}); maximum is 256", .{
@ -5312,7 +5312,7 @@ fn analyzeCall(
delete_memoized_call_key = true;
}
try sema.emitBackwardBranch(&child_block, call_src);
try sema.emitBackwardBranch(block, call_src);
// Whether this call should be memoized, set to false if the call can mutate
// comptime state.
@ -6988,7 +6988,7 @@ fn funcCommon(
const param_types = try sema.arena.alloc(Type, block.params.items.len);
const comptime_params = try sema.arena.alloc(bool, block.params.items.len);
for (block.params.items) |param, i| {
const param_src: LazySrcLoc = .{ .node_offset = src_node_offset }; // TODO better src
const param_src = LazySrcLoc.nodeOffset(src_node_offset); // TODO better src
param_types[i] = param.ty;
comptime_params[i] = param.is_comptime or
try sema.typeRequiresComptime(block, param_src, param.ty);
@ -7378,7 +7378,7 @@ fn zirFieldCallBindNamed(sema: *Sema, block: *Block, extended: Zir.Inst.Extended
defer tracy.end();
const extra = sema.code.extraData(Zir.Inst.FieldNamedNode, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
const field_name_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = extra.node };
const object_ptr = try sema.resolveInst(extra.lhs);
const field_name = try sema.resolveConstString(block, field_name_src, extra.field_name);
@ -10088,7 +10088,7 @@ fn zirOverflowArithmetic(
defer tracy.end();
const extra = sema.code.extraData(Zir.Inst.OverflowArithmetic, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
const lhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.node };
const rhs_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = extra.node };
@ -11309,7 +11309,7 @@ fn zirAsm(
defer tracy.end();
const extra = sema.code.extraData(Zir.Inst.Asm, extended.operand);
const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
const src = LazySrcLoc.nodeOffset(extra.data.src_node);
const ret_ty_src: LazySrcLoc = .{ .node_offset_asm_ret_ty = extra.data.src_node };
const outputs_len = @truncate(u5, extended.small);
const inputs_len = @truncate(u5, extended.small >> 5);
@ -11761,7 +11761,7 @@ fn zirThis(
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const this_decl_index = block.namespace.getDeclIndex();
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const src = LazySrcLoc.nodeOffset(@bitCast(i32, extended.operand));
return sema.analyzeDeclVal(block, src, this_decl_index);
}
@ -11815,7 +11815,7 @@ fn zirRetAddr(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const src = LazySrcLoc.nodeOffset(@bitCast(i32, extended.operand));
try sema.requireRuntimeBlock(block, src);
return try block.addNoOp(.ret_addr);
}
@ -11825,7 +11825,7 @@ fn zirFrameAddress(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const src = LazySrcLoc.nodeOffset(@bitCast(i32, extended.operand));
try sema.requireRuntimeBlock(block, src);
return try block.addNoOp(.frame_addr);
}
@ -11838,7 +11838,7 @@ fn zirBuiltinSrc(
const tracy = trace(@src());
defer tracy.end();
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const src = LazySrcLoc.nodeOffset(@bitCast(i32, extended.operand));
const extra = sema.code.extraData(Zir.Inst.LineColumn, extended.operand).data;
const func = sema.func orelse return sema.fail(block, src, "@src outside function", .{});
const fn_owner_decl = sema.mod.declPtr(func.owner_decl);
@ -12842,7 +12842,7 @@ fn zirTypeofPeer(
defer tracy.end();
const extra = sema.code.extraData(Zir.Inst.TypeOfPeer, extended.operand);
const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
const src = LazySrcLoc.nodeOffset(extra.data.src_node);
const body = sema.code.extra[extra.data.body_index..][0..extra.data.body_len];
var child_block: Block = .{
@ -14157,7 +14157,7 @@ fn zirErrorReturnTrace(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const src = LazySrcLoc.nodeOffset(@bitCast(i32, extended.operand));
return sema.getErrorReturnTrace(block, src);
}
@ -14185,7 +14185,7 @@ fn zirFrame(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const src = LazySrcLoc.nodeOffset(@bitCast(i32, extended.operand));
return sema.fail(block, src, "TODO: Sema.zirFrame", .{});
}
@ -14629,7 +14629,7 @@ fn zirReify(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.I
.tag_ty_inferred = false,
.fields = .{},
.values = .{},
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.namespace = .{
.parent = block.namespace,
.ty = enum_ty,
@ -14711,7 +14711,7 @@ fn zirReify(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.I
opaque_obj.* = .{
.owner_decl = new_decl_index,
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.namespace = .{
.parent = block.namespace,
.ty = opaque_ty,
@ -14763,7 +14763,7 @@ fn zirReify(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.I
.owner_decl = new_decl_index,
.tag_ty = Type.initTag(.@"null"),
.fields = .{},
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.zir_index = inst,
.layout = layout_val.toEnum(std.builtin.Type.ContainerLayout),
.status = .have_field_types,
@ -14930,7 +14930,7 @@ fn reifyStruct(
struct_obj.* = .{
.owner_decl = new_decl_index,
.fields = .{},
.node_offset = src.node_offset,
.node_offset = src.node_offset.x,
.zir_index = inst,
.layout = layout_val.toEnum(std.builtin.Type.ContainerLayout),
.status = .have_field_types,
@ -15130,7 +15130,7 @@ fn zirIntToPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
fn zirErrSetCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
const extra = sema.code.extraData(Zir.Inst.BinNode, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
const dest_ty_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.node };
const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = extra.node };
const dest_ty = try sema.resolveType(block, dest_ty_src, extra.lhs);
@ -17114,7 +17114,7 @@ fn zirAwaitNosuspend(
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const extra = sema.code.extraData(Zir.Inst.UnNode, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
return sema.fail(block, src, "TODO: Sema.zirAwaitNosuspend", .{});
}
@ -17443,7 +17443,7 @@ fn zirWasmMemorySize(
) CompileError!Air.Inst.Ref {
const extra = sema.code.extraData(Zir.Inst.UnNode, extended.operand).data;
const index_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.node };
const builtin_src: LazySrcLoc = .{ .node_offset = extra.node };
const builtin_src = LazySrcLoc.nodeOffset(extra.node);
const target = sema.mod.getTarget();
if (!target.isWasm()) {
return sema.fail(block, builtin_src, "builtin @wasmMemorySize is available when targeting WebAssembly; targeted CPU architecture is {s}", .{@tagName(target.cpu.arch)});
@ -17466,7 +17466,7 @@ fn zirWasmMemoryGrow(
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const extra = sema.code.extraData(Zir.Inst.BinNode, extended.operand).data;
const builtin_src: LazySrcLoc = .{ .node_offset = extra.node };
const builtin_src = LazySrcLoc.nodeOffset(extra.node);
const index_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.node };
const delta_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = extra.node };
const target = sema.mod.getTarget();
@ -17534,7 +17534,7 @@ fn zirBuiltinExtern(
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const extra = sema.code.extraData(Zir.Inst.BinNode, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
const ty_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.node };
const options_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = extra.node };
@ -23586,7 +23586,7 @@ fn semaStructFields(
const small = @bitCast(Zir.Inst.StructDecl.Small, extended.small);
var extra_index: usize = extended.operand;
const src: LazySrcLoc = .{ .node_offset = struct_obj.node_offset };
const src = LazySrcLoc.nodeOffset(struct_obj.node_offset);
extra_index += @boolToInt(small.has_src_node);
const body_len = if (small.has_body_len) blk: {
@ -23773,7 +23773,7 @@ fn semaUnionFields(block: *Block, mod: *Module, union_obj: *Module.Union) Compil
const small = @bitCast(Zir.Inst.UnionDecl.Small, extended.small);
var extra_index: usize = extended.operand;
const src: LazySrcLoc = .{ .node_offset = union_obj.node_offset };
const src = LazySrcLoc.nodeOffset(union_obj.node_offset);
extra_index += @boolToInt(small.has_src_node);
const tag_type_ref: Zir.Inst.Ref = if (small.has_tag_type) blk: {
@ -24459,7 +24459,7 @@ fn enumFieldSrcLoc(
.container_field,
=> {
if (it_index == field_index) {
return .{ .node_offset = decl.nodeIndexToRelative(member_node) };
return LazySrcLoc.nodeOffset(decl.nodeIndexToRelative(member_node));
}
it_index += 1;
},

View File

@ -2427,7 +2427,7 @@ pub const Inst = struct {
operand: Ref,
pub fn src(self: @This()) LazySrcLoc {
return .{ .node_offset = self.src_node };
return LazySrcLoc.nodeOffset(self.src_node);
}
},
/// Used for unary operators, with a token source location.
@ -2450,7 +2450,7 @@ pub const Inst = struct {
payload_index: u32,
pub fn src(self: @This()) LazySrcLoc {
return .{ .node_offset = self.src_node };
return LazySrcLoc.nodeOffset(self.src_node);
}
},
pl_tok: struct {
@ -2526,7 +2526,7 @@ pub const Inst = struct {
bit_count: u16,
pub fn src(self: @This()) LazySrcLoc {
return .{ .node_offset = self.src_node };
return LazySrcLoc.nodeOffset(self.src_node);
}
},
bool_br: struct {
@ -2545,7 +2545,7 @@ pub const Inst = struct {
force_comptime: bool,
pub fn src(self: @This()) LazySrcLoc {
return .{ .node_offset = self.src_node };
return LazySrcLoc.nodeOffset(self.src_node);
}
},
@"break": struct {
@ -2566,7 +2566,7 @@ pub const Inst = struct {
inst: Index,
pub fn src(self: @This()) LazySrcLoc {
return .{ .node_offset = self.src_node };
return LazySrcLoc.nodeOffset(self.src_node);
}
},
str_op: struct {

View File

@ -622,7 +622,7 @@ pub fn deinit(self: *Self) void {
/// Sets `err_msg` on `CodeGen` and returns `error.CodegenFail` which is caught in link/Wasm.zig
fn fail(self: *Self, comptime fmt: []const u8, args: anytype) InnerError {
const src: LazySrcLoc = .{ .node_offset = 0 };
const src = LazySrcLoc.nodeOffset(0);
const src_loc = src.toSrcLoc(self.decl);
self.err_msg = try Module.ErrorMsg.create(self.gpa, src_loc, fmt, args);
return error.CodegenFail;

View File

@ -363,7 +363,7 @@ pub const DeclGen = struct {
fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
@setCold(true);
const src: LazySrcLoc = .{ .node_offset = 0 };
const src = LazySrcLoc.nodeOffset(0);
const src_loc = src.toSrcLoc(dg.decl);
dg.error_msg = try Module.ErrorMsg.create(dg.module.gpa, src_loc, format, args);
return error.AnalysisFail;

View File

@ -2163,7 +2163,7 @@ pub const DeclGen = struct {
fn todo(self: *DeclGen, comptime format: []const u8, args: anytype) Error {
@setCold(true);
assert(self.err_msg == null);
const src_loc = @as(LazySrcLoc, .{ .node_offset = 0 }).toSrcLoc(self.decl);
const src_loc = LazySrcLoc.nodeOffset(0).toSrcLoc(self.decl);
self.err_msg = try Module.ErrorMsg.create(self.gpa, src_loc, "TODO (LLVM): " ++ format, args);
return error.CodegenFail;
}

View File

@ -184,7 +184,7 @@ pub const DeclGen = struct {
fn fail(self: *DeclGen, comptime format: []const u8, args: anytype) Error {
@setCold(true);
const src: LazySrcLoc = .{ .node_offset = 0 };
const src = LazySrcLoc.nodeOffset(0);
const src_loc = src.toSrcLoc(self.decl);
assert(self.error_msg == null);
self.error_msg = try Module.ErrorMsg.create(self.module.gpa, src_loc, format, args);
@ -193,7 +193,7 @@ pub const DeclGen = struct {
fn todo(self: *DeclGen, comptime format: []const u8, args: anytype) Error {
@setCold(true);
const src: LazySrcLoc = .{ .node_offset = 0 };
const src = LazySrcLoc.nodeOffset(0);
const src_loc = src.toSrcLoc(self.decl);
assert(self.error_msg == null);
self.error_msg = try Module.ErrorMsg.create(self.module.gpa, src_loc, "TODO (SPIR-V): " ++ format, args);

View File

@ -497,7 +497,7 @@ const Writer = struct {
.wasm_memory_size,
=> {
const inst_data = self.code.extraData(Zir.Inst.UnNode, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = inst_data.node };
const src = LazySrcLoc.nodeOffset(inst_data.node);
try self.writeInstRef(stream, inst_data.operand);
try stream.writeAll(")) ");
try self.writeSrc(stream, src);
@ -510,7 +510,7 @@ const Writer = struct {
.prefetch,
=> {
const inst_data = self.code.extraData(Zir.Inst.BinNode, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = inst_data.node };
const src = LazySrcLoc.nodeOffset(inst_data.node);
try self.writeInstRef(stream, inst_data.lhs);
try stream.writeAll(", ");
try self.writeInstRef(stream, inst_data.rhs);
@ -520,7 +520,7 @@ const Writer = struct {
.field_call_bind_named => {
const extra = self.code.extraData(Zir.Inst.FieldNamedNode, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
try self.writeInstRef(stream, extra.lhs);
try stream.writeAll(", ");
try self.writeInstRef(stream, extra.field_name);
@ -531,7 +531,7 @@ const Writer = struct {
}
fn writeExtNode(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const src = LazySrcLoc.nodeOffset(@bitCast(i32, extended.operand));
try stream.writeAll(")) ");
try self.writeSrc(stream, src);
}
@ -1050,7 +1050,7 @@ const Writer = struct {
fn writeNodeMultiOp(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const extra = self.code.extraData(Zir.Inst.NodeMultiOp, extended.operand);
const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
const src = LazySrcLoc.nodeOffset(extra.data.src_node);
const operands = self.code.refSlice(extra.end, extended.small);
for (operands) |operand, i| {
@ -1074,7 +1074,7 @@ const Writer = struct {
fn writeAsm(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const extra = self.code.extraData(Zir.Inst.Asm, extended.operand);
const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
const src = LazySrcLoc.nodeOffset(extra.data.src_node);
const outputs_len = @truncate(u5, extended.small);
const inputs_len = @truncate(u5, extended.small >> 5);
const clobbers_len = @truncate(u5, extended.small >> 10);
@ -1145,7 +1145,7 @@ const Writer = struct {
fn writeOverflowArithmetic(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const extra = self.code.extraData(Zir.Inst.OverflowArithmetic, extended.operand).data;
const src: LazySrcLoc = .{ .node_offset = extra.node };
const src = LazySrcLoc.nodeOffset(extra.node);
try self.writeInstRef(stream, extra.lhs);
try stream.writeAll(", ");
@ -1898,7 +1898,7 @@ const Writer = struct {
inst: Zir.Inst.Index,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const src_node = self.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
const src = LazySrcLoc.nodeOffset(src_node);
try stream.writeAll(") ");
try self.writeSrc(stream, src);
}
@ -2117,7 +2117,7 @@ const Writer = struct {
fn writeAllocExtended(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const extra = self.code.extraData(Zir.Inst.AllocExtended, extended.operand);
const small = @bitCast(Zir.Inst.AllocExtended.Small, extended.small);
const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
const src = LazySrcLoc.nodeOffset(extra.data.src_node);
var extra_index: usize = extra.end;
const type_inst: Zir.Inst.Ref = if (!small.has_type) .none else blk: {
@ -2351,7 +2351,7 @@ const Writer = struct {
fn writeSrcNode(self: *Writer, stream: anytype, src_node: ?i32) !void {
const node_offset = src_node orelse return;
const src: LazySrcLoc = .{ .node_offset = node_offset };
const src = LazySrcLoc.nodeOffset(node_offset);
try stream.writeAll(" ");
return self.writeSrc(stream, src);
}