lldb: add pretty printer for intern pool indices

This commit is contained in:
Jacob Young 2024-12-20 16:37:25 -05:00
parent 06206479a9
commit 5c76e08f49
13 changed files with 631 additions and 235 deletions

View File

@ -2181,7 +2181,8 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
}
if (comp.zcu) |zcu| {
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = .main };
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
zcu.compile_log_text.shrinkAndFree(gpa, 0);
@ -2251,7 +2252,8 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
try comp.performAllTheWork(main_progress_node);
if (comp.zcu) |zcu| {
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = .main };
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
if (build_options.enable_debug_extensions and comp.verbose_intern_pool) {
std.debug.print("intern pool stats for '{s}':\n", .{
@ -3609,7 +3611,8 @@ fn performAllTheWorkInner(
}
if (comp.zcu) |zcu| {
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = .main };
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
if (comp.incremental) {
const update_zir_refs_node = main_progress_node.start("Update ZIR References", 0);
defer update_zir_refs_node.end();
@ -3683,14 +3686,16 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
const named_frame = tracy.namedFrame("analyze_func");
defer named_frame.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.ensureFuncBodyAnalyzed(func) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
};
},
.analyze_cau => |cau_index| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.ensureCauAnalyzed(cau_index) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
@ -3719,7 +3724,8 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
const named_frame = tracy.namedFrame("resolve_type_fully");
defer named_frame.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
Type.fromInterned(ty).resolveFully(pt) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
@ -3729,7 +3735,8 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
const named_frame = tracy.namedFrame("analyze_mod");
defer named_frame.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.semaPkg(mod) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
@ -4183,7 +4190,8 @@ fn workerAstGenFile(
const child_prog_node = prog_node.start(file.sub_file_path, 0);
defer child_prog_node.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.astGenFile(file, path_digest) catch |err| switch (err) {
error.AnalysisFail => return,
else => {

View File

@ -1580,6 +1580,8 @@ pub const String = enum(u32) {
const strings = ip.getLocalShared(unwrapped_string.tid).strings.acquire();
return strings.view().items(.@"0")[unwrapped_string.index..];
}
const debug_state = InternPool.debug_state;
};
/// An index into `strings` which might be `none`.
@ -1596,6 +1598,8 @@ pub const OptionalString = enum(u32) {
pub fn toSlice(string: OptionalString, len: u64, ip: *const InternPool) ?[]const u8 {
return (string.unwrap() orelse return null).toSlice(len, ip);
}
const debug_state = InternPool.debug_state;
};
/// An index into `strings`.
@ -1692,6 +1696,8 @@ pub const NullTerminatedString = enum(u32) {
pub fn fmt(string: NullTerminatedString, ip: *const InternPool) std.fmt.Formatter(format) {
return .{ .data = .{ .string = string, .ip = ip } };
}
const debug_state = InternPool.debug_state;
};
/// An index into `strings` which might be `none`.
@ -1708,6 +1714,8 @@ pub const OptionalNullTerminatedString = enum(u32) {
pub fn toSlice(string: OptionalNullTerminatedString, ip: *const InternPool) ?[:0]const u8 {
return (string.unwrap() orelse return null).toSlice(ip);
}
const debug_state = InternPool.debug_state;
};
/// A single value captured in the closure of a namespace type. This is not a plain
@ -4519,6 +4527,8 @@ pub const Index = enum(u32) {
.data_ptr = &slice.items(.data)[unwrapped.index],
};
}
const debug_state = InternPool.debug_state;
};
pub fn unwrap(index: Index, ip: *const InternPool) Unwrapped {
return if (single_threaded) .{
@ -4532,7 +4542,6 @@ pub const Index = enum(u32) {
/// This function is used in the debugger pretty formatters in tools/ to fetch the
/// Tag to encoding mapping to facilitate fancy debug printing for this type.
/// TODO merge this with `Tag.Payload`.
fn dbHelper(self: *Index, tag_to_encoding_map: *struct {
const DataIsIndex = struct { data: Index };
const DataIsExtraIndexOfEnumExplicit = struct {
@ -4689,44 +4698,38 @@ pub const Index = enum(u32) {
}
}
}
comptime {
if (!builtin.strip_debug_info) switch (builtin.zig_backend) {
.stage2_llvm => _ = &dbHelper,
.stage2_x86_64 => {
for (@typeInfo(Tag).@"enum".fields) |tag| {
if (!@hasField(@TypeOf(Tag.encodings), tag.name)) {
if (false) @compileLog("missing: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name);
continue;
}
const encoding = @field(Tag.encodings, tag.name);
for (@typeInfo(encoding.trailing).@"struct".fields) |field| {
struct {
fn checkConfig(name: []const u8) void {
if (!@hasField(@TypeOf(encoding.config), name)) @compileError("missing field: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name ++ ".config.@\"" ++ name ++ "\"");
const FieldType = @TypeOf(@field(encoding.config, name));
if (@typeInfo(FieldType) != .enum_literal) @compileError("expected enum literal: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name ++ ".config.@\"" ++ name ++ "\": " ++ @typeName(FieldType));
.stage2_x86_64 => for (@typeInfo(Tag).@"enum".fields) |tag| {
if (!@hasField(@TypeOf(Tag.encodings), tag.name)) @compileLog("missing: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name);
const encoding = @field(Tag.encodings, tag.name);
if (@hasField(@TypeOf(encoding), "trailing")) for (@typeInfo(encoding.trailing).@"struct".fields) |field| {
struct {
fn checkConfig(name: []const u8) void {
if (!@hasField(@TypeOf(encoding.config), name)) @compileError("missing field: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name ++ ".config.@\"" ++ name ++ "\"");
const FieldType = @TypeOf(@field(encoding.config, name));
if (@typeInfo(FieldType) != .enum_literal) @compileError("expected enum literal: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name ++ ".config.@\"" ++ name ++ "\": " ++ @typeName(FieldType));
}
fn checkField(name: []const u8, Type: type) void {
switch (@typeInfo(Type)) {
.int => {},
.@"enum" => {},
.@"struct" => |info| assert(info.layout == .@"packed"),
.optional => |info| {
checkConfig(name ++ ".?");
checkField(name ++ ".?", info.child);
},
.pointer => |info| {
assert(info.size == .Slice);
checkConfig(name ++ ".len");
checkField(name ++ "[0]", info.child);
},
else => @compileError("unsupported type: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name ++ "." ++ name ++ ": " ++ @typeName(Type)),
}
fn checkField(name: []const u8, Type: type) void {
switch (@typeInfo(Type)) {
.int => {},
.@"enum" => {},
.@"struct" => |info| assert(info.layout == .@"packed"),
.optional => |info| {
checkConfig(name ++ ".?");
checkField(name ++ ".?", info.child);
},
.pointer => |info| {
assert(info.size == .Slice);
checkConfig(name ++ ".len");
checkField(name ++ "[0]", info.child);
},
else => @compileError("unsupported type: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name ++ "." ++ name ++ ": " ++ @typeName(Type)),
}
}
}.checkField("trailing." ++ field.name, field.type);
}
}
}
}.checkField("trailing." ++ field.name, field.type);
};
},
else => {},
};
@ -5035,7 +5038,6 @@ pub const Tag = enum(u8) {
/// data is payload index to `EnumExplicit`.
type_enum_nonexhaustive,
/// A type that can be represented with only an enum tag.
/// data is SimpleType enum value.
simple_type,
/// An opaque type.
/// data is index of Tag.TypeOpaque in extra.
@ -5064,7 +5066,6 @@ pub const Tag = enum(u8) {
/// Untyped `undefined` is stored instead via `simple_value`.
undef,
/// A value that can be represented with only an enum tag.
/// data is SimpleValue enum value.
simple_value,
/// A pointer to a `Nav`.
/// data is extra index of `PtrNav`, which contains the type and address.
@ -5244,95 +5245,85 @@ pub const Tag = enum(u8) {
const Union = Key.Union;
const TypePointer = Key.PtrType;
fn Payload(comptime tag: Tag) type {
return switch (tag) {
.removed => unreachable,
.type_int_signed => unreachable,
.type_int_unsigned => unreachable,
.type_array_big => Array,
.type_array_small => Vector,
.type_vector => Vector,
.type_pointer => TypePointer,
.type_slice => unreachable,
.type_optional => unreachable,
.type_anyframe => unreachable,
.type_error_union => ErrorUnionType,
.type_anyerror_union => unreachable,
.type_error_set => ErrorSet,
.type_inferred_error_set => unreachable,
.type_enum_auto => EnumAuto,
.type_enum_explicit => EnumExplicit,
.type_enum_nonexhaustive => EnumExplicit,
.simple_type => unreachable,
.type_opaque => TypeOpaque,
.type_struct => TypeStruct,
.type_struct_packed, .type_struct_packed_inits => TypeStructPacked,
.type_tuple => TypeTuple,
.type_union => TypeUnion,
.type_function => TypeFunction,
.undef => unreachable,
.simple_value => unreachable,
.ptr_nav => PtrNav,
.ptr_comptime_alloc => PtrComptimeAlloc,
.ptr_uav => PtrUav,
.ptr_uav_aligned => PtrUavAligned,
.ptr_comptime_field => PtrComptimeField,
.ptr_int => PtrInt,
.ptr_eu_payload => PtrBase,
.ptr_opt_payload => PtrBase,
.ptr_elem => PtrBaseIndex,
.ptr_field => PtrBaseIndex,
.ptr_slice => PtrSlice,
.opt_payload => TypeValue,
.opt_null => unreachable,
.int_u8 => unreachable,
.int_u16 => unreachable,
.int_u32 => unreachable,
.int_i32 => unreachable,
.int_usize => unreachable,
.int_comptime_int_u32 => unreachable,
.int_comptime_int_i32 => unreachable,
.int_small => IntSmall,
.int_positive => unreachable,
.int_negative => unreachable,
.int_lazy_align => IntLazy,
.int_lazy_size => IntLazy,
.error_set_error => Error,
.error_union_error => Error,
.error_union_payload => TypeValue,
.enum_literal => unreachable,
.enum_tag => EnumTag,
.float_f16 => unreachable,
.float_f32 => unreachable,
.float_f64 => unreachable,
.float_f80 => unreachable,
.float_f128 => unreachable,
.float_c_longdouble_f80 => unreachable,
.float_c_longdouble_f128 => unreachable,
.float_comptime_float => unreachable,
.variable => Variable,
.@"extern" => Extern,
.func_decl => FuncDecl,
.func_instance => FuncInstance,
.func_coerced => FuncCoerced,
.only_possible_value => unreachable,
.union_value => Union,
.bytes => Bytes,
.aggregate => Aggregate,
.repeated => Repeated,
.memoized_call => MemoizedCall,
};
}
const enum_explicit_encoding = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = EnumExplicit,
.trailing = struct {
owner_union: Index,
cau: ?Cau.Index,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_names: []NullTerminatedString,
tag_values: []Index,
},
.config = .{
.@"trailing.owner_union.?" = .@"payload.zir_index == .none",
.@"trailing.cau.?" = .@"payload.zir_index != .none",
.@"trailing.captures.?" = .@"payload.captures_len < 0xffffffff",
.@"trailing.captures.?.len" = .@"payload.captures_len",
.@"trailing.type_hash.?" = .@"payload.captures_len == 0xffffffff",
.@"trailing.field_names.len" = .@"payload.fields_len",
.@"trailing.tag_values.len" = .@"payload.fields_len",
},
};
const encodings = .{
.removed = .{},
.type_int_signed = .{ .summary = .@"i{.data%value}", .data = u32 },
.type_int_unsigned = .{ .summary = .@"u{.data%value}", .data = u32 },
.type_array_big = .{ .summary = .@"[{.payload.len1%value} << 32 | {.payload.len0%value}:{.payload.sentinel%summary}]{.payload.child%summary}", .payload = Array },
.type_array_small = .{ .summary = .@"[{.payload.len%value}]{.payload.child%summary}", .payload = Vector },
.type_vector = .{ .summary = .@"@Vector({.payload.len%value}, {.payload.child%summary})", .payload = Vector },
.type_pointer = .{ .summary = .@"*... {.payload.child%summary}", .payload = TypePointer },
.type_slice = .{ .summary = .@"[]... {.data.unwrapped.payload.child%summary}", .data = Index },
.type_optional = .{ .summary = .@"?{.data%summary}", .data = Index },
.type_anyframe = .{ .summary = .@"anyframe->{.data%summary}", .data = Index },
.type_error_union = .{ .summary = .@"{.payload.error_set_type%summary}!{.payload.payload_type%summary}", .payload = ErrorUnionType },
.type_anyerror_union = .{ .summary = .@"anyerror!{.data%summary}", .data = Index },
.type_error_set = .{ .summary = .@"error{...}", .payload = ErrorSet },
.type_inferred_error_set = .{ .summary = .@"@typeInfo(@typeInfo(@TypeOf({.data%summary})).@\"fn\".return_type.?).error_union.error_set", .data = Index },
.type_enum_auto = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = EnumAuto,
.trailing = struct {
owner_union: ?Index,
cau: ?Cau.Index,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_names: []NullTerminatedString,
},
.config = .{
.@"trailing.owner_union.?" = .@"payload.zir_index == .none",
.@"trailing.cau.?" = .@"payload.zir_index != .none",
.@"trailing.captures.?" = .@"payload.captures_len < 0xffffffff",
.@"trailing.captures.?.len" = .@"payload.captures_len",
.@"trailing.type_hash.?" = .@"payload.captures_len == 0xffffffff",
.@"trailing.field_names.len" = .@"payload.fields_len",
},
},
.type_enum_explicit = enum_explicit_encoding,
.type_enum_nonexhaustive = enum_explicit_encoding,
.simple_type = .{ .summary = .@"{.index%value#.}", .index = SimpleType },
.type_opaque = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeOpaque,
.trailing = struct {
captures: []CaptureValue,
},
.config = .{
.@"trailing.captures.len" = .@"payload.captures_len",
},
},
.type_struct = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeStruct,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_names_map: OptionalMapIndex,
field_names: []NullTerminatedString,
field_inits: ?[]Index,
field_aligns: ?[]Alignment,
field_is_comptime_bits: ?[]u32,
@ -5342,9 +5333,10 @@ pub const Tag = enum(u8) {
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_names.len" = .@"payload.fields_len",
.@"trailing.field_inits.?" = .@"payload.flags.any_default_inits",
.@"trailing.field_inits.?.len" = .@"payload.fields_len",
.@"trailing.field_aligns.?" = .@"payload.flags.any_aligned_fields",
@ -5356,7 +5348,185 @@ pub const Tag = enum(u8) {
.@"trailing.field_offset.len" = .@"payload.fields_len",
},
},
.type_struct_packed = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeStructPacked,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_names: []NullTerminatedString,
},
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.is_flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_names.len" = .@"payload.fields_len",
},
},
.type_struct_packed_inits = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeStructPacked,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_names: []NullTerminatedString,
field_inits: []Index,
},
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.is_flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_names.len" = .@"payload.fields_len",
.@"trailing.field_inits.len" = .@"payload.fields_len",
},
},
.type_tuple = .{
.summary = .@"struct {...}",
.payload = TypeTuple,
.trailing = struct {
field_types: []Index,
field_values: []Index,
},
.config = .{
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_values.len" = .@"payload.fields_len",
},
},
.type_union = .{
.summary = .@"{.payload.name%summary#\"#\"}",
.payload = TypeUnion,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_aligns: []Alignment,
},
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.is_flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_aligns.len" = .@"payload.fields_len",
},
},
.type_function = .{
.summary = .@"fn (...) ... {.payload.return_type%summary}",
.payload = TypeFunction,
.trailing = struct {
param_comptime_bits: ?[]u32,
param_noalias_bits: ?[]u32,
param_type: []Index,
},
.config = .{
.@"trailing.param_comptime_bits.?" = .@"payload.flags.has_comptime_bits",
.@"trailing.param_comptime_bits.?.len" = .@"(payload.params_len + 31) / 32",
.@"trailing.param_noalias_bits.?" = .@"payload.flags.has_noalias_bits",
.@"trailing.param_noalias_bits.?.len" = .@"(payload.params_len + 31) / 32",
.@"trailing.param_type.len" = .@"payload.params_len",
},
},
.undef = .{ .summary = .@"@as({.data%summary}, undefined)", .data = Index },
.simple_value = .{ .summary = .@"{.index%value#.}", .index = SimpleValue },
.ptr_nav = .{ .payload = PtrNav },
.ptr_comptime_alloc = .{ .payload = PtrComptimeAlloc },
.ptr_uav = .{ .payload = PtrUav },
.ptr_uav_aligned = .{ .payload = PtrUavAligned },
.ptr_comptime_field = .{ .payload = PtrComptimeField },
.ptr_int = .{ .payload = PtrInt },
.ptr_eu_payload = .{ .payload = PtrBase },
.ptr_opt_payload = .{ .payload = PtrBase },
.ptr_elem = .{ .payload = PtrBaseIndex },
.ptr_field = .{ .payload = PtrBaseIndex },
.ptr_slice = .{ .payload = PtrSlice },
.opt_payload = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.val%summary})", .payload = TypeValue },
.opt_null = .{ .summary = .@"@as({.data%summary}, null)", .data = Index },
.int_u8 = .{ .summary = .@"@as(u8, {.data%value})", .data = u8 },
.int_u16 = .{ .summary = .@"@as(u16, {.data%value})", .data = u16 },
.int_u32 = .{ .summary = .@"@as(u32, {.data%value})", .data = u32 },
.int_i32 = .{ .summary = .@"@as(i32, {.data%value})", .data = i32 },
.int_usize = .{ .summary = .@"@as(usize, {.data%value})", .data = u32 },
.int_comptime_int_u32 = .{ .summary = .@"{.data%value}", .data = u32 },
.int_comptime_int_i32 = .{ .summary = .@"{.data%value}", .data = i32 },
.int_small = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.value%value})", .payload = IntSmall },
.int_positive = .{},
.int_negative = .{},
.int_lazy_align = .{ .summary = .@"@as({.payload.ty%summary}, @alignOf({.payload.lazy_ty%summary}))", .payload = IntLazy },
.int_lazy_size = .{ .summary = .@"@as({.payload.ty%summary}, @sizeOf({.payload.lazy_ty%summary}))", .payload = IntLazy },
.error_set_error = .{ .summary = .@"@as({.payload.ty%summary}, error.@{.payload.name%summary})", .payload = Error },
.error_union_error = .{ .summary = .@"@as({.payload.ty%summary}, error.@{.payload.name%summary})", .payload = Error },
.error_union_payload = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.val%summary})", .payload = TypeValue },
.enum_literal = .{ .summary = .@".@{.data%summary}", .data = NullTerminatedString },
.enum_tag = .{ .summary = .@"@as({.payload.ty%summary}, @enumFromInt({.payload.int%summary}))", .payload = EnumTag },
.float_f16 = .{ .summary = .@"@as(f16, {.data%value})", .data = f16 },
.float_f32 = .{ .summary = .@"@as(f32, {.data%value})", .data = f32 },
.float_f64 = .{ .summary = .@"@as(f64, {.payload%value})", .payload = f64 },
.float_f80 = .{ .summary = .@"@as(f80, {.payload%value})", .payload = f80 },
.float_f128 = .{ .summary = .@"@as(f128, {.payload%value})", .payload = f128 },
.float_c_longdouble_f80 = .{ .summary = .@"@as(c_longdouble, {.payload%value})", .payload = f80 },
.float_c_longdouble_f128 = .{ .summary = .@"@as(c_longdouble, {.payload%value})", .payload = f128 },
.float_comptime_float = .{ .summary = .@"{.payload%value}", .payload = f128 },
.variable = .{ .payload = Variable },
.@"extern" = .{ .payload = Extern },
.func_decl = .{
.payload = FuncDecl,
.trailing = struct {
inferred_error_set: ?Index,
},
.config = .{
.@"trailing.inferred_error_set.?" = .@"payload.analysis.inferred_error_set",
},
},
.func_instance = .{
.payload = FuncInstance,
.trailing = struct {
inferred_error_set: ?Index,
param_values: []Index,
},
.config = .{
.@"trailing.inferred_error_set.?" = .@"payload.analysis.inferred_error_set",
.@"trailing.param_values.len" = .@"payload.ty.payload.params_len",
},
},
.func_coerced = .{ .payload = FuncCoerced },
.only_possible_value = .{ .summary = .@"@as({.data%summary}, undefined)", .data = Index },
.union_value = .{ .summary = .@"@as({.payload.ty%summary}, {})", .payload = Union },
.bytes = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.bytes%summary}.*)", .payload = Bytes },
.aggregate = .{
.summary = .@"@as({.payload.ty%summary}, .{...})",
.payload = Aggregate,
.trailing = struct {
elements: []Index,
},
.config = .{
.@"trailing.elements.len" = .@"payload.ty.payload.fields_len",
},
},
.repeated = .{ .summary = .@"@as({.payload.ty%summary}, @splat({.payload.elem_val%summary}))", .payload = Repeated },
.memoized_call = .{
.payload = MemoizedCall,
.trailing = struct {
arg_values: []Index,
},
.config = .{
.@"trailing.arg_values.len" = .@"payload.args_len",
},
},
};
fn Payload(comptime tag: Tag) type {
return @field(encodings, @tagName(tag)).payload;
}
pub const Variable = struct {
ty: Index,
@ -6271,6 +6441,8 @@ pub fn init(ip: *InternPool, gpa: Allocator, available_threads: usize) !void {
}
pub fn deinit(ip: *InternPool, gpa: Allocator) void {
if (!builtin.strip_debug_info) std.debug.assert(debug_state.intern_pool == null);
ip.file_deps.deinit(gpa);
ip.src_hash_deps.deinit(gpa);
ip.nav_val_deps.deinit(gpa);
@ -6311,6 +6483,28 @@ pub fn deinit(ip: *InternPool, gpa: Allocator) void {
ip.* = undefined;
}
/// Installs `ip` as the thread-local intern pool visible to the debugger
/// pretty printers (see `debug_state`). Asserts that no other pool is already
/// active on this thread; balance with a later `deactivate`. No-op in builds
/// with stripped debug info.
pub fn activate(ip: *const InternPool) void {
if (builtin.strip_debug_info) return;
// Reference each `debug_state` decl so semantic analysis keeps them in the
// binary for the debugger helpers; they are otherwise unused by the compiler.
_ = Index.Unwrapped.debug_state;
_ = String.debug_state;
_ = OptionalString.debug_state;
_ = NullTerminatedString.debug_state;
_ = OptionalNullTerminatedString.debug_state;
std.debug.assert(debug_state.intern_pool == null);
debug_state.intern_pool = ip;
}
/// Clears the thread-local debugger intern pool previously installed by
/// `activate`. Asserts that `ip` is the pool currently active on this thread.
/// No-op in builds with stripped debug info.
pub fn deactivate(ip: *const InternPool) void {
if (builtin.strip_debug_info) return;
std.debug.assert(debug_state.intern_pool == ip);
debug_state.intern_pool = null;
}
/// For debugger access only; never read by normal compiler execution.
const debug_state = struct {
// The pool the current thread is operating on: set by `activate`, cleared
// by `deactivate`, and looked up by the debugger pretty printers.
threadlocal var intern_pool: ?*const InternPool = null;
};
pub fn indexToKey(ip: *const InternPool, index: Index) Key {
assert(index != .none);
const unwrapped_index = index.unwrap(ip);

View File

@ -891,7 +891,7 @@ pub const ResolveStratLazy = enum {
};
/// The chosen strategy can be easily optimized away in release builds.
/// However, in debug builds, it helps to avoid acceidentally resolving types in backends.
/// However, in debug builds, it helps to avoid accidentally resolving types in backends.
pub const ResolveStrat = enum {
/// Assert that all necessary resolution is completed.
/// Backends should typically use this, since they must not perform type resolution.

View File

@ -2169,90 +2169,92 @@ pub fn init(zcu: *Zcu, thread_count: usize) !void {
}
pub fn deinit(zcu: *Zcu) void {
const pt: Zcu.PerThread = .{ .tid = .main, .zcu = zcu };
const gpa = zcu.gpa;
{
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
if (zcu.llvm_object) |llvm_object| llvm_object.deinit();
if (zcu.llvm_object) |llvm_object| llvm_object.deinit();
for (zcu.import_table.keys()) |key| {
gpa.free(key);
for (zcu.import_table.keys()) |key| {
gpa.free(key);
}
for (zcu.import_table.values()) |file_index| {
pt.destroyFile(file_index);
}
zcu.import_table.deinit(gpa);
for (zcu.embed_table.keys(), zcu.embed_table.values()) |path, embed_file| {
gpa.free(path);
gpa.destroy(embed_file);
}
zcu.embed_table.deinit(gpa);
zcu.compile_log_text.deinit(gpa);
zcu.local_zir_cache.handle.close();
zcu.global_zir_cache.handle.close();
for (zcu.failed_analysis.values()) |value| {
value.destroy(gpa);
}
for (zcu.failed_codegen.values()) |value| {
value.destroy(gpa);
}
zcu.analysis_in_progress.deinit(gpa);
zcu.failed_analysis.deinit(gpa);
zcu.transitive_failed_analysis.deinit(gpa);
zcu.failed_codegen.deinit(gpa);
for (zcu.failed_files.values()) |value| {
if (value) |msg| msg.destroy(gpa);
}
zcu.failed_files.deinit(gpa);
for (zcu.failed_embed_files.values()) |msg| {
msg.destroy(gpa);
}
zcu.failed_embed_files.deinit(gpa);
for (zcu.failed_exports.values()) |value| {
value.destroy(gpa);
}
zcu.failed_exports.deinit(gpa);
for (zcu.cimport_errors.values()) |*errs| {
errs.deinit(gpa);
}
zcu.cimport_errors.deinit(gpa);
zcu.compile_log_sources.deinit(gpa);
zcu.all_exports.deinit(gpa);
zcu.free_exports.deinit(gpa);
zcu.single_exports.deinit(gpa);
zcu.multi_exports.deinit(gpa);
zcu.potentially_outdated.deinit(gpa);
zcu.outdated.deinit(gpa);
zcu.outdated_ready.deinit(gpa);
zcu.retryable_failures.deinit(gpa);
zcu.test_functions.deinit(gpa);
for (zcu.global_assembly.values()) |s| {
gpa.free(s);
}
zcu.global_assembly.deinit(gpa);
zcu.reference_table.deinit(gpa);
zcu.all_references.deinit(gpa);
zcu.free_references.deinit(gpa);
zcu.type_reference_table.deinit(gpa);
zcu.all_type_references.deinit(gpa);
zcu.free_type_references.deinit(gpa);
if (zcu.resolved_references) |*r| r.deinit(gpa);
}
for (zcu.import_table.values()) |file_index| {
pt.destroyFile(file_index);
}
zcu.import_table.deinit(gpa);
for (zcu.embed_table.keys(), zcu.embed_table.values()) |path, embed_file| {
gpa.free(path);
gpa.destroy(embed_file);
}
zcu.embed_table.deinit(gpa);
zcu.compile_log_text.deinit(gpa);
zcu.local_zir_cache.handle.close();
zcu.global_zir_cache.handle.close();
for (zcu.failed_analysis.values()) |value| {
value.destroy(gpa);
}
for (zcu.failed_codegen.values()) |value| {
value.destroy(gpa);
}
zcu.analysis_in_progress.deinit(gpa);
zcu.failed_analysis.deinit(gpa);
zcu.transitive_failed_analysis.deinit(gpa);
zcu.failed_codegen.deinit(gpa);
for (zcu.failed_files.values()) |value| {
if (value) |msg| msg.destroy(gpa);
}
zcu.failed_files.deinit(gpa);
for (zcu.failed_embed_files.values()) |msg| {
msg.destroy(gpa);
}
zcu.failed_embed_files.deinit(gpa);
for (zcu.failed_exports.values()) |value| {
value.destroy(gpa);
}
zcu.failed_exports.deinit(gpa);
for (zcu.cimport_errors.values()) |*errs| {
errs.deinit(gpa);
}
zcu.cimport_errors.deinit(gpa);
zcu.compile_log_sources.deinit(gpa);
zcu.all_exports.deinit(gpa);
zcu.free_exports.deinit(gpa);
zcu.single_exports.deinit(gpa);
zcu.multi_exports.deinit(gpa);
zcu.potentially_outdated.deinit(gpa);
zcu.outdated.deinit(gpa);
zcu.outdated_ready.deinit(gpa);
zcu.retryable_failures.deinit(gpa);
zcu.test_functions.deinit(gpa);
for (zcu.global_assembly.values()) |s| {
gpa.free(s);
}
zcu.global_assembly.deinit(gpa);
zcu.reference_table.deinit(gpa);
zcu.all_references.deinit(gpa);
zcu.free_references.deinit(gpa);
zcu.type_reference_table.deinit(gpa);
zcu.all_type_references.deinit(gpa);
zcu.free_type_references.deinit(gpa);
if (zcu.resolved_references) |*r| r.deinit(gpa);
zcu.intern_pool.deinit(gpa);
}

View File

@ -35,6 +35,15 @@ tid: Id,
pub const IdBacking = u7;
pub const Id = if (InternPool.single_threaded) enum { main } else enum(IdBacking) { main, _ };
/// Constructs the `PerThread` handle for `tid` and registers `zcu`'s intern
/// pool with the debugger helpers for this thread. Callers pair this with
/// `deactivate`, typically via `defer pt.deactivate()`.
pub fn activate(zcu: *Zcu, tid: Id) Zcu.PerThread {
zcu.intern_pool.activate();
return .{ .zcu = zcu, .tid = tid };
}
/// Releases the thread-local debugger registration made by `activate`.
pub fn deactivate(pt: Zcu.PerThread) void {
pt.zcu.intern_pool.deactivate();
}
fn deinitFile(pt: Zcu.PerThread, file_index: Zcu.File.Index) void {
const zcu = pt.zcu;
const gpa = zcu.gpa;

View File

@ -1537,20 +1537,23 @@ pub fn doTask(comp: *Compilation, tid: usize, task: Task) void {
};
},
.codegen_nav => |nav_index| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.linkerUpdateNav(nav_index) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailure(),
};
},
.codegen_func => |func| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
// This call takes ownership of `func.air`.
pt.linkerUpdateFunc(func.func, func.air) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailure(),
};
},
.codegen_type => |ty| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.linkerUpdateContainerType(ty) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailure(),
};

View File

@ -419,7 +419,8 @@ pub fn flushModule(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
const gpa = comp.gpa;
const zcu = self.base.comp.zcu.?;
const ip = &zcu.intern_pool;
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = tid };
const pt: Zcu.PerThread = .activate(zcu, tid);
defer pt.deactivate();
{
var i: usize = 0;

View File

@ -2218,10 +2218,11 @@ pub fn flushModule(coff: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
const sub_prog_node = prog_node.start("COFF Flush", 0);
defer sub_prog_node.end();
const pt: Zcu.PerThread = .{
.zcu = comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
.tid = tid,
};
const pt: Zcu.PerThread = .activate(
comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
tid,
);
defer pt.deactivate();
if (coff.lazy_syms.getPtr(.anyerror_type)) |metadata| {
// Most lazy symbols can be updated on first use, but

View File

@ -267,7 +267,8 @@ pub fn deinit(self: *ZigObject, allocator: Allocator) void {
pub fn flush(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
if (self.lazy_syms.getPtr(.anyerror_type)) |metadata| {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(elf_file.base.comp.zcu.?, tid);
defer pt.deactivate();
// Most lazy symbols can be updated on first use, but
// anyerror needs to wait for everything to be flushed.
@ -296,7 +297,8 @@ pub fn flush(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
}
if (build_options.enable_logging) {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(elf_file.base.comp.zcu.?, tid);
defer pt.deactivate();
for (self.navs.keys(), self.navs.values()) |nav_index, meta| {
checkNavAllocated(pt, nav_index, meta);
}
@ -306,7 +308,8 @@ pub fn flush(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
}
if (self.dwarf) |*dwarf| {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(elf_file.base.comp.zcu.?, tid);
defer pt.deactivate();
try dwarf.flushModule(pt);
const gpa = elf_file.base.comp.gpa;

View File

@ -549,7 +549,8 @@ pub fn getInputSection(self: ZigObject, atom: Atom, macho_file: *MachO) macho.se
pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
if (self.lazy_syms.getPtr(.anyerror_type)) |metadata| {
const pt: Zcu.PerThread = .{ .zcu = macho_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(macho_file.base.comp.zcu.?, tid);
defer pt.deactivate();
// Most lazy symbols can be updated on first use, but
// anyerror needs to wait for everything to be flushed.
@ -578,7 +579,8 @@ pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id)
}
if (self.dwarf) |*dwarf| {
const pt: Zcu.PerThread = .{ .zcu = macho_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(macho_file.base.comp.zcu.?, tid);
defer pt.deactivate();
try dwarf.flushModule(pt);
self.debug_abbrev_dirty = false;

View File

@ -604,10 +604,11 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
defer assert(self.hdr.entry != 0x0);
const pt: Zcu.PerThread = .{
.zcu = self.base.comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
.tid = tid,
};
const pt: Zcu.PerThread = .activate(
self.base.comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
tid,
);
defer pt.deactivate();
// finish up the lazy syms
if (self.lazy_syms.getPtr(.none)) |metadata| {

View File

@ -589,7 +589,8 @@ fn populateErrorNameTable(zig_object: *ZigObject, wasm: *Wasm, tid: Zcu.PerThrea
// Addend for each relocation to the table
var addend: u32 = 0;
const pt: Zcu.PerThread = .{ .zcu = wasm.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(wasm.base.comp.zcu.?, tid);
defer pt.deactivate();
const slice_ty = Type.slice_const_u8_sentinel_0;
const atom = wasm.getAtomPtr(atom_index);
{

View File

@ -13,21 +13,35 @@ page_size = 1 << 12
def log2_int(i):
    """Floor of the base-2 logarithm of a positive integer."""
    return i.bit_length() - 1
def create_struct(name, struct_type, **inits):
struct_bytes = bytearray(struct_type.size)
struct_data = lldb.SBData()
def create_struct(parent, name, struct_type, inits):
struct_bytes, struct_data = bytearray(struct_type.size), lldb.SBData()
for field in struct_type.fields:
field_size = field.type.size
field_bytes = inits[field.name].data.uint8[:field_size]
field_init = inits[field.name]
field_init_type = type(field_init)
if field_init_type == bool:
field_bytes = bytes([field_init])
elif field_init_type == int:
match struct_data.byte_order:
case lldb.eByteOrderLittle:
byte_order = 'little'
case lldb.eByteOrderBig:
byte_order = 'big'
field_bytes = field_init.to_bytes(field_size, byte_order, signed=field.type.GetTypeFlags() & lldb.eTypeIsSigned != 0)
elif field_init_type == lldb.SBValue:
field_bytes = field_init.data.uint8
else: return
match struct_data.byte_order:
case lldb.eByteOrderLittle:
field_bytes = field_bytes[:field_size]
field_start = field.byte_offset
struct_bytes[field_start:field_start + len(field_bytes)] = field_bytes
case lldb.eByteOrderBig:
field_bytes = field_bytes[-field_size:]
field_end = field.byte_offset + field_size
struct_bytes[field_end - len(field_bytes):field_end] = field_bytes
struct_data.SetData(lldb.SBError(), struct_bytes, struct_data.byte_order, struct_data.GetAddressByteSize())
return next(iter(inits.values())).CreateValueFromData(name, struct_data, struct_type)
return parent.CreateValueFromData(name, struct_data, struct_type)
# Define Zig Language
@ -292,6 +306,8 @@ class std_MultiArrayList_Slice_SynthProvider:
return self.ptrs.CreateValueFromData('[%d]' % index, data, self.entry_type)
except: return None
def MultiArrayList_Entry(type):
    """Regex matching the compiler-generated Entry struct type name for std.MultiArrayList(type)."""
    return r'^multi_array_list\.MultiArrayList\(%s\)\.Entry__struct_[1-9][0-9]*$' % type
class std_HashMapUnmanaged_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
@ -702,7 +718,7 @@ class root_InternPool_Local_List_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
capacity = self.value.EvaluateExpression('@as(*@This().Header, @alignCast(@ptrCast(@this().bytes - @This().bytes_offset))).capacity')
self.view = create_struct('view', self.value.type.FindDirectNestedType('View'), bytes=self.value.GetChildMemberWithName('bytes'), len=capacity, capacity=capacity).GetNonSyntheticValue()
self.view = create_struct(self.value, '.view', self.value.type.FindDirectNestedType('View'), { 'bytes': self.value.GetChildMemberWithName('bytes'), 'len': capacity, 'capacity': capacity }).GetNonSyntheticValue()
def has_children(self): return True
def num_children(self): return 1
def get_child_index(self, name):
@ -712,6 +728,160 @@ class root_InternPool_Local_List_SynthProvider:
try: return (self.view,)[index]
except: pass
expr_path_re = re.compile(r'\{([^}]+)%([^%#}]+)(?:#([^%#}]+))?\}')
def root_InternPool_Index_SummaryProvider(value, _=None):
    """Summarize an InternPool.Index via the summary template on its tag's encoding.

    Falls back to the bare tag name when the encoding declares no summary.
    """
    unwrapped = value.GetChildMemberWithName('unwrapped')
    tag = unwrapped.GetChildMemberWithName('tag')
    tag_name = tag.value
    encodings = tag.CreateValueFromType(tag.type).GetChildMemberWithName('encodings')
    summary = encodings.GetChildMemberWithName(tag_name.removeprefix('.')).GetChildMemberWithName('summary')
    if not summary:
        return tag_name
    # The template is stored as a (possibly @"..."-quoted) Zig enum literal; unquote it.
    template = summary.summary.removeprefix('.').removeprefix('@"').removesuffix('"').replace(r'\"', '"')
    def expand(matchobj):
        # {expr%attr#strip}: read `attr` of the value at expression path `expr`,
        # then strip the optional `#strip` characters from the result.
        child = unwrapped.GetValueForExpressionPath(matchobj[1])
        return getattr(child, matchobj[2]).strip(matchobj[3] or '')
    return expr_path_re.sub(expand, template)
class root_InternPool_Index_SynthProvider:
    """Synthetic provider exposing the (tid, index) decomposition of an InternPool.Index."""
    def __init__(self, value, _=None):
        self.value = value
    def update(self):
        self.unwrapped = None
        wrapped = self.value.unsigned
        # 0xffffffff is the sentinel value (no unwrapped form to show).
        if wrapped == 0xffffffff:
            return
        unwrapped_type = self.value.type.FindDirectNestedType('Unwrapped')
        # Reach the global InternPool through the compiler's debug_state helper.
        ip = self.value.CreateValueFromType(unwrapped_type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
        tid_width = ip.GetChildMemberWithName('tid_width').unsigned
        tid_shift_30 = ip.GetChildMemberWithName('tid_shift_30').unsigned
        # Split the wrapped value into its thread id (high bits) and index (low bits).
        self.unwrapped = create_struct(self.value, '.unwrapped', unwrapped_type, {
            'tid': wrapped >> tid_shift_30 & (1 << tid_width) - 1,
            'index': wrapped & (1 << tid_shift_30) - 1,
        })
    def has_children(self):
        return True
    def num_children(self):
        # Reported as 0: the 'unwrapped' child is reachable by name/index but
        # intentionally not advertised in the default child count.
        return 0
    def get_child_index(self, name):
        try:
            return ('unwrapped',).index(name)
        except:
            pass
    def get_child_at_index(self, index):
        try:
            return (self.unwrapped,)[index]
        except:
            pass
class root_InternPool_Index_Unwrapped_SynthProvider:
    # Synthetic provider for InternPool.Index.Unwrapped: decodes the item stored
    # at (tid, index) in the intern pool into tag/index/data/payload/trailing
    # children, driven by the per-tag `encodings` metadata the compiler exposes
    # for debuggers through debug_state.
    def __init__(self, value, _=None): self.value = value
    def update(self):
        self.tag, self.index, self.data, self.payload, self.trailing = None, None, None, None, None
        index = self.value.GetChildMemberWithName('index')
        # Reach the global InternPool through the compiler's debug_state helper.
        ip = self.value.CreateValueFromType(self.value.type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
        # Select the per-thread `shared` storage for this value's tid.
        shared = ip.GetChildMemberWithName('locals').GetSyntheticValue().child[self.value.GetChildMemberWithName('tid').unsigned].GetChildMemberWithName('shared')
        item = shared.GetChildMemberWithName('items').GetChildMemberWithName('view').child[index.unsigned]
        self.tag, item_data = item.GetChildMemberWithName('tag'), item.GetChildMemberWithName('data')
        # Per-tag encoding metadata describing how `data` is to be interpreted.
        encoding = self.tag.CreateValueFromType(self.tag.type).GetChildMemberWithName('encodings').GetChildMemberWithName(self.tag.value.removeprefix('.'))
        encoding_index, encoding_data, encoding_payload, encoding_trailing, encoding_config = encoding.GetChildMemberWithName('index'), encoding.GetChildMemberWithName('data'), encoding.GetChildMemberWithName('payload'), encoding.GetChildMemberWithName('trailing'), encoding.GetChildMemberWithName('config')
        if encoding_index:
            # `data` holds another typed index: reinterpret the significant bytes
            # (low bytes on little-endian, high bytes on big-endian) as that type.
            index_type = encoding_index.GetValueAsType()
            index_bytes, index_data = index.data.uint8, lldb.SBData()
            match index_data.byte_order:
                case lldb.eByteOrderLittle:
                    index_bytes = bytes(index_bytes[:index_type.size])
                case lldb.eByteOrderBig:
                    index_bytes = bytes(index_bytes[-index_type.size:])
            index_data.SetData(lldb.SBError(), index_bytes, index_data.byte_order, index_data.GetAddressByteSize())
            self.index = self.value.CreateValueFromData('.index', index_data, index_type)
        elif encoding_data:
            # `data` is an immediate value of the encoded type; same byte-order
            # trimming as the index case above.
            data_type = encoding_data.GetValueAsType()
            data_bytes, data_data = item_data.data.uint8, lldb.SBData()
            match data_data.byte_order:
                case lldb.eByteOrderLittle:
                    data_bytes = bytes(data_bytes[:data_type.size])
                case lldb.eByteOrderBig:
                    data_bytes = bytes(data_bytes[-data_type.size:])
            data_data.SetData(lldb.SBError(), data_bytes, data_data.byte_order, data_data.GetAddressByteSize())
            self.data = self.value.CreateValueFromData('.data', data_data, data_type)
        elif encoding_payload:
            # `data` is an offset into `extra`; the payload struct's fields are
            # laid out there as consecutive 32-bit words.
            extra = shared.GetChildMemberWithName('extra').GetChildMemberWithName('view').GetChildMemberWithName('0')
            extra_index = item_data.unsigned
            payload_type = encoding_payload.GetValueAsType()
            payload_fields = dict()
            for payload_field in payload_type.fields:
                payload_fields[payload_field.name] = extra.child[extra_index]
                extra_index += 1
            self.payload = create_struct(self.value, '.payload', payload_type, payload_fields)
            if encoding_trailing and encoding_config:
                # Variable-length data follows the payload in `extra`; its shape
                # is determined by config expressions evaluated via eval_config.
                trailing_type = encoding_trailing.GetValueAsType()
                trailing_bytes, trailing_data = bytearray(trailing_type.size), lldb.SBData()
                def eval_config(config_name):
                    # Config entries are stored as Zig enum-literal summaries;
                    # unquote one back into an expression string, then evaluate it.
                    expr = encoding_config.GetChildMemberWithName(config_name).summary.removeprefix('.').removeprefix('@"').removesuffix('"').replace(r'\"', '"')
                    if 'payload.' in expr:
                        # Refers to payload fields: evaluate against self.payload.
                        return self.payload.EvaluateExpression(expr.replace('payload.', '@this().'))
                    elif expr.startswith('trailing.'):
                        # Refers to trailing bytes already decoded: walk the field
                        # path to find its offset/type and re-read from trailing_bytes.
                        field_type, field_byte_offset = trailing_type, 0
                        expr_parts = expr.split('.')
                        for expr_part in expr_parts[1:]:
                            field = next(filter(lambda field: field.name == expr_part, field_type.fields))
                            field_type = field.type
                            field_byte_offset += field.byte_offset
                        field_data = lldb.SBData()
                        field_bytes = trailing_bytes[field_byte_offset:field_byte_offset + field_type.size]
                        field_data.SetData(lldb.SBError(), field_bytes, field_data.byte_order, field_data.GetAddressByteSize())
                        return self.value.CreateValueFromData('.%s' % expr_parts[-1], field_data, field_type)
                    else:
                        # Anything else is evaluated in the current frame's context.
                        return self.value.frame.EvaluateExpression(expr)
                for trailing_field in trailing_type.fields:
                    trailing_field_type = trailing_field.type
                    trailing_field_name = 'trailing.%s' % trailing_field.name
                    trailing_field_byte_offset = trailing_field.byte_offset
                    while True:
                        # Dispatch on the field's shape, recognized by member names.
                        match [trailing_field_type_field.name for trailing_field_type_field in trailing_field_type.fields]:
                            case ['has_value', '?']:
                                # Optional-like wrapper: presence is decided by a
                                # config expression; if present, descend into the
                                # wrapped child type and loop again.
                                has_value_field, child_field = trailing_field_type.fields
                                trailing_field_name = '%s.%s' % (trailing_field_name, child_field.name)
                                match eval_config(trailing_field_name).value:
                                    case 'true':
                                        if has_value_field.type.name == 'bool':
                                            trailing_bytes[trailing_field_byte_offset + has_value_field.byte_offset] = True
                                        trailing_field_type = child_field.type
                                        trailing_field_byte_offset += child_field.byte_offset
                                    case 'false':
                                        break
                            case ['ptr', 'len']:
                                # Slice-like field: point `ptr` at the current
                                # extra word, take `len` from a config expression,
                                # then skip the slice's words in `extra`.
                                ptr_field, len_field = trailing_field_type.fields
                                ptr_field_byte_offset, len_field_byte_offset = trailing_field_byte_offset + ptr_field.byte_offset, trailing_field_byte_offset + len_field.byte_offset
                                trailing_bytes[ptr_field_byte_offset:ptr_field_byte_offset + ptr_field.type.size] = extra.child[extra_index].address_of.data.uint8
                                len_field_value = eval_config('%s.len' % trailing_field_name)
                                len_field_size = len_field.type.size
                                match trailing_data.byte_order:
                                    case lldb.eByteOrderLittle:
                                        len_field_bytes = len_field_value.data.uint8[:len_field_size]
                                        trailing_bytes[len_field_byte_offset:len_field_byte_offset + len(len_field_bytes)] = len_field_bytes
                                    case lldb.eByteOrderBig:
                                        len_field_bytes = len_field_value.data.uint8[-len_field_size:]
                                        len_field_end = len_field_byte_offset + len_field_size
                                        trailing_bytes[len_field_end - len(len_field_bytes):len_field_end] = len_field_bytes
                                # Elements are rounded up to whole 32-bit words.
                                extra_index += (ptr_field.type.GetPointeeType().size * len_field_value.unsigned + 3) // 4
                                break
                            case _:
                                # Plain field: copy it word-by-word out of `extra`.
                                for offset in range(0, trailing_field_type.size, 4):
                                    trailing_bytes[trailing_field_byte_offset + offset:trailing_field_byte_offset + offset + 4] = extra.child[extra_index].data.uint8
                                    extra_index += 1
                                break
                trailing_data.SetData(lldb.SBError(), trailing_bytes, trailing_data.byte_order, trailing_data.GetAddressByteSize())
                self.trailing = self.value.CreateValueFromData('.trailing', trailing_data, trailing_type)
    def has_children(self): return True
    # tag, plus the single index/data/payload child (mutually exclusive),
    # plus the optional trailing child.
    def num_children(self): return 1 + ((self.index or self.data or self.payload) is not None) + (self.trailing is not None)
    def get_child_index(self, name):
        try: return ('tag', 'index' if self.index is not None else 'data' if self.data is not None else 'payload', 'trailing').index(name)
        except: pass
    def get_child_at_index(self, index):
        try: return (self.tag, self.index or self.data or self.payload, self.trailing)[index]
        except: pass
def root_InternPool_String_SummaryProvider(value, _=None):
    """Resolve an InternPool string (tid in high bits, offset in low bits) to a C string."""
    ip = value.CreateValueFromType(value.type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
    tid_shift_32 = ip.GetChildMemberWithName('tid_shift_32').unsigned
    wrapped = value.unsigned
    all_locals = ip.GetChildMemberWithName('locals').GetSyntheticValue()
    local = all_locals.child[wrapped >> tid_shift_32]
    if local is None:
        # No local for this tid (presumably out of range) — fall back to local 0
        # and treat the whole value as an offset into its strings.
        wrapped = 0
        local = all_locals.child[0]
    strings = local.GetChildMemberWithName('shared').GetChildMemberWithName('strings').GetChildMemberWithName('view').GetChildMemberWithName('0')
    string = strings.child[wrapped & (1 << tid_shift_32) - 1].address_of
    string.format = lldb.eFormatCString
    return string.value
# Initialize
def add(debugger, *, category, regex=False, type, identifier=None, synth=False, inline_children=False, expand=False, summary=False):
@ -719,8 +889,6 @@ def add(debugger, *, category, regex=False, type, identifier=None, synth=False,
if summary: debugger.HandleCommand('type summary add --category %s%s%s "%s"' % (category, ' --inline-children' if inline_children else ''.join((' --expand' if expand else '', ' --python-function %s_SummaryProvider' % prefix if summary == True else ' --summary-string "%s"' % summary)), ' --regex' if regex else '', type))
if synth: debugger.HandleCommand('type synthetic add --category %s%s --python-class %s_SynthProvider "%s"' % (category, ' --regex' if regex else '', prefix, type))
def MultiArrayList_Entry(type): return '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type
def __lldb_init_module(debugger, _=None):
# Initialize Zig Categories
debugger.HandleCommand('type category define --language c99 zig.lang zig.std')
@ -765,4 +933,7 @@ def __lldb_init_module(debugger, _=None):
add(debugger, category='zig.stage2', type='arch.x86_64.CodeGen.MCValue', identifier='zig_TaggedUnion', synth=True, inline_children=True, summary=True)
# Initialize Zig Stage2 Compiler (compiled with the self-hosted backend)
add(debugger, category='zig', regex=True, type='^root\\.InternPool\\.Local\\.List\\(.*\\)$', identifier='root_InternPool_Local_List', synth=True, expand=True, summary='capacity=${var%#}')
add(debugger, category='zig', regex=True, type=r'^root\.InternPool\.Local\.List\(.*\)$', identifier='root_InternPool_Local_List', synth=True, expand=True, summary='capacity=${var%#}')
add(debugger, category='zig', type='root.InternPool.Index', synth=True, summary=True)
add(debugger, category='zig', type='root.InternPool.Index.Unwrapped', synth=True)
add(debugger, category='zig', regex=True, type=r'^root\.InternPool\.(Optional)?(NullTerminated)?String$', identifier='root_InternPool_String', summary=True)