Merge pull request #22280 from jacobly0/stage2-pp

lldb: add more stage2 pretty printers
Andrew Kelley 2024-12-23 15:55:03 -05:00 committed by GitHub
commit af5e731729
15 changed files with 740 additions and 302 deletions
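
(Not part of the diff, for reference.) The providers below are registered when the script is imported into lldb; a minimal sketch of a scripted session, assuming the script lives at tools/lldb_pretty_printers.py in the Zig checkout:

import lldb

# Hypothetical driver: import the pretty-printer module into a fresh debugger.
debugger = lldb.SBDebugger.Create()
debugger.SetAsync(False)
debugger.HandleCommand('command script import tools/lldb_pretty_printers.py')
# On import, __lldb_init_module (last file in this diff) registers the zig.*
# type categories and the new root.InternPool.* synthetic/summary providers.
# The InternPool providers only resolve values after the debugged compiler has
# called InternPool.activate, which publishes the pool via the new thread-local
# debug_state.intern_pool.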

View file

@ -225,7 +225,6 @@ pub const ZIG_padding = 0x2cce;
pub const ZIG_relative_decl = 0x2cd0;
pub const ZIG_decl_line_relative = 0x2cd1;
pub const ZIG_comptime_value = 0x2cd2;
pub const ZIG_comptime_default_value = 0x2cd3;
pub const ZIG_sentinel = 0x2ce2;
// UPC extension.

View file

@ -2181,7 +2181,8 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
}
if (comp.zcu) |zcu| {
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = .main };
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
zcu.compile_log_text.shrinkAndFree(gpa, 0);
@ -2251,7 +2252,8 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
try comp.performAllTheWork(main_progress_node);
if (comp.zcu) |zcu| {
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = .main };
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
if (build_options.enable_debug_extensions and comp.verbose_intern_pool) {
std.debug.print("intern pool stats for '{s}':\n", .{
@ -3609,7 +3611,8 @@ fn performAllTheWorkInner(
}
if (comp.zcu) |zcu| {
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = .main };
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
if (comp.incremental) {
const update_zir_refs_node = main_progress_node.start("Update ZIR References", 0);
defer update_zir_refs_node.end();
@ -3683,14 +3686,16 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
const named_frame = tracy.namedFrame("analyze_func");
defer named_frame.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.ensureFuncBodyAnalyzed(func) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
};
},
.analyze_cau => |cau_index| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.ensureCauAnalyzed(cau_index) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
@ -3719,7 +3724,8 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
const named_frame = tracy.namedFrame("resolve_type_fully");
defer named_frame.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
Type.fromInterned(ty).resolveFully(pt) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
@ -3729,7 +3735,8 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
const named_frame = tracy.namedFrame("analyze_mod");
defer named_frame.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.semaPkg(mod) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return,
@ -4183,7 +4190,8 @@ fn workerAstGenFile(
const child_prog_node = prog_node.start(file.sub_file_path, 0);
defer child_prog_node.end();
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.astGenFile(file, path_digest) catch |err| switch (err) {
error.AnalysisFail => return,
else => {

View file

@ -470,6 +470,8 @@ pub const Cau = struct {
_ => @enumFromInt(@intFromEnum(opt)),
};
}
const debug_state = InternPool.debug_state;
};
pub fn toOptional(i: Cau.Index) Optional {
return @enumFromInt(@intFromEnum(i));
@ -491,6 +493,8 @@ pub const Cau = struct {
.index = @intFromEnum(cau_index) & ip.getIndexMask(u31),
};
}
const debug_state = InternPool.debug_state;
};
};
@ -568,6 +572,8 @@ pub const Nav = struct {
_ => @enumFromInt(@intFromEnum(opt)),
};
}
const debug_state = InternPool.debug_state;
};
pub fn toOptional(i: Nav.Index) Optional {
return @enumFromInt(@intFromEnum(i));
@ -589,6 +595,8 @@ pub const Nav = struct {
.index = @intFromEnum(nav_index) & ip.getIndexMask(u32),
};
}
const debug_state = InternPool.debug_state;
};
/// The compact in-memory representation of a `Nav`.
@ -1580,6 +1588,8 @@ pub const String = enum(u32) {
const strings = ip.getLocalShared(unwrapped_string.tid).strings.acquire();
return strings.view().items(.@"0")[unwrapped_string.index..];
}
const debug_state = InternPool.debug_state;
};
/// An index into `strings` which might be `none`.
@ -1596,6 +1606,8 @@ pub const OptionalString = enum(u32) {
pub fn toSlice(string: OptionalString, len: u64, ip: *const InternPool) ?[]const u8 {
return (string.unwrap() orelse return null).toSlice(len, ip);
}
const debug_state = InternPool.debug_state;
};
/// An index into `strings`.
@ -1692,6 +1704,8 @@ pub const NullTerminatedString = enum(u32) {
pub fn fmt(string: NullTerminatedString, ip: *const InternPool) std.fmt.Formatter(format) {
return .{ .data = .{ .string = string, .ip = ip } };
}
const debug_state = InternPool.debug_state;
};
/// An index into `strings` which might be `none`.
@ -1708,6 +1722,8 @@ pub const OptionalNullTerminatedString = enum(u32) {
pub fn toSlice(string: OptionalNullTerminatedString, ip: *const InternPool) ?[:0]const u8 {
return (string.unwrap() orelse return null).toSlice(ip);
}
const debug_state = InternPool.debug_state;
};
/// A single value captured in the closure of a namespace type. This is not a plain
@ -4519,6 +4535,8 @@ pub const Index = enum(u32) {
.data_ptr = &slice.items(.data)[unwrapped.index],
};
}
const debug_state = InternPool.debug_state;
};
pub fn unwrap(index: Index, ip: *const InternPool) Unwrapped {
return if (single_threaded) .{
@ -4532,7 +4550,6 @@ pub const Index = enum(u32) {
/// This function is used in the debugger pretty formatters in tools/ to fetch the
/// Tag to encoding mapping to facilitate fancy debug printing for this type.
/// TODO merge this with `Tag.Payload`.
fn dbHelper(self: *Index, tag_to_encoding_map: *struct {
const DataIsIndex = struct { data: Index };
const DataIsExtraIndexOfEnumExplicit = struct {
@ -4689,18 +4706,13 @@ pub const Index = enum(u32) {
}
}
}
comptime {
if (!builtin.strip_debug_info) switch (builtin.zig_backend) {
.stage2_llvm => _ = &dbHelper,
.stage2_x86_64 => {
for (@typeInfo(Tag).@"enum".fields) |tag| {
if (!@hasField(@TypeOf(Tag.encodings), tag.name)) {
if (false) @compileLog("missing: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name);
continue;
}
.stage2_x86_64 => for (@typeInfo(Tag).@"enum".fields) |tag| {
if (!@hasField(@TypeOf(Tag.encodings), tag.name)) @compileLog("missing: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name);
const encoding = @field(Tag.encodings, tag.name);
for (@typeInfo(encoding.trailing).@"struct".fields) |field| {
if (@hasField(@TypeOf(encoding), "trailing")) for (@typeInfo(encoding.trailing).@"struct".fields) |field| {
struct {
fn checkConfig(name: []const u8) void {
if (!@hasField(@TypeOf(encoding.config), name)) @compileError("missing field: " ++ @typeName(Tag) ++ ".encodings." ++ tag.name ++ ".config.@\"" ++ name ++ "\"");
@ -4725,8 +4737,7 @@ pub const Index = enum(u32) {
}
}
}.checkField("trailing." ++ field.name, field.type);
}
}
};
},
else => {},
};
@ -5035,7 +5046,6 @@ pub const Tag = enum(u8) {
/// data is payload index to `EnumExplicit`.
type_enum_nonexhaustive,
/// A type that can be represented with only an enum tag.
/// data is SimpleType enum value.
simple_type,
/// An opaque type.
/// data is index of Tag.TypeOpaque in extra.
@ -5064,7 +5074,6 @@ pub const Tag = enum(u8) {
/// Untyped `undefined` is stored instead via `simple_value`.
undef,
/// A value that can be represented with only an enum tag.
/// data is SimpleValue enum value.
simple_value,
/// A pointer to a `Nav`.
/// data is extra index of `PtrNav`, which contains the type and address.
@ -5244,95 +5253,90 @@ pub const Tag = enum(u8) {
const Union = Key.Union;
const TypePointer = Key.PtrType;
fn Payload(comptime tag: Tag) type {
return switch (tag) {
.removed => unreachable,
.type_int_signed => unreachable,
.type_int_unsigned => unreachable,
.type_array_big => Array,
.type_array_small => Vector,
.type_vector => Vector,
.type_pointer => TypePointer,
.type_slice => unreachable,
.type_optional => unreachable,
.type_anyframe => unreachable,
.type_error_union => ErrorUnionType,
.type_anyerror_union => unreachable,
.type_error_set => ErrorSet,
.type_inferred_error_set => unreachable,
.type_enum_auto => EnumAuto,
.type_enum_explicit => EnumExplicit,
.type_enum_nonexhaustive => EnumExplicit,
.simple_type => unreachable,
.type_opaque => TypeOpaque,
.type_struct => TypeStruct,
.type_struct_packed, .type_struct_packed_inits => TypeStructPacked,
.type_tuple => TypeTuple,
.type_union => TypeUnion,
.type_function => TypeFunction,
.undef => unreachable,
.simple_value => unreachable,
.ptr_nav => PtrNav,
.ptr_comptime_alloc => PtrComptimeAlloc,
.ptr_uav => PtrUav,
.ptr_uav_aligned => PtrUavAligned,
.ptr_comptime_field => PtrComptimeField,
.ptr_int => PtrInt,
.ptr_eu_payload => PtrBase,
.ptr_opt_payload => PtrBase,
.ptr_elem => PtrBaseIndex,
.ptr_field => PtrBaseIndex,
.ptr_slice => PtrSlice,
.opt_payload => TypeValue,
.opt_null => unreachable,
.int_u8 => unreachable,
.int_u16 => unreachable,
.int_u32 => unreachable,
.int_i32 => unreachable,
.int_usize => unreachable,
.int_comptime_int_u32 => unreachable,
.int_comptime_int_i32 => unreachable,
.int_small => IntSmall,
.int_positive => unreachable,
.int_negative => unreachable,
.int_lazy_align => IntLazy,
.int_lazy_size => IntLazy,
.error_set_error => Error,
.error_union_error => Error,
.error_union_payload => TypeValue,
.enum_literal => unreachable,
.enum_tag => EnumTag,
.float_f16 => unreachable,
.float_f32 => unreachable,
.float_f64 => unreachable,
.float_f80 => unreachable,
.float_f128 => unreachable,
.float_c_longdouble_f80 => unreachable,
.float_c_longdouble_f128 => unreachable,
.float_comptime_float => unreachable,
.variable => Variable,
.@"extern" => Extern,
.func_decl => FuncDecl,
.func_instance => FuncInstance,
.func_coerced => FuncCoerced,
.only_possible_value => unreachable,
.union_value => Union,
.bytes => Bytes,
.aggregate => Aggregate,
.repeated => Repeated,
.memoized_call => MemoizedCall,
const enum_explicit_encoding = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = EnumExplicit,
.trailing = struct {
owner_union: Index,
cau: ?Cau.Index,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_names: []NullTerminatedString,
tag_values: []Index,
},
.config = .{
.@"trailing.owner_union.?" = .@"payload.zir_index == .none",
.@"trailing.cau.?" = .@"payload.zir_index != .none",
.@"trailing.captures.?" = .@"payload.captures_len < 0xffffffff",
.@"trailing.captures.?.len" = .@"payload.captures_len",
.@"trailing.type_hash.?" = .@"payload.captures_len == 0xffffffff",
.@"trailing.field_names.len" = .@"payload.fields_len",
.@"trailing.tag_values.len" = .@"payload.fields_len",
},
};
}
const encodings = .{
.removed = .{},
.type_int_signed = .{ .summary = .@"i{.data%value}", .data = u32 },
.type_int_unsigned = .{ .summary = .@"u{.data%value}", .data = u32 },
.type_array_big = .{
.summary = .@"[{.payload.len1%value} << 32 | {.payload.len0%value}:{.payload.sentinel%summary}]{.payload.child%summary}",
.payload = Array,
},
.type_array_small = .{ .summary = .@"[{.payload.len%value}]{.payload.child%summary}", .payload = Vector },
.type_vector = .{ .summary = .@"@Vector({.payload.len%value}, {.payload.child%summary})", .payload = Vector },
.type_pointer = .{ .summary = .@"*... {.payload.child%summary}", .payload = TypePointer },
.type_slice = .{ .summary = .@"[]... {.data.unwrapped.payload.child%summary}", .data = Index },
.type_optional = .{ .summary = .@"?{.data%summary}", .data = Index },
.type_anyframe = .{ .summary = .@"anyframe->{.data%summary}", .data = Index },
.type_error_union = .{
.summary = .@"{.payload.error_set_type%summary}!{.payload.payload_type%summary}",
.payload = ErrorUnionType,
},
.type_anyerror_union = .{ .summary = .@"anyerror!{.data%summary}", .data = Index },
.type_error_set = .{ .summary = .@"error{...}", .payload = ErrorSet },
.type_inferred_error_set = .{
.summary = .@"@typeInfo(@typeInfo(@TypeOf({.data%summary})).@\"fn\".return_type.?).error_union.error_set",
.data = Index,
},
.type_enum_auto = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = EnumAuto,
.trailing = struct {
owner_union: ?Index,
cau: ?Cau.Index,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_names: []NullTerminatedString,
},
.config = .{
.@"trailing.owner_union.?" = .@"payload.zir_index == .none",
.@"trailing.cau.?" = .@"payload.zir_index != .none",
.@"trailing.captures.?" = .@"payload.captures_len < 0xffffffff",
.@"trailing.captures.?.len" = .@"payload.captures_len",
.@"trailing.type_hash.?" = .@"payload.captures_len == 0xffffffff",
.@"trailing.field_names.len" = .@"payload.fields_len",
},
},
.type_enum_explicit = enum_explicit_encoding,
.type_enum_nonexhaustive = enum_explicit_encoding,
.simple_type = .{ .summary = .@"{.index%value#.}", .index = SimpleType },
.type_opaque = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeOpaque,
.trailing = struct { captures: []CaptureValue },
.config = .{ .@"trailing.captures.len" = .@"payload.captures_len" },
},
.type_struct = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeStruct,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_names_map: OptionalMapIndex,
field_names: []NullTerminatedString,
field_inits: ?[]Index,
field_aligns: ?[]Alignment,
field_is_comptime_bits: ?[]u32,
@ -5342,9 +5346,10 @@ pub const Tag = enum(u8) {
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_names.len" = .@"payload.fields_len",
.@"trailing.field_inits.?" = .@"payload.flags.any_default_inits",
.@"trailing.field_inits.?.len" = .@"payload.fields_len",
.@"trailing.field_aligns.?" = .@"payload.flags.any_aligned_fields",
@ -5356,7 +5361,212 @@ pub const Tag = enum(u8) {
.@"trailing.field_offset.len" = .@"payload.fields_len",
},
},
.type_struct_packed = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeStructPacked,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_names: []NullTerminatedString,
},
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.is_flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_names.len" = .@"payload.fields_len",
},
},
.type_struct_packed_inits = .{
.summary = .@"{.payload.name%summary#\"}",
.payload = TypeStructPacked,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_names: []NullTerminatedString,
field_inits: []Index,
},
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.is_flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_names.len" = .@"payload.fields_len",
.@"trailing.field_inits.len" = .@"payload.fields_len",
},
},
.type_tuple = .{
.summary = .@"struct {...}",
.payload = TypeTuple,
.trailing = struct {
field_types: []Index,
field_values: []Index,
},
.config = .{
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_values.len" = .@"payload.fields_len",
},
},
.type_union = .{
.summary = .@"{.payload.name%summary#\"#\"}",
.payload = TypeUnion,
.trailing = struct {
captures_len: ?u32,
captures: ?[]CaptureValue,
type_hash: ?u64,
field_types: []Index,
field_aligns: []Alignment,
},
.config = .{
.@"trailing.captures_len.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?" = .@"payload.flags.any_captures",
.@"trailing.captures.?.len" = .@"trailing.captures_len.?",
.@"trailing.type_hash.?" = .@"payload.is_flags.is_reified",
.@"trailing.field_types.len" = .@"payload.fields_len",
.@"trailing.field_aligns.len" = .@"payload.fields_len",
},
},
.type_function = .{
.summary = .@"fn (...) ... {.payload.return_type%summary}",
.payload = TypeFunction,
.trailing = struct {
param_comptime_bits: ?[]u32,
param_noalias_bits: ?[]u32,
param_type: []Index,
},
.config = .{
.@"trailing.param_comptime_bits.?" = .@"payload.flags.has_comptime_bits",
.@"trailing.param_comptime_bits.?.len" = .@"(payload.params_len + 31) / 32",
.@"trailing.param_noalias_bits.?" = .@"payload.flags.has_noalias_bits",
.@"trailing.param_noalias_bits.?.len" = .@"(payload.params_len + 31) / 32",
.@"trailing.param_type.len" = .@"payload.params_len",
},
},
.undef = .{ .summary = .@"@as({.data%summary}, undefined)", .data = Index },
.simple_value = .{ .summary = .@"{.index%value#.}", .index = SimpleValue },
.ptr_nav = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&{.payload.nav.fqn%summary#\"}) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrNav,
},
.ptr_comptime_alloc = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&comptime_allocs[{.payload.index%summary}]) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrComptimeAlloc,
},
.ptr_uav = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&{.payload.val%summary}) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrUav,
},
.ptr_uav_aligned = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(@as({.payload.orig_ty%summary}, &{.payload.val%summary})) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrUavAligned,
},
.ptr_comptime_field = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&{.payload.field_val%summary}) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrComptimeField,
},
.ptr_int = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value}))",
.payload = PtrInt,
},
.ptr_eu_payload = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&({.payload.base%summary} catch unreachable)) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrBase,
},
.ptr_opt_payload = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&{.payload.base%summary}.?) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrBase,
},
.ptr_elem = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&{.payload.base%summary}[{.payload.index%summary}]) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrBaseIndex,
},
.ptr_field = .{
.summary = .@"@as({.payload.ty%summary}, @ptrFromInt(@intFromPtr(&{.payload.base%summary}[{.payload.index%summary}]) + ({.payload.byte_offset_a%value} << 32 | {.payload.byte_offset_b%value})))",
.payload = PtrBaseIndex,
},
.ptr_slice = .{
.summary = .@"{.payload.ptr%summary}[0..{.payload.len%summary}]",
.payload = PtrSlice,
},
.opt_payload = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.val%summary})", .payload = TypeValue },
.opt_null = .{ .summary = .@"@as({.data%summary}, null)", .data = Index },
.int_u8 = .{ .summary = .@"@as(u8, {.data%value})", .data = u8 },
.int_u16 = .{ .summary = .@"@as(u16, {.data%value})", .data = u16 },
.int_u32 = .{ .summary = .@"@as(u32, {.data%value})", .data = u32 },
.int_i32 = .{ .summary = .@"@as(i32, {.data%value})", .data = i32 },
.int_usize = .{ .summary = .@"@as(usize, {.data%value})", .data = u32 },
.int_comptime_int_u32 = .{ .summary = .@"{.data%value}", .data = u32 },
.int_comptime_int_i32 = .{ .summary = .@"{.data%value}", .data = i32 },
.int_small = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.value%value})", .payload = IntSmall },
.int_positive = .{},
.int_negative = .{},
.int_lazy_align = .{ .summary = .@"@as({.payload.ty%summary}, @alignOf({.payload.lazy_ty%summary}))", .payload = IntLazy },
.int_lazy_size = .{ .summary = .@"@as({.payload.ty%summary}, @sizeOf({.payload.lazy_ty%summary}))", .payload = IntLazy },
.error_set_error = .{ .summary = .@"@as({.payload.ty%summary}, error.@{.payload.name%summary})", .payload = Error },
.error_union_error = .{ .summary = .@"@as({.payload.ty%summary}, error.@{.payload.name%summary})", .payload = Error },
.error_union_payload = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.val%summary})", .payload = TypeValue },
.enum_literal = .{ .summary = .@".@{.data%summary}", .data = NullTerminatedString },
.enum_tag = .{ .summary = .@"@as({.payload.ty%summary}, @enumFromInt({.payload.int%summary}))", .payload = EnumTag },
.float_f16 = .{ .summary = .@"@as(f16, {.data%value})", .data = f16 },
.float_f32 = .{ .summary = .@"@as(f32, {.data%value})", .data = f32 },
.float_f64 = .{ .summary = .@"@as(f64, {.payload%value})", .payload = f64 },
.float_f80 = .{ .summary = .@"@as(f80, {.payload%value})", .payload = f80 },
.float_f128 = .{ .summary = .@"@as(f128, {.payload%value})", .payload = f128 },
.float_c_longdouble_f80 = .{ .summary = .@"@as(c_longdouble, {.payload%value})", .payload = f80 },
.float_c_longdouble_f128 = .{ .summary = .@"@as(c_longdouble, {.payload%value})", .payload = f128 },
.float_comptime_float = .{ .summary = .@"{.payload%value}", .payload = f128 },
.variable = .{ .summary = .@"{.payload.owner_nav.fqn%summary#\"}", .payload = Variable },
.@"extern" = .{ .summary = .@"{.payload.owner_nav.fqn%summary#\"}", .payload = Extern },
.func_decl = .{
.summary = .@"{.payload.owner_nav.fqn%summary#\"}",
.payload = FuncDecl,
.trailing = struct { inferred_error_set: ?Index },
.config = .{ .@"trailing.inferred_error_set.?" = .@"payload.analysis.inferred_error_set" },
},
.func_instance = .{
.summary = .@"{.payload.owner_nav.fqn%summary#\"}",
.payload = FuncInstance,
.trailing = struct {
inferred_error_set: ?Index,
param_values: []Index,
},
.config = .{
.@"trailing.inferred_error_set.?" = .@"payload.analysis.inferred_error_set",
.@"trailing.param_values.len" = .@"payload.ty.payload.params_len",
},
},
.func_coerced = .{
.summary = .@"@as(*const {.payload.ty%summary}, @ptrCast(&{.payload.func%summary})).*",
.payload = FuncCoerced,
},
.only_possible_value = .{ .summary = .@"@as({.data%summary}, undefined)", .data = Index },
.union_value = .{ .summary = .@"@as({.payload.ty%summary}, {})", .payload = Union },
.bytes = .{ .summary = .@"@as({.payload.ty%summary}, {.payload.bytes%summary}.*)", .payload = Bytes },
.aggregate = .{
.summary = .@"@as({.payload.ty%summary}, .{...})",
.payload = Aggregate,
.trailing = struct { elements: []Index },
.config = .{ .@"trailing.elements.len" = .@"payload.ty.payload.fields_len" },
},
.repeated = .{ .summary = .@"@as({.payload.ty%summary}, @splat({.payload.elem_val%summary}))", .payload = Repeated },
.memoized_call = .{
.summary = .@"@memoize({.payload.func%summary})",
.payload = MemoizedCall,
.trailing = struct { arg_values: []Index },
.config = .{ .@"trailing.arg_values.len" = .@"payload.args_len" },
},
};
fn Payload(comptime tag: Tag) type {
return @field(encodings, @tagName(tag)).payload;
}
pub const Variable = struct {
ty: Index,
@ -6271,6 +6481,8 @@ pub fn init(ip: *InternPool, gpa: Allocator, available_threads: usize) !void {
}
pub fn deinit(ip: *InternPool, gpa: Allocator) void {
if (!builtin.strip_debug_info) std.debug.assert(debug_state.intern_pool == null);
ip.file_deps.deinit(gpa);
ip.src_hash_deps.deinit(gpa);
ip.nav_val_deps.deinit(gpa);
@ -6311,6 +6523,32 @@ pub fn deinit(ip: *InternPool, gpa: Allocator) void {
ip.* = undefined;
}
pub fn activate(ip: *const InternPool) void {
if (builtin.strip_debug_info) return;
_ = Index.Unwrapped.debug_state;
_ = String.debug_state;
_ = OptionalString.debug_state;
_ = NullTerminatedString.debug_state;
_ = OptionalNullTerminatedString.debug_state;
_ = Cau.Index.debug_state;
_ = Cau.Index.Optional.debug_state;
_ = Nav.Index.debug_state;
_ = Nav.Index.Optional.debug_state;
std.debug.assert(debug_state.intern_pool == null);
debug_state.intern_pool = ip;
}
pub fn deactivate(ip: *const InternPool) void {
if (builtin.strip_debug_info) return;
std.debug.assert(debug_state.intern_pool == ip);
debug_state.intern_pool = null;
}
/// For debugger access only.
const debug_state = struct {
threadlocal var intern_pool: ?*const InternPool = null;
};
pub fn indexToKey(ip: *const InternPool, index: Index) Key {
assert(index != .none);
const unwrapped_index = index.unwrap(ip);

View file

@ -891,7 +891,7 @@ pub const ResolveStratLazy = enum {
};
/// The chosen strategy can be easily optimized away in release builds.
/// However, in debug builds, it helps to avoid acceidentally resolving types in backends.
/// However, in debug builds, it helps to avoid accidentally resolving types in backends.
pub const ResolveStrat = enum {
/// Assert that all necessary resolution is completed.
/// Backends should typically use this, since they must not perform type resolution.

View file

@ -2169,8 +2169,10 @@ pub fn init(zcu: *Zcu, thread_count: usize) !void {
}
pub fn deinit(zcu: *Zcu) void {
const pt: Zcu.PerThread = .{ .tid = .main, .zcu = zcu };
const gpa = zcu.gpa;
{
const pt: Zcu.PerThread = .activate(zcu, .main);
defer pt.deactivate();
if (zcu.llvm_object) |llvm_object| llvm_object.deinit();
@ -2252,7 +2254,7 @@ pub fn deinit(zcu: *Zcu) void {
zcu.free_type_references.deinit(gpa);
if (zcu.resolved_references) |*r| r.deinit(gpa);
}
zcu.intern_pool.deinit(gpa);
}

View file

@ -35,6 +35,15 @@ tid: Id,
pub const IdBacking = u7;
pub const Id = if (InternPool.single_threaded) enum { main } else enum(IdBacking) { main, _ };
pub fn activate(zcu: *Zcu, tid: Id) Zcu.PerThread {
zcu.intern_pool.activate();
return .{ .zcu = zcu, .tid = tid };
}
pub fn deactivate(pt: Zcu.PerThread) void {
pt.zcu.intern_pool.deactivate();
}
fn deinitFile(pt: Zcu.PerThread, file_index: Zcu.File.Index) void {
const zcu = pt.zcu;
const gpa = zcu.gpa;

View file

@ -1537,20 +1537,23 @@ pub fn doTask(comp: *Compilation, tid: usize, task: Task) void {
};
},
.codegen_nav => |nav_index| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.linkerUpdateNav(nav_index) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailure(),
};
},
.codegen_func => |func| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
// This call takes ownership of `func.air`.
pt.linkerUpdateFunc(func.func, func.air) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailure(),
};
},
.codegen_type => |ty| {
const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = @enumFromInt(tid) };
const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.linkerUpdateContainerType(ty) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailure(),
};

View file

@ -419,7 +419,8 @@ pub fn flushModule(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
const gpa = comp.gpa;
const zcu = self.base.comp.zcu.?;
const ip = &zcu.intern_pool;
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = tid };
const pt: Zcu.PerThread = .activate(zcu, tid);
defer pt.deactivate();
{
var i: usize = 0;

View file

@ -2218,10 +2218,11 @@ pub fn flushModule(coff: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
const sub_prog_node = prog_node.start("COFF Flush", 0);
defer sub_prog_node.end();
const pt: Zcu.PerThread = .{
.zcu = comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
.tid = tid,
};
const pt: Zcu.PerThread = .activate(
comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
tid,
);
defer pt.deactivate();
if (coff.lazy_syms.getPtr(.anyerror_type)) |metadata| {
// Most lazy symbols can be updated on first use, but

View file

@ -2687,23 +2687,19 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
},
};
try wip_nav.abbrevCode(if (is_comptime)
if (has_runtime_bits and has_comptime_state)
.struct_field_comptime_runtime_bits_comptime_state
else if (has_comptime_state)
if (has_comptime_state)
.struct_field_comptime_comptime_state
else if (has_runtime_bits)
.struct_field_comptime_runtime_bits
else
.struct_field_comptime
else if (field_init != .none)
if (has_runtime_bits and has_comptime_state)
.struct_field_default_runtime_bits_comptime_state
else if (has_comptime_state)
if (has_comptime_state)
.struct_field_default_comptime_state
else if (has_runtime_bits)
.struct_field_default_runtime_bits
else
.struct_field_default
.struct_field
else
.struct_field);
if (loaded_struct.fieldName(ip, field_index).unwrap()) |field_name| try wip_nav.strp(field_name.toSlice(ip)) else {
@ -2717,8 +2713,10 @@ pub fn updateComptimeNav(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool
try uleb128(diw, loaded_struct.fieldAlign(ip, field_index).toByteUnits() orelse
field_type.abiAlignment(zcu).toByteUnits().?);
}
if (has_runtime_bits) try wip_nav.blockValue(nav_src_loc, .fromInterned(field_init));
if (has_comptime_state) try wip_nav.refValue(.fromInterned(field_init));
if (has_comptime_state)
try wip_nav.refValue(.fromInterned(field_init))
else if (has_runtime_bits)
try wip_nav.blockValue(nav_src_loc, .fromInterned(field_init));
}
try uleb128(diw, @intFromEnum(AbbrevCode.null));
}
@ -3363,9 +3361,7 @@ fn updateLazyType(
field_type.comptimeOnly(zcu) and try field_type.onePossibleValue(pt) == null,
},
};
try wip_nav.abbrevCode(if (has_runtime_bits and has_comptime_state)
.struct_field_comptime_runtime_bits_comptime_state
else if (has_comptime_state)
try wip_nav.abbrevCode(if (has_comptime_state)
.struct_field_comptime_comptime_state
else if (has_runtime_bits)
.struct_field_comptime_runtime_bits
@ -3386,8 +3382,10 @@ fn updateLazyType(
try uleb128(diw, field_type.abiAlignment(zcu).toByteUnits().?);
field_byte_offset += field_type.abiSize(zcu);
}
if (has_runtime_bits) try wip_nav.blockValue(src_loc, .fromInterned(comptime_value));
if (has_comptime_state) try wip_nav.refValue(.fromInterned(comptime_value));
if (has_comptime_state)
try wip_nav.refValue(.fromInterned(comptime_value))
else if (has_runtime_bits)
try wip_nav.blockValue(src_loc, .fromInterned(comptime_value));
}
try uleb128(diw, @intFromEnum(AbbrevCode.null));
},
@ -3956,23 +3954,19 @@ pub fn updateContainerType(dwarf: *Dwarf, pt: Zcu.PerThread, type_index: InternP
},
};
try wip_nav.abbrevCode(if (is_comptime)
if (has_runtime_bits and has_comptime_state)
.struct_field_comptime_runtime_bits_comptime_state
else if (has_comptime_state)
if (has_comptime_state)
.struct_field_comptime_comptime_state
else if (has_runtime_bits)
.struct_field_comptime_runtime_bits
else
.struct_field_comptime
else if (field_init != .none)
if (has_runtime_bits and has_comptime_state)
.struct_field_default_runtime_bits_comptime_state
else if (has_comptime_state)
if (has_comptime_state)
.struct_field_default_comptime_state
else if (has_runtime_bits)
.struct_field_default_runtime_bits
else
.struct_field_default
.struct_field
else
.struct_field);
if (loaded_struct.fieldName(ip, field_index).unwrap()) |field_name| try wip_nav.strp(field_name.toSlice(ip)) else {
@ -3986,8 +3980,10 @@ pub fn updateContainerType(dwarf: *Dwarf, pt: Zcu.PerThread, type_index: InternP
try uleb128(diw, loaded_struct.fieldAlign(ip, field_index).toByteUnits() orelse
field_type.abiAlignment(zcu).toByteUnits().?);
}
if (has_runtime_bits) try wip_nav.blockValue(ty_src_loc, .fromInterned(field_init));
if (has_comptime_state) try wip_nav.refValue(.fromInterned(field_init));
if (has_comptime_state)
try wip_nav.refValue(.fromInterned(field_init))
else if (has_runtime_bits)
try wip_nav.blockValue(ty_src_loc, .fromInterned(field_init));
}
try uleb128(diw, @intFromEnum(AbbrevCode.null));
}
@ -4064,23 +4060,19 @@ pub fn updateContainerType(dwarf: *Dwarf, pt: Zcu.PerThread, type_index: InternP
},
};
try wip_nav.abbrevCode(if (is_comptime)
if (has_runtime_bits and has_comptime_state)
.struct_field_comptime_runtime_bits_comptime_state
else if (has_comptime_state)
if (has_comptime_state)
.struct_field_comptime_comptime_state
else if (has_runtime_bits)
.struct_field_comptime_runtime_bits
else
.struct_field_comptime
else if (field_init != .none)
if (has_runtime_bits and has_comptime_state)
.struct_field_default_runtime_bits_comptime_state
else if (has_comptime_state)
if (has_comptime_state)
.struct_field_default_comptime_state
else if (has_runtime_bits)
.struct_field_default_runtime_bits
else
.struct_field_default
.struct_field
else
.struct_field);
if (loaded_struct.fieldName(ip, field_index).unwrap()) |field_name| try wip_nav.strp(field_name.toSlice(ip)) else {
@ -4094,8 +4086,10 @@ pub fn updateContainerType(dwarf: *Dwarf, pt: Zcu.PerThread, type_index: InternP
try uleb128(diw, loaded_struct.fieldAlign(ip, field_index).toByteUnits() orelse
field_type.abiAlignment(zcu).toByteUnits().?);
}
if (has_runtime_bits) try wip_nav.blockValue(ty_src_loc, .fromInterned(field_init));
if (has_comptime_state) try wip_nav.refValue(.fromInterned(field_init));
if (has_comptime_state)
try wip_nav.refValue(.fromInterned(field_init))
else if (has_runtime_bits)
try wip_nav.blockValue(ty_src_loc, .fromInterned(field_init));
}
try uleb128(diw, @intFromEnum(AbbrevCode.null));
}
@ -4680,14 +4674,11 @@ const AbbrevCode = enum {
big_enum_field,
generated_field,
struct_field,
struct_field_default,
struct_field_default_runtime_bits,
struct_field_default_comptime_state,
struct_field_default_runtime_bits_comptime_state,
struct_field_comptime,
struct_field_comptime_runtime_bits,
struct_field_comptime_comptime_state,
struct_field_comptime_runtime_bits_comptime_state,
packed_struct_field,
untagged_union_field,
tagged_union,
@ -4980,15 +4971,6 @@ const AbbrevCode = enum {
.{ .alignment, .udata },
},
},
.struct_field_default = .{
.tag = .member,
.attrs = &.{
.{ .name, .strp },
.{ .type, .ref_addr },
.{ .data_member_location, .udata },
.{ .alignment, .udata },
},
},
.struct_field_default_runtime_bits = .{
.tag = .member,
.attrs = &.{
@ -5006,18 +4988,7 @@ const AbbrevCode = enum {
.{ .type, .ref_addr },
.{ .data_member_location, .udata },
.{ .alignment, .udata },
.{ .ZIG_comptime_default_value, .ref_addr },
},
},
.struct_field_default_runtime_bits_comptime_state = .{
.tag = .member,
.attrs = &.{
.{ .name, .strp },
.{ .type, .ref_addr },
.{ .data_member_location, .udata },
.{ .alignment, .udata },
.{ .default_value, .block },
.{ .ZIG_comptime_default_value, .ref_addr },
.{ .ZIG_comptime_value, .ref_addr },
},
},
.struct_field_comptime = .{
@ -5046,16 +5017,6 @@ const AbbrevCode = enum {
.{ .ZIG_comptime_value, .ref_addr },
},
},
.struct_field_comptime_runtime_bits_comptime_state = .{
.tag = .member,
.attrs = &.{
.{ .const_expr, .flag_present },
.{ .name, .strp },
.{ .type, .ref_addr },
.{ .const_value, .block },
.{ .ZIG_comptime_value, .ref_addr },
},
},
.packed_struct_field = .{
.tag = .member,
.attrs = &.{

View file

@ -267,7 +267,8 @@ pub fn deinit(self: *ZigObject, allocator: Allocator) void {
pub fn flush(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
if (self.lazy_syms.getPtr(.anyerror_type)) |metadata| {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(elf_file.base.comp.zcu.?, tid);
defer pt.deactivate();
// Most lazy symbols can be updated on first use, but
// anyerror needs to wait for everything to be flushed.
@ -296,7 +297,8 @@ pub fn flush(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
}
if (build_options.enable_logging) {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(elf_file.base.comp.zcu.?, tid);
defer pt.deactivate();
for (self.navs.keys(), self.navs.values()) |nav_index, meta| {
checkNavAllocated(pt, nav_index, meta);
}
@ -306,7 +308,8 @@ pub fn flush(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
}
if (self.dwarf) |*dwarf| {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(elf_file.base.comp.zcu.?, tid);
defer pt.deactivate();
try dwarf.flushModule(pt);
const gpa = elf_file.base.comp.gpa;

View file

@ -549,7 +549,8 @@ pub fn getInputSection(self: ZigObject, atom: Atom, macho_file: *MachO) macho.se
pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
if (self.lazy_syms.getPtr(.anyerror_type)) |metadata| {
const pt: Zcu.PerThread = .{ .zcu = macho_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(macho_file.base.comp.zcu.?, tid);
defer pt.deactivate();
// Most lazy symbols can be updated on first use, but
// anyerror needs to wait for everything to be flushed.
@ -578,7 +579,8 @@ pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id)
}
if (self.dwarf) |*dwarf| {
const pt: Zcu.PerThread = .{ .zcu = macho_file.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(macho_file.base.comp.zcu.?, tid);
defer pt.deactivate();
try dwarf.flushModule(pt);
self.debug_abbrev_dirty = false;

View file

@ -604,10 +604,11 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
defer assert(self.hdr.entry != 0x0);
const pt: Zcu.PerThread = .{
.zcu = self.base.comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
.tid = tid,
};
const pt: Zcu.PerThread = .activate(
self.base.comp.zcu orelse return error.LinkingWithoutZigSourceUnimplemented,
tid,
);
defer pt.deactivate();
// finish up the lazy syms
if (self.lazy_syms.getPtr(.none)) |metadata| {

View file

@ -589,7 +589,8 @@ fn populateErrorNameTable(zig_object: *ZigObject, wasm: *Wasm, tid: Zcu.PerThrea
// Addend for each relocation to the table
var addend: u32 = 0;
const pt: Zcu.PerThread = .{ .zcu = wasm.base.comp.zcu.?, .tid = tid };
const pt: Zcu.PerThread = .activate(wasm.base.comp.zcu.?, tid);
defer pt.deactivate();
const slice_ty = Type.slice_const_u8_sentinel_0;
const atom = wasm.getAtomPtr(atom_index);
{

View file

@ -13,21 +13,32 @@ page_size = 1 << 12
def log2_int(i): return i.bit_length() - 1
def create_struct(name, struct_type, **inits):
struct_bytes = bytearray(struct_type.size)
struct_data = lldb.SBData()
def create_struct(parent, name, struct_type, inits):
struct_bytes, struct_data = bytearray(struct_type.size), lldb.SBData()
for field in struct_type.fields:
field_size = field.type.size
field_bytes = inits[field.name].data.uint8[:field_size]
field_init = inits[field.name]
if isinstance(field_init, int):
match struct_data.byte_order:
case lldb.eByteOrderLittle:
byte_order = 'little'
case lldb.eByteOrderBig:
byte_order = 'big'
field_bytes = field_init.to_bytes(field_size, byte_order, signed=field.type.GetTypeFlags() & lldb.eTypeIsSigned != 0)
elif isinstance(field_init, lldb.SBValue):
field_bytes = field_init.data.uint8
else: return
match struct_data.byte_order:
case lldb.eByteOrderLittle:
field_bytes = field_bytes[:field_size]
field_start = field.byte_offset
struct_bytes[field_start:field_start + len(field_bytes)] = field_bytes
case lldb.eByteOrderBig:
field_bytes = field_bytes[-field_size:]
field_end = field.byte_offset + field_size
struct_bytes[field_end - len(field_bytes):field_end] = field_bytes
struct_data.SetData(lldb.SBError(), struct_bytes, struct_data.byte_order, struct_data.GetAddressByteSize())
return next(iter(inits.values())).CreateValueFromData(name, struct_data, struct_type)
return parent.CreateValueFromData(name, struct_data, struct_type)
# Define Zig Language
@ -292,6 +303,8 @@ class std_MultiArrayList_Slice_SynthProvider:
return self.ptrs.CreateValueFromData('[%d]' % index, data, self.entry_type)
except: return None
def MultiArrayList_Entry(type): return '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type
class std_HashMapUnmanaged_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
@ -702,7 +715,7 @@ class root_InternPool_Local_List_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
capacity = self.value.EvaluateExpression('@as(*@This().Header, @alignCast(@ptrCast(@this().bytes - @This().bytes_offset))).capacity')
self.view = create_struct('view', self.value.EvaluateExpression('@This().View').GetValueAsType(), bytes=self.value.GetChildMemberWithName('bytes'), len=capacity, capacity=capacity).GetNonSyntheticValue()
self.view = create_struct(self.value, '.view', self.value.type.FindDirectNestedType('View'), { 'bytes': self.value.GetChildMemberWithName('bytes'), 'len': capacity, 'capacity': capacity }).GetNonSyntheticValue()
def has_children(self): return True
def num_children(self): return 1
def get_child_index(self, name):
@ -712,6 +725,199 @@ class root_InternPool_Local_List_SynthProvider:
try: return (self.view,)[index]
except: pass
expr_path_re = re.compile(r'\{([^}]+)%([^%#}]+)(?:#([^%#}]+))?\}')
def root_InternPool_Index_SummaryProvider(value, _=None):
unwrapped = value.GetChildMemberWithName('unwrapped')
if not unwrapped: return '' # .none
tag = unwrapped.GetChildMemberWithName('tag')
tag_value = tag.value
summary = tag.CreateValueFromType(tag.type).GetChildMemberWithName('encodings').GetChildMemberWithName(tag_value.removeprefix('.')).GetChildMemberWithName('summary')
if not summary: return tag_value
return re.sub(
expr_path_re,
lambda matchobj: getattr(unwrapped.GetValueForExpressionPath(matchobj[1]), matchobj[2]).strip(matchobj[3] or ''),
summary.summary.removeprefix('.').removeprefix('@"').removesuffix('"').replace(r'\"', '"'),
)
class root_InternPool_Index_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
self.unwrapped = None
wrapped = self.value.unsigned
if wrapped == (1 << 32) - 1: return
unwrapped_type = self.value.type.FindDirectNestedType('Unwrapped')
ip = self.value.CreateValueFromType(unwrapped_type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
tid_shift_30 = ip.GetChildMemberWithName('tid_shift_30').unsigned
self.unwrapped = create_struct(self.value, '.unwrapped', unwrapped_type, { 'tid': wrapped >> tid_shift_30, 'index': wrapped & (1 << tid_shift_30) - 1 })
def has_children(self): return True
def num_children(self): return 0
def get_child_index(self, name):
try: return ('unwrapped',).index(name)
except: pass
def get_child_at_index(self, index):
try: return (self.unwrapped,)[index]
except: pass
class root_InternPool_Index_Unwrapped_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
self.tag, self.index, self.data, self.payload, self.trailing = None, None, None, None, None
index = self.value.GetChildMemberWithName('index')
ip = self.value.CreateValueFromType(self.value.type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
shared = ip.GetChildMemberWithName('locals').GetSyntheticValue().child[self.value.GetChildMemberWithName('tid').unsigned].GetChildMemberWithName('shared')
item = shared.GetChildMemberWithName('items').GetChildMemberWithName('view').child[index.unsigned]
self.tag, item_data = item.GetChildMemberWithName('tag'), item.GetChildMemberWithName('data')
encoding = self.tag.CreateValueFromType(self.tag.type).GetChildMemberWithName('encodings').GetChildMemberWithName(self.tag.value.removeprefix('.'))
encoding_index, encoding_data, encoding_payload, encoding_trailing, encoding_config = encoding.GetChildMemberWithName('index'), encoding.GetChildMemberWithName('data'), encoding.GetChildMemberWithName('payload'), encoding.GetChildMemberWithName('trailing'), encoding.GetChildMemberWithName('config')
if encoding_index:
index_type = encoding_index.GetValueAsType()
index_bytes, index_data = index.data.uint8, lldb.SBData()
match index_data.byte_order:
case lldb.eByteOrderLittle:
index_bytes = bytes(index_bytes[:index_type.size])
case lldb.eByteOrderBig:
index_bytes = bytes(index_bytes[-index_type.size:])
index_data.SetData(lldb.SBError(), index_bytes, index_data.byte_order, index_data.GetAddressByteSize())
self.index = self.value.CreateValueFromData('.index', index_data, index_type)
elif encoding_data:
data_type = encoding_data.GetValueAsType()
data_bytes, data_data = item_data.data.uint8, lldb.SBData()
match data_data.byte_order:
case lldb.eByteOrderLittle:
data_bytes = bytes(data_bytes[:data_type.size])
case lldb.eByteOrderBig:
data_bytes = bytes(data_bytes[-data_type.size:])
data_data.SetData(lldb.SBError(), data_bytes, data_data.byte_order, data_data.GetAddressByteSize())
self.data = self.value.CreateValueFromData('.data', data_data, data_type)
elif encoding_payload:
extra = shared.GetChildMemberWithName('extra').GetChildMemberWithName('view').GetChildMemberWithName('0')
extra_index = item_data.unsigned
payload_type = encoding_payload.GetValueAsType()
payload_fields = dict()
for payload_field in payload_type.fields:
payload_fields[payload_field.name] = extra.child[extra_index]
extra_index += 1
self.payload = create_struct(self.value, '.payload', payload_type, payload_fields)
if encoding_trailing and encoding_config:
trailing_type = encoding_trailing.GetValueAsType()
trailing_bytes, trailing_data = bytearray(trailing_type.size), lldb.SBData()
def eval_config(config_name):
expr = encoding_config.GetChildMemberWithName(config_name).summary.removeprefix('.').removeprefix('@"').removesuffix('"').replace(r'\"', '"')
if 'payload.' in expr:
return self.payload.EvaluateExpression(expr.replace('payload.', '@this().'))
elif expr.startswith('trailing.'):
field_type, field_byte_offset = trailing_type, 0
expr_parts = expr.split('.')
for expr_part in expr_parts[1:]:
field = next(filter(lambda field: field.name == expr_part, field_type.fields))
field_type = field.type
field_byte_offset += field.byte_offset
field_data = lldb.SBData()
field_bytes = trailing_bytes[field_byte_offset:field_byte_offset + field_type.size]
field_data.SetData(lldb.SBError(), field_bytes, field_data.byte_order, field_data.GetAddressByteSize())
return self.value.CreateValueFromData('.%s' % expr_parts[-1], field_data, field_type)
else:
return self.value.frame.EvaluateExpression(expr)
for trailing_field in trailing_type.fields:
trailing_field_type = trailing_field.type
trailing_field_name = 'trailing.%s' % trailing_field.name
trailing_field_byte_offset = trailing_field.byte_offset
while True:
match [trailing_field_type_field.name for trailing_field_type_field in trailing_field_type.fields]:
case ['has_value', '?']:
has_value_field, child_field = trailing_field_type.fields
trailing_field_name = '%s.%s' % (trailing_field_name, child_field.name)
match eval_config(trailing_field_name).value:
case 'true':
if has_value_field.type.name == 'bool':
trailing_bytes[trailing_field_byte_offset + has_value_field.byte_offset] = True
trailing_field_type = child_field.type
trailing_field_byte_offset += child_field.byte_offset
case 'false':
break
case ['ptr', 'len']:
ptr_field, len_field = trailing_field_type.fields
ptr_field_byte_offset, len_field_byte_offset = trailing_field_byte_offset + ptr_field.byte_offset, trailing_field_byte_offset + len_field.byte_offset
trailing_bytes[ptr_field_byte_offset:ptr_field_byte_offset + ptr_field.type.size] = extra.child[extra_index].address_of.data.uint8
len_field_value = eval_config('%s.len' % trailing_field_name)
len_field_size = len_field.type.size
match trailing_data.byte_order:
case lldb.eByteOrderLittle:
len_field_bytes = len_field_value.data.uint8[:len_field_size]
trailing_bytes[len_field_byte_offset:len_field_byte_offset + len(len_field_bytes)] = len_field_bytes
case lldb.eByteOrderBig:
len_field_bytes = len_field_value.data.uint8[-len_field_size:]
len_field_end = len_field_byte_offset + len_field_size
trailing_bytes[len_field_end - len(len_field_bytes):len_field_end] = len_field_bytes
extra_index += (ptr_field.type.GetPointeeType().size * len_field_value.unsigned + 3) // 4
break
case _:
for offset in range(0, trailing_field_type.size, 4):
trailing_bytes[trailing_field_byte_offset + offset:trailing_field_byte_offset + offset + 4] = extra.child[extra_index].data.uint8
extra_index += 1
break
trailing_data.SetData(lldb.SBError(), trailing_bytes, trailing_data.byte_order, trailing_data.GetAddressByteSize())
self.trailing = self.value.CreateValueFromData('.trailing', trailing_data, trailing_type)
def has_children(self): return True
def num_children(self): return 1 + ((self.index or self.data or self.payload) is not None) + (self.trailing is not None)
def get_child_index(self, name):
try: return ('tag', 'index' if self.index is not None else 'data' if self.data is not None else 'payload', 'trailing').index(name)
except: pass
def get_child_at_index(self, index):
try: return (self.tag, self.index or self.data or self.payload, self.trailing)[index]
except: pass
def root_InternPool_String_SummaryProvider(value, _=None):
wrapped = value.unsigned
ip = value.CreateValueFromType(value.type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
tid_shift_32 = ip.GetChildMemberWithName('tid_shift_32').unsigned
locals_value = ip.GetChildMemberWithName('locals').GetSyntheticValue()
local_value = locals_value.child[wrapped >> tid_shift_32]
if local_value is None:
wrapped = 0
local_value = locals_value.child[0]
string = local_value.GetChildMemberWithName('shared').GetChildMemberWithName('strings').GetChildMemberWithName('view').GetChildMemberWithName('0').child[wrapped & (1 << tid_shift_32) - 1].address_of
string.format = lldb.eFormatCString
return string.value
class root_InternPool_Cau_Index_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
self.cau = None
wrapped = self.value.unsigned
if wrapped == (1 << 32) - 1: return
ip = self.value.CreateValueFromType(self.value.type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
tid_shift_31 = ip.GetChildMemberWithName('tid_shift_31').unsigned
locals_value = ip.GetChildMemberWithName('locals').GetSyntheticValue()
local_value = locals_value.child[wrapped >> tid_shift_31]
if local_value is None:
wrapped = 0
local_value = locals_value.child[0]
self.cau = local_value.GetChildMemberWithName('shared').GetChildMemberWithName('caus').GetChildMemberWithName('view').GetChildMemberWithName('0').child[wrapped & (1 << tid_shift_31) - 1]
def has_children(self): return self.cau.GetNumChildren(1) > 0
def num_children(self): return self.cau.GetNumChildren()
def get_child_index(self, name): return self.cau.GetIndexOfChildWithName(name)
def get_child_at_index(self, index): return self.cau.GetChildAtIndex(index)
class root_InternPool_Nav_Index_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
self.nav = None
wrapped = self.value.unsigned
if wrapped == (1 << 32) - 1: return
ip = self.value.CreateValueFromType(self.value.type).GetChildMemberWithName('debug_state').GetChildMemberWithName('intern_pool').GetNonSyntheticValue().GetChildMemberWithName('?')
tid_shift_32 = ip.GetChildMemberWithName('tid_shift_32').unsigned
locals_value = ip.GetChildMemberWithName('locals').GetSyntheticValue()
local_value = locals_value.child[wrapped >> tid_shift_32]
if local_value is None:
wrapped = 0
local_value = locals_value.child[0]
self.nav = local_value.GetChildMemberWithName('shared').GetChildMemberWithName('navs').GetChildMemberWithName('view').child[wrapped & (1 << tid_shift_32) - 1]
def has_children(self): return self.nav.GetNumChildren(1) > 0
def num_children(self): return self.nav.GetNumChildren()
def get_child_index(self, name): return self.nav.GetIndexOfChildWithName(name)
def get_child_at_index(self, index): return self.nav.GetChildAtIndex(index)
# Initialize
def add(debugger, *, category, regex=False, type, identifier=None, synth=False, inline_children=False, expand=False, summary=False):
@ -719,8 +925,6 @@ def add(debugger, *, category, regex=False, type, identifier=None, synth=False,
if summary: debugger.HandleCommand('type summary add --category %s%s%s "%s"' % (category, ' --inline-children' if inline_children else ''.join((' --expand' if expand else '', ' --python-function %s_SummaryProvider' % prefix if summary == True else ' --summary-string "%s"' % summary)), ' --regex' if regex else '', type))
if synth: debugger.HandleCommand('type synthetic add --category %s%s --python-class %s_SynthProvider "%s"' % (category, ' --regex' if regex else '', prefix, type))
def MultiArrayList_Entry(type): return '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type
def __lldb_init_module(debugger, _=None):
# Initialize Zig Categories
debugger.HandleCommand('type category define --language c99 zig.lang zig.std')
@ -765,4 +969,9 @@ def __lldb_init_module(debugger, _=None):
add(debugger, category='zig.stage2', type='arch.x86_64.CodeGen.MCValue', identifier='zig_TaggedUnion', synth=True, inline_children=True, summary=True)
# Initialize Zig Stage2 Compiler (compiled with the self-hosted backend)
add(debugger, category='zig', regex=True, type='^root\\.InternPool\\.Local\\.List\\(.*\\)$', identifier='root_InternPool_Local_List', synth=True, expand=True, summary='capacity=${var%#}')
add(debugger, category='zig', regex=True, type=r'^root\.InternPool\.Local\.List\(.*\)$', identifier='root_InternPool_Local_List', synth=True, expand=True, summary='capacity=${var%#}')
add(debugger, category='zig', type='root.InternPool.Index', synth=True, summary=True)
add(debugger, category='zig', type='root.InternPool.Index.Unwrapped', synth=True)
add(debugger, category='zig', regex=True, type=r'^root\.InternPool\.(Optional)?(NullTerminated)?String$', identifier='root_InternPool_String', summary=True)
add(debugger, category='zig', regex=True, type=r'^root\.InternPool\.Cau\.Index(\.Optional)?$', identifier='root_InternPool_Cau_Index', synth=True)
add(debugger, category='zig', regex=True, type=r'^root\.InternPool\.Nav\.Index(\.Optional)?$', identifier='root_InternPool_Nav_Index', synth=True)
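
(Not part of the diff.) A standalone sketch of how the summary strings in the new InternPool.Tag.encodings table are expanded: each {expr_path%attribute#strip} placeholder is matched by expr_path_re above, the expression path is resolved against the unwrapped value, and the named attribute is read and optionally stripped. The dict below is a hypothetical stand-in for that SBValue lookup; the real provider calls unwrapped.GetValueForExpressionPath(path) instead.

import re

expr_path_re = re.compile(r'\{([^}]+)%([^%#}]+)(?:#([^%#}]+))?\}')

# Hypothetical stand-in for an lldb.SBValue tree rooted at Index.Unwrapped.
fake_index = {'.data': {'value': '32'}}

def expand(summary, lookup):
    # {path%attr#strip}: resolve `path`, read `attr`, strip the optional chars.
    return expr_path_re.sub(
        lambda m: lookup[m[1]][m[2]].strip(m[3] or ''),
        summary)

print(expand('u{.data%value}', fake_index))  # -> "u32", mirroring the .type_int_unsigned encoding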