dwarf: track type relocation state in Dwarf module

Jakub Konka 2022-03-27 17:04:20 +02:00
parent 4ca9b4c44a
commit 366ec21052
11 changed files with 435 additions and 517 deletions

View file

@@ -386,18 +386,19 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
const delta_line = @intCast(i32, line) - @intCast(i32, self.prev_di_line);
const delta_pc: usize = self.code.items.len - self.prev_di_pc;
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
self.prev_di_pc = self.code.items.len;
self.prev_di_line = line;
self.prev_di_column = column;
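An aside on the TODO above: a DWARF "special opcode" encodes a PC advance and a line advance in a single byte. A minimal sketch of the selection logic, assuming illustrative .debug_line header constants (opcode_base, line_base and line_range are fields of the line number program header; the values below are not taken from this commit):

fn specialOpcode(delta_pc: u64, delta_line: i64) ?u8 {
    // Illustrative header constants; a real implementation reads them from
    // the .debug_line program header it emits.
    const opcode_base = 13;
    const line_base = -5;
    const line_range = 14;
    if (delta_line < line_base or delta_line >= line_base + line_range) return null;
    const adjusted = @intCast(u64, delta_line - line_base) + line_range * delta_pc + opcode_base;
    if (adjusted > 255) return null;
    // This single byte would replace the advance_pc/advance_line/copy sequence above.
    return @intCast(u8, adjusted);
}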
@@ -586,8 +587,8 @@ fn mirDbgLine(emit: *Emit, inst: Mir.Inst.Index) !void {
fn mirDebugPrologueEnd(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},
@@ -597,8 +598,8 @@ fn mirDebugPrologueEnd(self: *Emit) !void {
fn mirDebugEpilogueBegin(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},

View file

@@ -328,18 +328,19 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
const delta_line = @intCast(i32, line) - @intCast(i32, self.prev_di_line);
const delta_pc: usize = self.code.items.len - self.prev_di_pc;
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
self.prev_di_pc = self.code.items.len;
self.prev_di_line = line;
self.prev_di_column = column;
@@ -379,19 +380,17 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
/// after codegen for this symbol is done.
fn addDbgInfoTypeReloc(self: *Emit, ty: Type) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
assert(ty.hasRuntimeBits());
const index = dbg_out.dbg_info.items.len;
try dbg_out.dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const gop = try dbg_out.dbg_info_type_relocs.getOrPutContext(self.bin_file.allocator, ty, .{ .target = self.target.* });
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(self.bin_file.allocator, @intCast(u32, index));
const dbg_info = dw.getDeclDebugInfoBuffer();
const index = dbg_info.items.len;
try dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const atom = switch (self.bin_file.tag) {
.elf => &self.function.mod_fn.owner_decl.link.elf.dbg_info_atom,
.macho => unreachable,
else => unreachable,
};
try dw.addTypeReloc(atom, ty, @intCast(u32, index), null);
},
.plan9 => {},
.none => {},
@@ -409,16 +408,17 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
switch (mcv) {
.register => |reg| {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_info.ensureUnusedCapacity(3);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
.dwarf => |dw| {
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(3);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // ULEB128 dwarf expression length
reg.dwarfLocOp(),
});
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try self.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},
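For orientation, the parameter DIE built by the .register branch above ends up with the following byte layout in the Decl's dbg_info buffer (read off the appends above; the four DW.FORM.ref4 bytes are merely reserved here and patched by the Dwarf module once the referenced type DIE has an offset):

    link.File.Dwarf.abbrev_parameter,  // abbreviation code
    1, reg.dwarfLocOp(),               // DW.AT.location, DW.FORM.exprloc: 1-byte expression
    <4 reserved bytes>,                // DW.AT.type, DW.FORM.ref4, filled in via addTypeReloc
    "arg name", 0,                     // DW.AT.name, DW.FORM.string, NUL-terminated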
@@ -428,7 +428,7 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
.stack_argument_offset,
=> {
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
const abi_size = math.cast(u32, ty.abiSize(self.target.*)) catch {
return self.fail("type '{}' too big to fit into stack frame", .{ty.fmt(target)});
};
@@ -442,7 +442,8 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
else => unreachable,
};
try dbg_out.dbg_info.append(link.File.Dwarf.abbrev_parameter);
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.append(link.File.Dwarf.abbrev_parameter);
// Get length of the LEB128 stack offset
var counting_writer = std.io.countingWriter(std.io.null_writer);
@@ -450,13 +451,13 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
// DW.AT.location, DW.FORM.exprloc
// ULEB128 dwarf expression length
try leb128.writeULEB128(dbg_out.dbg_info.writer(), counting_writer.bytes_written + 1);
try dbg_out.dbg_info.append(DW.OP.breg11);
try leb128.writeILEB128(dbg_out.dbg_info.writer(), adjusted_stack_offset);
try leb128.writeULEB128(dbg_info.writer(), counting_writer.bytes_written + 1);
try dbg_info.append(DW.OP.breg11);
try leb128.writeILEB128(dbg_info.writer(), adjusted_stack_offset);
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try self.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},
@@ -558,8 +559,8 @@ fn mirDbgLine(emit: *Emit, inst: Mir.Inst.Index) !void {
fn mirDebugPrologueEnd(emit: *Emit) !void {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
.plan9 => {},
@@ -569,8 +570,8 @@ fn mirDebugPrologueEnd(emit: *Emit) !void {
fn mirDebugEpilogueBegin(emit: *Emit) !void {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
.plan9 => {},

View file

@@ -745,21 +745,17 @@ fn ensureProcessDeathCapacity(self: *Self, additional_count: usize) !void {
/// after codegen for this symbol is done.
fn addDbgInfoTypeReloc(self: *Self, ty: Type) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
assert(ty.hasRuntimeBits());
const index = dbg_out.dbg_info.items.len;
try dbg_out.dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const gop = try dbg_out.dbg_info_type_relocs.getOrPutContext(self.gpa, ty, .{
.target = self.target.*,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(self.gpa, @intCast(u32, index));
const dbg_info = dw.getDeclDebugInfoBuffer();
const index = dbg_info.items.len;
try dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const atom = switch (self.bin_file.tag) {
.elf => &self.mod_fn.owner_decl.link.elf.dbg_info_atom,
.macho => unreachable,
else => unreachable,
};
try dw.addTypeReloc(atom, ty, @intCast(u32, index), null);
},
.plan9 => {},
.none => {},
@@ -1573,16 +1569,17 @@ fn genArgDbgInfo(self: *Self, inst: Air.Inst.Index, mcv: MCValue, arg_index: u32
switch (mcv) {
.register => |reg| {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_info.ensureUnusedCapacity(3);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
.dwarf => |dw| {
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(3);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // ULEB128 dwarf expression length
reg.dwarfLocOp(),
});
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try self.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},

View file

@@ -89,18 +89,19 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
const delta_line = @intCast(i32, line) - @intCast(i32, self.prev_di_line);
const delta_pc: usize = self.code.items.len - self.prev_di_pc;
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
self.prev_di_pc = self.code.items.len;
self.prev_di_line = line;
self.prev_di_column = column;
@@ -182,8 +183,8 @@ fn mirDbgLine(emit: *Emit, inst: Mir.Inst.Index) !void {
fn mirDebugPrologueEnd(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},
@@ -193,8 +194,8 @@ fn mirDebugPrologueEnd(self: *Emit) !void {
fn mirDebugEpilogueBegin(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},

View file

@@ -350,6 +350,7 @@ pub fn generate(
var emit = Emit{
.mir = mir,
.bin_file = bin_file,
.function = &function,
.debug_output = debug_output,
.target = &bin_file.options.target,
.src_loc = src_loc,

View file

@@ -16,6 +16,7 @@ const testing = std.testing;
const Air = @import("../../Air.zig");
const Allocator = mem.Allocator;
const CodeGen = @import("CodeGen.zig");
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
const DW = std.dwarf;
const Encoder = bits.Encoder;
@@ -29,6 +30,7 @@ const Type = @import("../../type.zig").Type;
mir: Mir,
bin_file: *link.File,
function: *const CodeGen,
debug_output: DebugInfoOutput,
target: *const std.Target,
err_msg: ?*ErrorMsg = null,
@@ -963,18 +965,19 @@ fn dbgAdvancePCAndLine(emit: *Emit, line: u32, column: u32) InnerError!void {
const delta_pc: usize = emit.code.items.len - emit.prev_di_pc;
log.debug(" (advance pc={d} and line={d})", .{ delta_line, delta_pc });
switch (emit.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
emit.prev_di_line = line;
emit.prev_di_column = column;
emit.prev_di_pc = emit.code.items.len;
@@ -1022,8 +1025,8 @@ fn mirDbgPrologueEnd(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const tag = emit.mir.instructions.items(.tag)[inst];
assert(tag == .dbg_prologue_end);
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
log.debug("mirDbgPrologueEnd (line={d}, col={d})", .{ emit.prev_di_line, emit.prev_di_column });
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
@@ -1036,8 +1039,8 @@ fn mirDbgEpilogueBegin(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const tag = emit.mir.instructions.items(.tag)[inst];
assert(tag == .dbg_epilogue_begin);
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
log.debug("mirDbgEpilogueBegin (line={d}, col={d})", .{ emit.prev_di_line, emit.prev_di_column });
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
@@ -1063,16 +1066,17 @@ fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue, max_stack: u32
switch (mcv) {
.register => |reg| {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_info.ensureUnusedCapacity(3);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
.dwarf => |dw| {
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(3);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // ULEB128 dwarf expression length
reg.dwarfLocOp(),
});
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try emit.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},
@@ -1080,25 +1084,26 @@ fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue, max_stack: u32
},
.stack_offset => |off| {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// We add +16 here, like we do in airArg in CodeGen, because we address relative to
// rbp, which sits 16 bytes below the start of the function frame: 8 bytes for the
// caller's rbp preserved in the prologue, and 8 bytes for the return address.
// TODO we need to make this more generic if we don't use rbp as the frame pointer
// for example when -fomit-frame-pointer is set.
const disp = @intCast(i32, max_stack) - off + 16;
try dbg_out.dbg_info.ensureUnusedCapacity(8);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
const fixup = dbg_out.dbg_info.items.len;
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(8);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
const fixup = dbg_info.items.len;
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // we will backpatch it after we encode the displacement in LEB128
DW.OP.breg6, // .rbp TODO handle -fomit-frame-pointer
});
leb128.writeILEB128(dbg_out.dbg_info.writer(), disp) catch unreachable;
dbg_out.dbg_info.items[fixup] += @intCast(u8, dbg_out.dbg_info.items.len - fixup - 2);
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
leb128.writeILEB128(dbg_info.writer(), disp) catch unreachable;
dbg_info.items[fixup] += @intCast(u8, dbg_info.items.len - fixup - 2);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try emit.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
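To make the displacement above concrete with illustrative numbers (not taken from the diff): with max_stack = 24 and a parameter at off = 8, disp = 24 - 8 + 16 = 32, so the location expression becomes DW.OP.breg6 (rbp) with offset +32; the +16 is the caller's saved rbp (8 bytes) plus the return address (8 bytes) called out in the comment.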
@@ -1113,21 +1118,17 @@ fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue, max_stack: u32
/// after codegen for this symbol is done.
fn addDbgInfoTypeReloc(emit: *Emit, ty: Type) !void {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
assert(ty.hasRuntimeBits());
const index = dbg_out.dbg_info.items.len;
try dbg_out.dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const gop = try dbg_out.dbg_info_type_relocs.getOrPutContext(emit.bin_file.allocator, ty, .{
.target = emit.target.*,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(emit.bin_file.allocator, @intCast(u32, index));
const dbg_info = dw.getDeclDebugInfoBuffer();
const index = dbg_info.items.len;
try dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const atom = switch (emit.bin_file.tag) {
.elf => &emit.function.mod_fn.owner_decl.link.elf.dbg_info_atom,
.macho => &emit.function.mod_fn.owner_decl.link.macho.dbg_info_atom,
else => unreachable,
};
try dw.addTypeReloc(atom, ty, @intCast(u32, index), null);
},
.plan9 => {},
.none => {},

View file

@@ -42,11 +42,7 @@ pub const GenerateSymbolError = error{
};
pub const DebugInfoOutput = union(enum) {
dwarf: struct {
dbg_line: *std.ArrayList(u8),
dbg_info: *std.ArrayList(u8),
dbg_info_type_relocs: *link.File.Dwarf.DbgInfoTypeRelocsTable,
},
dwarf: *link.File.Dwarf,
/// the plan9 debuginfo output is a bytecode with 4 opcodes
/// assume all numbers/variables are bytes
/// 0 w x y z -> interpret w x y z as a big-endian i32, and add it to the line offset

View file

@@ -39,16 +39,22 @@ atom_last: ?*Atom = null,
abbrev_table_offset: ?u64 = null,
/// TODO replace with InternArena
/// Table of debug symbol names.
strtab: std.ArrayListUnmanaged(u8) = .{},
deferred_error_sets_relocs: std.ArrayListUnmanaged(u32) = .{},
/// Lives only as long as the analysed Decl.
/// Allocated with `initDeclState`.
/// Freed with `commitDeclState`.
decl_state: ?DeclState = null,
/// List of atoms that are owned directly by the DWARF module.
/// TODO convert links in DebugInfoAtom into indices and make
/// sure every atom is owned by this module.
managed_atoms: std.ArrayListUnmanaged(*Atom) = .{},
global_abbrev_relocs: std.ArrayListUnmanaged(AbbrevRelocation) = .{},
pub const Atom = struct {
/// Previous/next linked list pointers.
/// This is the linked list node for this Decl's corresponding .debug_info tag.
@@ -60,6 +66,54 @@ pub const Atom = struct {
len: u32,
};
/// Represents the state of the analysed Decl.
/// Includes the Decl's abbrev table of Types, a matching arena,
/// and a set of relocations that will be resolved once this
/// Decl's inner Atom is assigned an offset within the DWARF section.
pub const DeclState = struct {
dbg_line: std.ArrayList(u8),
dbg_info: std.ArrayList(u8),
abbrev_type_arena: std.heap.ArenaAllocator,
abbrev_table: std.ArrayListUnmanaged(AbbrevEntry) = .{},
abbrev_resolver: std.HashMapUnmanaged(
Type,
u32,
Type.HashContext64,
std.hash_map.default_max_load_percentage,
) = .{},
abbrev_relocs: std.ArrayListUnmanaged(AbbrevRelocation) = .{},
fn init(gpa: Allocator) DeclState {
return .{
.dbg_line = std.ArrayList(u8).init(gpa),
.dbg_info = std.ArrayList(u8).init(gpa),
.abbrev_type_arena = std.heap.ArenaAllocator.init(gpa),
};
}
fn deinit(self: *DeclState, gpa: Allocator) void {
self.dbg_line.deinit();
self.dbg_info.deinit();
self.abbrev_type_arena.deinit();
self.abbrev_table.deinit(gpa);
self.abbrev_resolver.deinit(gpa);
self.abbrev_relocs.deinit(gpa);
}
};
pub const AbbrevEntry = struct {
atom: *const Atom,
@"type": Type,
offset: u32,
};
pub const AbbrevRelocation = struct {
target: u32,
atom: *const Atom,
offset: u32,
addend: u32,
};
pub const SrcFn = struct {
/// Offset from the beginning of the Debug Line Program header that contains this function.
off: u32,
@@ -82,22 +136,6 @@ pub const SrcFn = struct {
pub const PtrWidth = enum { p32, p64 };
pub const DbgInfoTypeRelocsTable = std.ArrayHashMapUnmanaged(
Type,
DbgInfoTypeReloc,
Type.HashContext32,
true,
);
pub const DbgInfoTypeReloc = struct {
/// Offset from `TextBlock.dbg_info_off` (the buffer that is local to a Decl).
/// This is where the .debug_info tag for the type is.
off: u32,
/// Offset from `TextBlock.dbg_info_off` (the buffer that is local to a Decl).
/// List of DW.AT.type / DW.FORM.ref4 that points to the type.
relocs: std.ArrayListUnmanaged(u32),
};
pub const abbrev_compile_unit = 1;
pub const abbrev_subprogram = 2;
pub const abbrev_subprogram_retvoid = 3;
@@ -143,7 +181,7 @@ pub fn deinit(self: *Dwarf) void {
self.dbg_line_fn_free_list.deinit(gpa);
self.atom_free_list.deinit(gpa);
self.strtab.deinit(gpa);
self.deferred_error_sets_relocs.deinit(gpa);
self.global_abbrev_relocs.deinit(gpa);
for (self.managed_atoms.items) |atom| {
gpa.destroy(atom);
@@ -151,25 +189,22 @@ pub fn deinit(self: *Dwarf) void {
self.managed_atoms.deinit(gpa);
}
pub const DeclDebugBuffers = struct {
dbg_line_buffer: std.ArrayList(u8),
dbg_info_buffer: std.ArrayList(u8),
dbg_info_type_relocs: DbgInfoTypeRelocsTable,
};
pub fn initDeclDebugInfo(self: *Dwarf, decl: *Module.Decl) !DeclDebugBuffers {
/// Initializes Decl's state and its matching output buffers.
/// Call this before `commitDeclState`.
pub fn initDeclState(self: *Dwarf, decl: *Module.Decl) !void {
const tracy = trace(@src());
defer tracy.end();
const decl_name = try decl.getFullyQualifiedName(self.allocator);
defer self.allocator.free(decl_name);
log.debug("initDeclDebugInfo {s}{*}", .{ decl_name, decl });
log.debug("initDeclState {s}{*}", .{ decl_name, decl });
const gpa = self.allocator;
var dbg_line_buffer = std.ArrayList(u8).init(gpa);
var dbg_info_buffer = std.ArrayList(u8).init(gpa);
var dbg_info_type_relocs: DbgInfoTypeRelocsTable = .{};
assert(self.decl_state == null);
self.decl_state = DeclState.init(gpa);
const dbg_line_buffer = &self.decl_state.?.dbg_line;
const dbg_info_buffer = &self.decl_state.?.dbg_info;
assert(decl.has_tv);
@@ -232,19 +267,17 @@ pub fn initDeclDebugInfo(self: *Dwarf, decl: *Module.Decl) !DeclDebugBuffers {
dbg_info_buffer.items.len += ptr_width_bytes; // DW.AT.low_pc, DW.FORM.addr
assert(self.getRelocDbgInfoSubprogramHighPC() == dbg_info_buffer.items.len);
dbg_info_buffer.items.len += 4; // DW.AT.high_pc, DW.FORM.data4
//
if (fn_ret_has_bits) {
const gop = try dbg_info_type_relocs.getOrPutContext(gpa, fn_ret_type, .{
.target = self.target,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(gpa, @intCast(u32, dbg_info_buffer.items.len));
const atom = switch (self.tag) {
.elf => &decl.link.elf.dbg_info_atom,
.macho => &decl.link.macho.dbg_info_atom,
else => unreachable,
};
try self.addTypeReloc(atom, fn_ret_type, @intCast(u32, dbg_info_buffer.items.len), null);
dbg_info_buffer.items.len += 4; // DW.AT.type, DW.FORM.ref4
}
dbg_info_buffer.appendSliceAssumeCapacity(decl_name_with_null); // DW.AT.name, DW.FORM.string
},
@@ -252,30 +285,28 @@ pub fn initDeclDebugInfo(self: *Dwarf, decl: *Module.Decl) !DeclDebugBuffers {
// TODO implement .debug_info for global variables
},
}
return DeclDebugBuffers{
.dbg_info_buffer = dbg_info_buffer,
.dbg_line_buffer = dbg_line_buffer,
.dbg_info_type_relocs = dbg_info_type_relocs,
};
}
pub fn commitDeclDebugInfo(
pub fn commitDeclState(
self: *Dwarf,
file: *File,
module: *Module,
decl: *Module.Decl,
sym_addr: u64,
sym_size: u64,
debug_buffers: *DeclDebugBuffers,
) !void {
const tracy = trace(@src());
defer tracy.end();
assert(self.decl_state != null); // Caller forgot to call `initDeclState`
defer {
self.decl_state.?.deinit(self.allocator);
self.decl_state = null;
}
const gpa = self.allocator;
var dbg_line_buffer = &debug_buffers.dbg_line_buffer;
var dbg_info_buffer = &debug_buffers.dbg_info_buffer;
var dbg_info_type_relocs = &debug_buffers.dbg_info_type_relocs;
var dbg_line_buffer = &self.decl_state.?.dbg_line;
var dbg_info_buffer = &self.decl_state.?.dbg_info;
const target_endian = self.target.cpu.arch.endian();
@@ -473,25 +504,21 @@ pub fn commitDeclDebugInfo(
if (dbg_info_buffer.items.len == 0)
return;
// We need this for the duration of this function only so that for composite
// types such as []const u32, if the type *u32 is non-existent, we create
// it synthetically and store the backing bytes in this arena. After we are
// done with the relocations, we can safely deinit the entire memory slab.
// TODO currently, we do not store the relocations for future use, however,
// if that is the case, we should move memory management to a higher scope,
// such as linker scope, or whatnot.
var dbg_type_arena = std.heap.ArenaAllocator.init(gpa);
defer dbg_type_arena.deinit();
const atom = switch (self.tag) {
.elf => &decl.link.elf.dbg_info_atom,
.macho => &decl.link.macho.dbg_info_atom,
else => unreachable,
};
const decl_state = &self.decl_state.?;
var nested_ref4_relocs = std.ArrayList(u32).init(gpa);
defer nested_ref4_relocs.deinit();
{
// Now we emit the .debug_info types of the Decl. These will count towards the size of
// the buffer, so we have to do it before computing the offset, and we can't perform the actual
// relocations yet.
var it: usize = 0;
while (it < dbg_info_type_relocs.count()) : (it += 1) {
const ty = dbg_info_type_relocs.keys()[it];
var sym_index: usize = 0;
while (sym_index < decl_state.abbrev_table.items.len) : (sym_index += 1) {
const symbol = &decl_state.abbrev_table.items[sym_index];
const ty = symbol.@"type";
const deferred: bool = blk: {
if (ty.isAnyError()) break :blk true;
switch (ty.tag()) {
@@ -504,68 +531,38 @@ pub fn commitDeclDebugInfo(
};
if (deferred) continue;
const value_ptr = dbg_info_type_relocs.getPtrContext(ty, .{
.target = self.target,
}).?;
value_ptr.off = @intCast(u32, dbg_info_buffer.items.len);
try self.addDbgInfoType(
dbg_type_arena.allocator(),
module,
ty,
dbg_info_buffer,
dbg_info_type_relocs,
&nested_ref4_relocs,
);
symbol.offset = @intCast(u32, dbg_info_buffer.items.len);
try self.addDbgInfoType(decl_state.abbrev_type_arena.allocator(), module, atom, ty, dbg_info_buffer);
}
}
const atom = switch (self.tag) {
.elf => &decl.link.elf.dbg_info_atom,
.macho => &decl.link.macho.dbg_info_atom,
else => unreachable,
};
try self.updateDeclDebugInfoAllocation(file, atom, @intCast(u32, dbg_info_buffer.items.len));
{
// Now that we have the offset assigned we can finally perform type relocations.
for (dbg_info_type_relocs.keys()) |ty| {
const value = dbg_info_type_relocs.getContext(ty, .{
.target = self.target,
}).?;
for (value.relocs.items) |off| {
const deferred: bool = blk: {
if (ty.isAnyError()) break :blk true;
switch (ty.tag()) {
.error_set_inferred => {
if (!ty.castTag(.error_set_inferred).?.data.is_resolved) break :blk true;
},
else => {},
}
break :blk false;
};
if (deferred) {
// Defer until later
try self.deferred_error_sets_relocs.append(self.allocator, atom.off + off);
} else {
mem.writeInt(
u32,
dbg_info_buffer.items[off..][0..4],
atom.off + value.off,
target_endian,
);
}
while (decl_state.abbrev_relocs.popOrNull()) |reloc| {
const symbol = decl_state.abbrev_table.items[reloc.target];
const ty = symbol.@"type";
const deferred: bool = blk: {
if (ty.isAnyError()) break :blk true;
switch (ty.tag()) {
.error_set_inferred => {
if (!ty.castTag(.error_set_inferred).?.data.is_resolved) break :blk true;
},
else => {},
}
}
// Offsets to positions with known a priori relative displacement values.
// Here, we just need to add the offset of the atom to the read value in the
// relocated cell.
// TODO Should probably generalise this with type relocs.
for (nested_ref4_relocs.items) |off| {
const addend = mem.readInt(u32, dbg_info_buffer.items[off..][0..4], target_endian);
break :blk false;
};
if (deferred) {
try self.global_abbrev_relocs.append(gpa, .{
.target = undefined,
.offset = reloc.offset,
.atom = reloc.atom,
.addend = reloc.addend,
});
} else {
mem.writeInt(
u32,
dbg_info_buffer.items[off..][0..4],
atom.off + addend,
dbg_info_buffer.items[reloc.offset..][0..@sizeOf(u32)],
symbol.atom.off + symbol.offset + reloc.addend,
target_endian,
);
}
@@ -574,77 +571,6 @@ pub fn commitDeclDebugInfo(
try self.writeDeclDebugInfo(file, atom, dbg_info_buffer.items);
}
pub fn commitErrorSetDebugInfo(self: *Dwarf, file: *File, module: *Module) !void {
if (self.deferred_error_sets_relocs.items.len == 0) return; // Nothing to do
const gpa = self.allocator;
var arena_alloc = std.heap.ArenaAllocator.init(gpa);
defer arena_alloc.deinit();
const arena = arena_alloc.allocator();
const error_set = try arena.create(Module.ErrorSet);
const ty = try Type.Tag.error_set.create(arena, error_set);
var names = Module.ErrorSet.NameMap{};
try names.ensureUnusedCapacity(arena, module.global_error_set.count());
var it = module.global_error_set.keyIterator();
while (it.next()) |key| {
names.putAssumeCapacityNoClobber(key.*, {});
}
error_set.names = names;
var dbg_info_buffer = std.ArrayList(u8).init(arena);
try self.addDbgInfoErrorSet(arena, module, ty, &dbg_info_buffer);
const atom = try gpa.create(Atom);
errdefer gpa.destroy(atom);
atom.* = .{
.prev = null,
.next = null,
.off = 0,
.len = 0,
};
try self.managed_atoms.append(gpa, atom);
try self.updateDeclDebugInfoAllocation(file, atom, @intCast(u32, dbg_info_buffer.items.len));
try self.writeDeclDebugInfo(file, atom, dbg_info_buffer.items);
const file_pos = blk: {
switch (self.tag) {
.elf => {
const elf_file = file.cast(File.Elf).?;
const debug_info_sect = &elf_file.sections.items[elf_file.debug_info_section_index.?];
break :blk debug_info_sect.sh_offset;
},
.macho => {
const macho_file = file.cast(File.MachO).?;
const d_sym = &macho_file.d_sym.?;
const dwarf_segment = &d_sym.load_commands.items[d_sym.dwarf_segment_cmd_index.?].segment;
const debug_info_sect = &dwarf_segment.sections.items[d_sym.debug_info_section_index.?];
break :blk debug_info_sect.offset;
},
else => unreachable,
}
};
const target_endian = self.target.cpu.arch.endian();
var buf: [@sizeOf(u32)]u8 = undefined;
while (self.deferred_error_sets_relocs.popOrNull()) |reloc| {
mem.writeInt(u32, &buf, atom.off, target_endian);
switch (self.tag) {
.elf => {
const elf_file = file.cast(File.Elf).?;
try elf_file.base.file.?.pwriteAll(&buf, file_pos + reloc);
},
.macho => {
const macho_file = file.cast(File.MachO).?;
const d_sym = &macho_file.d_sym.?;
try d_sym.file.pwriteAll(&buf, file_pos + reloc);
},
else => unreachable,
}
}
}
fn updateDeclDebugInfoAllocation(self: *Dwarf, file: *File, atom: *Atom, len: u32) !void {
const tracy = trace(@src());
defer tracy.end();
@@ -904,14 +830,12 @@ fn addDbgInfoType(
self: *Dwarf,
arena: Allocator,
module: *Module,
atom: *Atom,
ty: Type,
dbg_info_buffer: *std.ArrayList(u8),
dbg_info_type_relocs: *DbgInfoTypeRelocsTable,
nested_ref4_relocs: *std.ArrayList(u32),
) error{OutOfMemory}!void {
const target = self.target;
const target_endian = self.target.cpu.arch.endian();
var relocs = std.ArrayList(struct { ty: Type, reloc: u32 }).init(arena);
switch (ty.zigTypeTag()) {
.NoReturn => unreachable,
@@ -970,7 +894,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = Type.bool, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, Type.bool, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.ensureUnusedCapacity(6);
dbg_info_buffer.appendAssumeCapacity(0);
@@ -982,7 +906,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = payload_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, payload_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
const offset = abi_size - payload_ty.abiSize(target);
try leb128.writeULEB128(dbg_info_buffer.writer(), offset);
@@ -1011,7 +935,7 @@ fn addDbgInfoType(
try dbg_info_buffer.resize(index + 4);
var buf = try arena.create(Type.SlicePtrFieldTypeBuffer);
const ptr_ty = ty.slicePtrFieldType(buf);
try relocs.append(.{ .ty = ptr_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, ptr_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.ensureUnusedCapacity(6);
dbg_info_buffer.appendAssumeCapacity(0);
@@ -1023,7 +947,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = Type.initTag(.usize), .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, Type.usize, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.ensureUnusedCapacity(2);
dbg_info_buffer.appendAssumeCapacity(@sizeOf(usize));
@@ -1035,7 +959,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = ty.childType(), .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, ty.childType(), @intCast(u32, index), null);
}
},
.Struct => blk: {
@ -1059,7 +983,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = field, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, field, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
const field_off = ty.structFieldOffset(field_index, target);
try leb128.writeULEB128(dbg_info_buffer.writer(), field_off);
@@ -1090,7 +1014,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = field.ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, field.ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
const field_off = ty.structFieldOffset(field_index, target);
try leb128.writeULEB128(dbg_info_buffer.writer(), field_off);
@@ -1169,14 +1093,8 @@ fn addDbgInfoType(
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.type, DW.FORM.ref4
const inner_union_index = dbg_info_buffer.items.len;
try dbg_info_buffer.ensureUnusedCapacity(4);
mem.writeInt(
u32,
dbg_info_buffer.addManyAsArrayAssumeCapacity(4),
@intCast(u32, inner_union_index + 5),
target_endian,
);
try nested_ref4_relocs.append(@intCast(u32, inner_union_index));
try dbg_info_buffer.resize(inner_union_index + 4);
try self.addTypeReloc(atom, ty, @intCast(u32, inner_union_index), 5);
// DW.AT.data_member_location, DW.FORM.sdata
try leb128.writeULEB128(dbg_info_buffer.writer(), payload_offset);
}
@@ -1203,7 +1121,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = field.ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, field.ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.append(0);
}
@@ -1220,7 +1138,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = union_obj.tag_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, union_obj.tag_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try leb128.writeULEB128(dbg_info_buffer.writer(), tag_offset);
@@ -1229,7 +1147,40 @@ fn addDbgInfoType(
}
},
.ErrorSet => {
try self.addDbgInfoErrorSet(arena, module, ty, dbg_info_buffer);
// DW.AT.enumeration_type
try dbg_info_buffer.append(abbrev_enum_type);
// DW.AT.byte_size, DW.FORM.sdata
const abi_size = ty.abiSize(target);
try leb128.writeULEB128(dbg_info_buffer.writer(), abi_size);
// DW.AT.name, DW.FORM.string
const name = try ty.nameAllocArena(arena, target);
try dbg_info_buffer.writer().print("{s}\x00", .{name});
// DW.AT.enumerator
const no_error = "(no error)";
try dbg_info_buffer.ensureUnusedCapacity(no_error.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(abbrev_enum_variant);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity(no_error);
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), 0, target_endian);
const error_names = ty.errorSetNames();
for (error_names) |error_name| {
const kv = module.getErrorValue(error_name) catch unreachable;
// DW.AT.enumerator
try dbg_info_buffer.ensureUnusedCapacity(error_name.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(abbrev_enum_variant);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity(error_name);
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), kv.value, target_endian);
}
// DW.AT.enumeration_type delimit children
try dbg_info_buffer.append(0);
},
.ErrorUnion => {
const error_ty = ty.errorUnionSet();
@@ -1255,7 +1206,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = payload_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, payload_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try leb128.writeULEB128(dbg_info_buffer.writer(), payload_off);
@@ -1268,7 +1219,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = error_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, error_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.append(0);
@@ -1280,65 +1231,6 @@ fn addDbgInfoType(
try dbg_info_buffer.append(abbrev_pad1);
},
}
for (relocs.items) |rel| {
const gop = try dbg_info_type_relocs.getOrPutContext(self.allocator, rel.ty, .{
.target = self.target,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(self.allocator, rel.reloc);
}
}
fn addDbgInfoErrorSet(
self: *Dwarf,
arena: Allocator,
module: *Module,
ty: Type,
dbg_info_buffer: *std.ArrayList(u8),
) error{OutOfMemory}!void {
const target = self.target;
const target_endian = self.target.cpu.arch.endian();
// DW.AT.enumeration_type
try dbg_info_buffer.append(abbrev_enum_type);
// DW.AT.byte_size, DW.FORM.sdata
const abi_size = ty.abiSize(target);
try leb128.writeULEB128(dbg_info_buffer.writer(), abi_size);
// DW.AT.name, DW.FORM.string
const name = try ty.nameAllocArena(arena, target);
try dbg_info_buffer.writer().print("{s}\x00", .{name});
// DW.AT.enumerator
const no_error = "(no error)";
try dbg_info_buffer.ensureUnusedCapacity(no_error.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(abbrev_enum_variant);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity(no_error);
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), 0, target_endian);
const error_names = ty.errorSetNames();
for (error_names) |error_name| {
const kv = module.getErrorValue(error_name) catch unreachable;
// DW.AT.enumerator
try dbg_info_buffer.ensureUnusedCapacity(error_name.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(abbrev_enum_variant);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity(error_name);
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), kv.value, target_endian);
}
// DW.AT.enumeration_type delimit children
try dbg_info_buffer.append(0);
}
pub fn writeDbgAbbrev(self: *Dwarf, file: *File) !void {
@@ -2059,3 +1951,115 @@ fn padToIdeal(actual_size: anytype) @TypeOf(actual_size) {
return std.math.add(@TypeOf(actual_size), actual_size, actual_size / ideal_factor) catch
std.math.maxInt(@TypeOf(actual_size));
}
pub fn addTypeReloc(self: *Dwarf, atom: *const Atom, ty: Type, offset: u32, addend: ?u32) !void {
const decl_state = &self.decl_state.?;
const gpa = self.allocator;
const resolv = decl_state.abbrev_resolver.getContext(ty, .{
.target = self.target,
}) orelse blk: {
const sym_index = @intCast(u32, decl_state.abbrev_table.items.len);
try decl_state.abbrev_table.append(gpa, .{
.atom = atom,
.@"type" = ty,
.offset = undefined,
});
log.debug("@{d}: {}", .{ sym_index, ty.fmtDebug() });
try decl_state.abbrev_resolver.putNoClobberContext(gpa, ty, sym_index, .{
.target = self.target,
});
break :blk decl_state.abbrev_resolver.getContext(ty, .{
.target = self.target,
}).?;
};
const add: u32 = addend orelse 0;
log.debug("{x}: @{d} + {x}", .{ offset, resolv, add });
try decl_state.abbrev_relocs.append(gpa, .{
.target = resolv,
.atom = atom,
.offset = offset,
.addend = add,
});
}
pub fn getDeclDebugLineBuffer(self: *Dwarf) *std.ArrayList(u8) {
return &self.decl_state.?.dbg_line;
}
pub fn getDeclDebugInfoBuffer(self: *Dwarf) *std.ArrayList(u8) {
return &self.decl_state.?.dbg_info;
}
pub fn flushModule(self: *Dwarf, file: *File, module: *Module) !void {
if (self.global_abbrev_relocs.items.len > 0) {
const gpa = self.allocator;
var arena_alloc = std.heap.ArenaAllocator.init(gpa);
defer arena_alloc.deinit();
const arena = arena_alloc.allocator();
const error_set = try arena.create(Module.ErrorSet);
const error_ty = try Type.Tag.error_set.create(arena, error_set);
var names = Module.ErrorSet.NameMap{};
try names.ensureUnusedCapacity(arena, module.global_error_set.count());
var it = module.global_error_set.keyIterator();
while (it.next()) |key| {
names.putAssumeCapacityNoClobber(key.*, {});
}
error_set.names = names;
const atom = try gpa.create(Atom);
errdefer gpa.destroy(atom);
atom.* = .{
.prev = null,
.next = null,
.off = 0,
.len = 0,
};
var dbg_info_buffer = std.ArrayList(u8).init(arena);
try self.addDbgInfoType(arena, module, atom, error_ty, &dbg_info_buffer);
try self.managed_atoms.append(gpa, atom);
try self.updateDeclDebugInfoAllocation(file, atom, @intCast(u32, dbg_info_buffer.items.len));
try self.writeDeclDebugInfo(file, atom, dbg_info_buffer.items);
const file_pos = blk: {
switch (self.tag) {
.elf => {
const elf_file = file.cast(File.Elf).?;
const debug_info_sect = &elf_file.sections.items[elf_file.debug_info_section_index.?];
break :blk debug_info_sect.sh_offset;
},
.macho => {
const macho_file = file.cast(File.MachO).?;
const d_sym = &macho_file.d_sym.?;
const dwarf_segment = &d_sym.load_commands.items[d_sym.dwarf_segment_cmd_index.?].segment;
const debug_info_sect = &dwarf_segment.sections.items[d_sym.debug_info_section_index.?];
break :blk debug_info_sect.offset;
},
else => unreachable,
}
};
var buf: [@sizeOf(u32)]u8 = undefined;
mem.writeInt(u32, &buf, atom.off, self.target.cpu.arch.endian());
while (self.global_abbrev_relocs.popOrNull()) |reloc| {
switch (self.tag) {
.elf => {
const elf_file = file.cast(File.Elf).?;
try elf_file.base.file.?.pwriteAll(&buf, file_pos + reloc.atom.off + reloc.offset);
},
.macho => {
const macho_file = file.cast(File.MachO).?;
const d_sym = &macho_file.d_sym.?;
try d_sym.file.pwriteAll(&buf, file_pos + reloc.atom.off + reloc.offset);
},
else => unreachable,
}
}
}
assert(self.decl_state == null);
}
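Taken together, the per-Decl flow enabled by the new DeclState tracking looks roughly like this from a linker backend's point of view (a sketch assembled from the Elf and MachO hunks below; error handling elided; dw is the *Dwarf that codegen receives through DebugInfoOutput):

try dw.initDeclState(decl); // allocate DeclState: dbg_line, dbg_info, abbrev table/resolver/relocs

// During codegen, the backends append line opcodes and DIEs through
// dw.getDeclDebugLineBuffer() and dw.getDeclDebugInfoBuffer(), and reserve
// DW.FORM.ref4 cells with dw.addTypeReloc(atom, ty, offset, addend).

// Emit the referenced type DIEs, resolve the recorded relocations (deferring
// unresolved error sets to global_abbrev_relocs), then free DeclState.
try dw.commitDeclState(file, module, decl, sym_addr, sym_size);

// At flush time, deferred anyerror relocations are patched against a
// synthetic error-set atom.
try dw.flushModule(file, module);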

View file

@@ -958,8 +958,8 @@ pub fn flushModule(self: *Elf, comp: *Compilation) !void {
const target_endian = self.base.options.target.cpu.arch.endian();
const foreign_endian = target_endian != builtin.cpu.arch.endian();
if (self.dwarf) |*dwarf| {
try dwarf.commitErrorSetDebugInfo(&self.base, module);
if (self.dwarf) |*dw| {
try dw.flushModule(&self.base, module);
}
{
@@ -2232,13 +2232,6 @@ pub fn freeDecl(self: *Elf, decl: *Module.Decl) void {
}
}
fn deinitRelocs(gpa: Allocator, table: *link.File.Dwarf.DbgInfoTypeRelocsTable) void {
for (table.values()) |*value| {
value.relocs.deinit(gpa);
}
table.deinit(gpa);
}
fn getDeclPhdrIndex(self: *Elf, decl: *Module.Decl) !u16 {
const ty = decl.ty;
const zig_ty = ty.zigTypeTag();
@@ -2346,26 +2339,13 @@ pub fn updateFunc(self: *Elf, module: *Module, func: *Module.Fn, air: Air, liven
const decl = func.owner_decl;
self.freeUnnamedConsts(decl);
var debug_buffers_buf: Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.dwarf) |*dw| blk: {
debug_buffers_buf = try dw.initDeclDebugInfo(decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
deinitRelocs(self.base.allocator, &dbg.dbg_info_type_relocs);
}
if (self.dwarf) |*dw| {
try dw.initDeclState(decl);
}
const res = if (debug_buffers) |dbg|
const res = if (self.dwarf) |*dw|
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = dw,
})
else
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .none);
@@ -2379,15 +2359,8 @@ pub fn updateFunc(self: *Elf, module: *Module, func: *Module.Fn, air: Air, liven
},
};
const local_sym = try self.updateDeclCode(decl, code, elf.STT_FUNC);
if (debug_buffers) |dbg| {
try self.dwarf.?.commitDeclDebugInfo(
&self.base,
module,
decl,
local_sym.st_value,
local_sym.st_size,
dbg,
);
if (self.dwarf) |*dw| {
try dw.commitDeclState(&self.base, module, decl, local_sym.st_value, local_sym.st_size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also needs to be updated.
@@ -2421,31 +2394,18 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
var debug_buffers_buf: Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.dwarf) |*dw| blk: {
debug_buffers_buf = try dw.initDeclDebugInfo(decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
deinitRelocs(self.base.allocator, &dbg.dbg_info_type_relocs);
}
if (self.dwarf) |*dw| {
try dw.initDeclState(decl);
}
// TODO implement .debug_info for global variables
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val;
const res = if (debug_buffers) |dbg|
const res = if (self.dwarf) |*dw|
try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl_val,
}, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = dw,
}, .{
.parent_atom_index = decl.link.elf.local_sym_index,
})
@@ -2468,8 +2428,8 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
};
const local_sym = try self.updateDeclCode(decl, code, elf.STT_OBJECT);
if (debug_buffers) |dbg| {
try self.dwarf.?.commitDeclDebugInfo(&self.base, module, decl, local_sym.st_value, local_sym.st_size, dbg);
if (self.dwarf) |*dw| {
try dw.commitDeclState(&self.base, module, decl, local_sym.st_value, local_sym.st_size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also needs to be updated.

View file

@@ -453,6 +453,12 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
if (self.d_sym) |*d_sym| {
if (self.base.options.module) |module| {
try d_sym.dwarf.flushModule(&self.base, module);
}
}
// If there is no Zig code to compile, then we should skip flushing the output file because it
// will not be part of the linker line anyway.
const module_obj_path: ?[]const u8 = if (self.base.options.module) |module| blk: {
@@ -3670,32 +3676,17 @@ pub fn updateFunc(self: *MachO, module: *Module, func: *Module.Fn, air: Air, liv
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
var debug_buffers_buf: link.File.Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.d_sym) |*d_sym| blk: {
debug_buffers_buf = try d_sym.initDeclDebugInfo(module, decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
for (dbg.dbg_info_type_relocs.values()) |*value| {
value.relocs.deinit(self.base.allocator);
}
dbg.dbg_info_type_relocs.deinit(self.base.allocator);
}
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.initDeclState(decl);
}
const res = if (debug_buffers) |dbg|
const res = if (self.d_sym) |*d_sym|
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = &d_sym.dwarf,
})
else
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .none);
switch (res) {
.appended => {
try decl.link.macho.code.appendSlice(self.base.allocator, code_buffer.items);
@@ -3707,12 +3698,10 @@ pub fn updateFunc(self: *MachO, module: *Module, func: *Module.Fn, air: Air, liv
},
}
_ = try self.placeDecl(decl, decl.link.macho.code.items.len);
const symbol = try self.placeDecl(decl, decl.link.macho.code.items.len);
if (debug_buffers) |db| {
if (self.d_sym) |*d_sym| {
try d_sym.commitDeclDebugInfo(module, decl, db);
}
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.commitDeclState(&self.base, module, decl, symbol.n_value, decl.link.macho.size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also
@@ -3812,33 +3801,17 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
var debug_buffers_buf: link.File.Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.d_sym) |*d_sym| blk: {
debug_buffers_buf = try d_sym.initDeclDebugInfo(module, decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
for (dbg.dbg_info_type_relocs.values()) |*value| {
value.relocs.deinit(self.base.allocator);
}
dbg.dbg_info_type_relocs.deinit(self.base.allocator);
}
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.initDeclState(decl);
}
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val;
const res = if (debug_buffers) |dbg|
const res = if (self.d_sym) |*d_sym|
try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl_val,
}, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = &d_sym.dwarf,
}, .{
.parent_atom_index = decl.link.macho.local_sym_index,
})
@@ -3870,7 +3843,11 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
},
}
};
_ = try self.placeDecl(decl, code.len);
const symbol = try self.placeDecl(decl, code.len);
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.commitDeclState(&self.base, module, decl, symbol.n_value, decl.link.macho.size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also
// needs to be updated.
@@ -4084,8 +4061,9 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64
}
pub fn updateDeclLineNumber(self: *MachO, module: *Module, decl: *const Module.Decl) !void {
_ = module;
if (self.d_sym) |*d_sym| {
try d_sym.updateDeclLineNumber(module, decl);
try d_sym.dwarf.updateDeclLineNumber(&self.base, decl);
}
}

View file

@@ -645,25 +645,3 @@ fn writeStringTable(self: *DebugSymbols) !void {
self.load_commands_dirty = true;
}
pub fn updateDeclLineNumber(self: *DebugSymbols, module: *Module, decl: *const Module.Decl) !void {
_ = module;
return self.dwarf.updateDeclLineNumber(&self.base.base, decl);
}
/// Caller owns the returned memory.
pub fn initDeclDebugInfo(self: *DebugSymbols, module: *Module, decl: *Module.Decl) !Dwarf.DeclDebugBuffers {
_ = module;
return self.dwarf.initDeclDebugInfo(decl);
}
pub fn commitDeclDebugInfo(
self: *DebugSymbols,
module: *Module,
decl: *Module.Decl,
debug_buffers: *Dwarf.DeclDebugBuffers,
) !void {
const symbol = self.base.locals.items[decl.link.macho.local_sym_index];
const atom = &decl.link.macho;
return self.dwarf.commitDeclDebugInfo(&self.base.base, module, decl, symbol.n_value, atom.size, debug_buffers);
}