update GenericWriter usage found by test-cases

This commit is contained in:
Andrew Kelley 2025-08-27 13:57:55 -07:00
parent 888f00e856
commit ea3471288a
4 changed files with 47 additions and 38 deletions

View file

@@ -719,7 +719,7 @@ fn diagnoseIncompleteDefinitions(p: *Parser) !void {
}
/// root : (decl | assembly ';' | staticAssert)*
pub fn parse(pp: *Preprocessor) Compilation.Error!Tree {
pub fn parse(pp: *Preprocessor) Error!Tree {
assert(pp.linemarkers == .none);
pp.comp.pragmaEvent(.before_parse);

View file

@@ -116,15 +116,17 @@ pub fn translate(
var driver: aro.Driver = .{ .comp = comp };
defer driver.deinit();
var macro_buf = std.array_list.Managed(u8).init(gpa);
var macro_buf: std.Io.Writer.Allocating = .init(gpa);
defer macro_buf.deinit();
assert(!try driver.parseArgs(std.io.null_writer, macro_buf.writer(), args));
var trash: [64]u8 = undefined;
var discarding: std.Io.Writer.Discarding = .init(&trash);
assert(!try driver.parseArgs(&discarding.writer, &macro_buf.writer, args));
assert(driver.inputs.items.len == 1);
const source = driver.inputs.items[0];
const builtin_macros = try comp.generateBuiltinMacros(.include_system_defines);
const user_macros = try comp.addSourceFromBuffer("<command line>", macro_buf.items);
const user_macros = try comp.addSourceFromBuffer("<command line>", macro_buf.written());
var pp = try aro.Preprocessor.initDefault(comp);
defer pp.deinit();
@@ -698,11 +700,10 @@ fn transEnumDecl(c: *Context, scope: *Scope, enum_decl: *const Type.Enum, field_
}
fn getTypeStr(c: *Context, ty: Type) ![]const u8 {
var buf: std.ArrayListUnmanaged(u8) = .empty;
defer buf.deinit(c.gpa);
const w = buf.writer(c.gpa);
try ty.print(c.mapper, c.comp.langopts, w);
return c.arena.dupe(u8, buf.items);
var allocating: std.Io.Writer.Allocating = .init(c.gpa);
defer allocating.deinit();
ty.print(c.mapper, c.comp.langopts, &allocating.writer) catch return error.OutOfMemory;
return c.arena.dupe(u8, allocating.written());
}
fn transType(c: *Context, scope: *Scope, raw_ty: Type, qual_handling: Type.QualHandling, source_loc: TokenIndex) TypeError!ZigNode {
@@ -1820,6 +1821,7 @@ pub fn main() !void {
var tree = translate(gpa, &aro_comp, args) catch |err| switch (err) {
error.ParsingFailed, error.FatalError => renderErrorsAndExit(&aro_comp),
error.OutOfMemory => return error.OutOfMemory,
error.WriteFailed => return error.WriteFailed,
error.StreamTooLong => std.process.fatal("An input file was larger than 4GiB", .{}),
};
defer tree.deinit(gpa);

View file

@@ -65,9 +65,15 @@ pub fn deinit(self: *Pdb) void {
pub fn parseDbiStream(self: *Pdb) !void {
var stream = self.getStream(pdb.StreamType.dbi) orelse
return error.InvalidDebugInfo;
const reader = stream.reader();
const header = try reader.readStruct(std.pdb.DbiStreamHeader);
const gpa = self.allocator;
const deprecated_reader = stream.reader();
var adapted_buffer: [1024]u8 = undefined;
var adapted_reader = deprecated_reader.adaptToNewApi(&adapted_buffer);
const reader = &adapted_reader.new_interface;
const header = try reader.takeStruct(std.pdb.DbiStreamHeader, .little);
if (header.version_header != 19990903) // V70, only value observed by LLVM team
return error.UnknownPDBVersion;
// if (header.Age != age)
@@ -76,22 +82,24 @@ pub fn parseDbiStream(self: *Pdb) !void {
const mod_info_size = header.mod_info_size;
const section_contrib_size = header.section_contribution_size;
var modules = std.array_list.Managed(Module).init(self.allocator);
var modules = std.array_list.Managed(Module).init(gpa);
errdefer modules.deinit();
// Module Info Substream
var mod_info_offset: usize = 0;
while (mod_info_offset != mod_info_size) {
const mod_info = try reader.readStruct(pdb.ModInfo);
const mod_info = try reader.takeStruct(pdb.ModInfo, .little);
var this_record_len: usize = @sizeOf(pdb.ModInfo);
const module_name = try reader.readUntilDelimiterAlloc(self.allocator, 0, 1024);
errdefer self.allocator.free(module_name);
this_record_len += module_name.len + 1;
var module_name: std.Io.Writer.Allocating = .init(gpa);
defer module_name.deinit();
this_record_len += try reader.streamDelimiterLimit(&module_name.writer, 0, .limited(1024));
this_record_len += 1;
const obj_file_name = try reader.readUntilDelimiterAlloc(self.allocator, 0, 1024);
errdefer self.allocator.free(obj_file_name);
this_record_len += obj_file_name.len + 1;
var obj_file_name: std.Io.Writer.Allocating = .init(gpa);
defer obj_file_name.deinit();
this_record_len += try reader.streamDelimiterLimit(&obj_file_name.writer, 0, .limited(1024));
this_record_len += 1;
if (this_record_len % 4 != 0) {
const round_to_next_4 = (this_record_len | 0x3) + 1;
@@ -102,8 +110,8 @@ pub fn parseDbiStream(self: *Pdb) !void {
try modules.append(Module{
.mod_info = mod_info,
.module_name = module_name,
.obj_file_name = obj_file_name,
.module_name = try module_name.toOwnedSlice(),
.obj_file_name = try obj_file_name.toOwnedSlice(),
.populated = false,
.symbols = undefined,
@@ -117,21 +125,21 @@ pub fn parseDbiStream(self: *Pdb) !void {
}
// Section Contribution Substream
var sect_contribs = std.array_list.Managed(pdb.SectionContribEntry).init(self.allocator);
var sect_contribs = std.array_list.Managed(pdb.SectionContribEntry).init(gpa);
errdefer sect_contribs.deinit();
var sect_cont_offset: usize = 0;
if (section_contrib_size != 0) {
const version = reader.readEnum(std.pdb.SectionContrSubstreamVersion, .little) catch |err| switch (err) {
error.InvalidValue => return error.InvalidDebugInfo,
else => |e| return e,
const version = reader.takeEnum(std.pdb.SectionContrSubstreamVersion, .little) catch |err| switch (err) {
error.InvalidEnumTag, error.EndOfStream => return error.InvalidDebugInfo,
error.ReadFailed => return error.ReadFailed,
};
_ = version;
sect_cont_offset += @sizeOf(u32);
}
while (sect_cont_offset != section_contrib_size) {
const entry = try sect_contribs.addOne();
entry.* = try reader.readStruct(pdb.SectionContribEntry);
entry.* = try reader.takeStruct(pdb.SectionContribEntry, .little);
sect_cont_offset += @sizeOf(pdb.SectionContribEntry);
if (sect_cont_offset > section_contrib_size)
@@ -233,6 +241,7 @@ pub fn getSymbolName(self: *Pdb, module: *Module, address: u64) ?[]const u8 {
pub fn getLineNumberInfo(self: *Pdb, module: *Module, address: u64) !std.debug.SourceLocation {
std.debug.assert(module.populated);
const subsect_info = module.subsect_info;
const gpa = self.allocator;
var sect_offset: usize = 0;
var skip_len: usize = undefined;
@@ -287,7 +296,16 @@ pub fn getLineNumberInfo(self: *Pdb, module: *Module, address: u64) !std.debug.S
const chksum_hdr: *align(1) pdb.FileChecksumEntryHeader = @ptrCast(&module.subsect_info[subsect_index]);
const strtab_offset = @sizeOf(pdb.StringTableHeader) + chksum_hdr.file_name_offset;
try self.string_table.?.seekTo(strtab_offset);
const source_file_name = try self.string_table.?.reader().readUntilDelimiterAlloc(self.allocator, 0, 1024);
const source_file_name = s: {
const deprecated_reader = self.string_table.?.reader();
var adapted_buffer: [1024]u8 = undefined;
var adapted_reader = deprecated_reader.adaptToNewApi(&adapted_buffer);
var source_file_name: std.Io.Writer.Allocating = .init(gpa);
defer source_file_name.deinit();
_ = try adapted_reader.new_interface.streamDelimiterLimit(&source_file_name.writer, 0, .limited(1024));
break :s try source_file_name.toOwnedSlice();
};
errdefer gpa.free(source_file_name);
const line_entry_idx = line_i - 1;

View file

@@ -1075,17 +1075,6 @@ test "assigning packed struct inside another packed struct" {
try expect(S.mem.padding == 0);
}
test "packed struct used as part of anon decl name" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;
const S = packed struct { a: u0 = 0 };
var a: u8 = 0;
_ = &a;
try std.io.null_writer.print("\n{} {}\n", .{ a, S{} });
}
test "packed struct acts as a namespace" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO