update tools and other miscellaneous things to new APIs

Andrew Kelley 2025-08-29 20:49:18 -07:00
parent fadd268a60
commit 9adcc31ca3
20 changed files with 55 additions and 53 deletions
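The changes are mechanical and follow two patterns from the updated std APIs: Dir.readFileAlloc now takes the file path first, the allocator second, and a size cap expressed as .limited(n) or .unlimited rather than a bare integer; and reads from an already-open file go through File.reader(&.{}) and its generic std.Io.Reader interface instead of the removed deprecatedReader()/readToEndAlloc helpers. A minimal sketch of both patterns, assuming the post-commit standard library; the names path, file, and max_bytes are illustrative and not taken from the diff:

// Sketch only; illustrative names, not code from this commit.
const std = @import("std");

fn readBothWays(allocator: std.mem.Allocator, file: std.fs.File, path: []const u8) !void {
    // Whole-file read through a directory handle: path first, allocator second,
    // and a limit value instead of a bare maximum byte count.
    const max_bytes = 1024 * 1024;
    const from_path = try std.fs.cwd().readFileAlloc(path, allocator, .limited(max_bytes));
    defer allocator.free(from_path);

    // Streaming read from an open file: create a File.Reader with no extra
    // buffer, then allocate the remaining bytes via its std.Io.Reader interface.
    var file_reader = file.reader(&.{});
    const from_file = try file_reader.interface.allocRemaining(allocator, .unlimited);
    defer allocator.free(from_file);
}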

View file

@@ -164,13 +164,14 @@ pub fn main() !void {
     } else {
         switch (options.input_source) {
             .stdio => |file| {
-                break :full_input file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| {
+                var file_reader = file.reader(&.{});
+                break :full_input file_reader.interface.allocRemaining(allocator, .unlimited) catch |err| {
                     try error_handler.emitMessage(allocator, .err, "unable to read input from stdin: {s}", .{@errorName(err)});
                     std.process.exit(1);
                 };
             },
             .filename => |input_filename| {
-                break :full_input std.fs.cwd().readFileAlloc(allocator, input_filename, std.math.maxInt(usize)) catch |err| {
+                break :full_input std.fs.cwd().readFileAlloc(input_filename, allocator, .unlimited) catch |err| {
                     try error_handler.emitMessage(allocator, .err, "unable to read input file path '{s}': {s}", .{ input_filename, @errorName(err) });
                     std.process.exit(1);
                 };
@@ -462,7 +463,10 @@ const IoStream = struct {
     pub fn readAll(self: Source, allocator: std.mem.Allocator) !Data {
         return switch (self) {
             inline .file, .stdio => |file| .{
-                .bytes = try file.readToEndAlloc(allocator, std.math.maxInt(usize)),
+                .bytes = b: {
+                    var file_reader = file.reader(&.{});
+                    break :b try file_reader.interface.allocRemaining(allocator, .unlimited);
+                },
                 .needs_free = true,
             },
             .memory => |list| .{ .bytes = list.items, .needs_free = false },

View file

@@ -13,7 +13,7 @@ pub fn main() !void {
     const input_path = args[1];
     const optimize_mode_text = args[2];
-    const input_bytes = try std.fs.cwd().readFileAlloc(arena, input_path, 5 * 1024 * 1024);
+    const input_bytes = try std.fs.cwd().readFileAlloc(input_path, arena, .limited(5 * 1024 * 1024));
     const optimize_mode = std.meta.stringToEnum(std.builtin.OptimizeMode, optimize_mode_text).?;
     var stderr = input_bytes;

View file

@@ -32,7 +32,8 @@ pub fn main() !void {
     const hello_stdout = "hello from stdout";
     var buf: [hello_stdout.len]u8 = undefined;
-    const n = try child.stdout.?.deprecatedReader().readAll(&buf);
+    var stdout_reader = child.stdout.?.reader(&.{});
+    const n = try stdout_reader.interface.readSliceShort(&buf);
     if (!std.mem.eql(u8, buf[0..n], hello_stdout)) {
         testError("child stdout: '{s}'; want '{s}'", .{ buf[0..n], hello_stdout });
     }

View file

@@ -9,8 +9,8 @@ pub fn main() !void {
     const actual_path = args[1];
     const expected_path = args[2];
-    const actual = try std.fs.cwd().readFileAlloc(arena, actual_path, 1024 * 1024);
-    const expected = try std.fs.cwd().readFileAlloc(arena, expected_path, 1024 * 1024);
+    const actual = try std.fs.cwd().readFileAlloc(actual_path, arena, .limited(1024 * 1024));
+    const expected = try std.fs.cwd().readFileAlloc(expected_path, arena, .limited(1024 * 1024));
     // The actual output starts with a comment which we should strip out before comparing.
     const comment_str = "/* This file was generated by ConfigHeader using the Zig Build System. */\n";

View file

@@ -6,8 +6,8 @@ pub fn main() !void {
     const args = try std.process.argsAlloc(arena);
     if (args.len != 3) return error.BadUsage; // usage: 'check_differ <path a> <path b>'
-    const contents_1 = try std.fs.cwd().readFileAlloc(arena, args[1], 1024 * 1024 * 64); // 64 MiB ought to be plenty
-    const contents_2 = try std.fs.cwd().readFileAlloc(arena, args[2], 1024 * 1024 * 64); // 64 MiB ought to be plenty
+    const contents_1 = try std.fs.cwd().readFileAlloc(args[1], arena, .limited(1024 * 1024 * 64)); // 64 MiB ought to be plenty
+    const contents_2 = try std.fs.cwd().readFileAlloc(args[2], arena, .limited(1024 * 1024 * 64)); // 64 MiB ought to be plenty
     if (std.mem.eql(u8, contents_1, contents_2)) {
         return error.FilesMatch;

View file

@@ -77,7 +77,8 @@ pub fn main() !void {
     var code_dir = try fs.cwd().openDir(code_dir_path, .{});
     defer code_dir.close();
-    const input_file_bytes = try in_file.deprecatedReader().readAllAlloc(arena, max_doc_file_size);
+    var in_file_reader = in_file.reader(&.{});
+    const input_file_bytes = try in_file_reader.interface.allocRemaining(arena, .limited(max_doc_file_size));
     var tokenizer = Tokenizer.init(input_path, input_file_bytes);
     var toc = try genToc(arena, &tokenizer);
@@ -1039,10 +1040,8 @@ fn genHtml(
     });
     defer allocator.free(out_basename);
-    const contents = code_dir.readFileAlloc(allocator, out_basename, std.math.maxInt(u32)) catch |err| {
-        return parseError(tokenizer, code.token, "unable to open '{s}': {s}", .{
-            out_basename, @errorName(err),
-        });
+    const contents = code_dir.readFileAlloc(out_basename, allocator, .limited(std.math.maxInt(u32))) catch |err| {
+        return parseError(tokenizer, code.token, "unable to open '{s}': {t}", .{ out_basename, err });
     };
     defer allocator.free(contents);

View file

@@ -70,7 +70,7 @@ pub fn main() !void {
     const zig_path = opt_zig orelse fatal("missing zig compiler path (--zig)", .{});
     const cache_root = opt_cache_root orelse fatal("missing cache root path (--cache-root)", .{});
-    const source_bytes = try fs.cwd().readFileAlloc(arena, input_path, std.math.maxInt(u32));
+    const source_bytes = try fs.cwd().readFileAlloc(input_path, arena, .limited(std.math.maxInt(u32)));
     const code = try parseManifest(arena, source_bytes);
     const source = stripManifest(source_bytes);

View file

@@ -38,10 +38,9 @@ pub fn main() !void {
     defer debug_info.deinit(gpa);
     const cov_bytes = cov_path.root_dir.handle.readFileAllocOptions(
-        arena,
         cov_path.sub_path,
-        1 << 30,
-        null,
+        arena,
+        .limited(1 << 30),
         .of(SeenPcsHeader),
         null,
     ) catch |err| {

View file

@@ -1,6 +1,5 @@
 const std = @import("std");
 const fs = std.fs;
-const io = std.io;
 const mem = std.mem;
 const process = std.process;
 const assert = std.debug.assert;
@@ -93,7 +92,7 @@ pub fn main() anyerror!void {
     var sdk_dir = try std.fs.cwd().openDir(sysroot_path, .{});
     defer sdk_dir.close();
-    const sdk_info = try sdk_dir.readFileAlloc(allocator, "SDKSettings.json", std.math.maxInt(u32));
+    const sdk_info = try sdk_dir.readFileAlloc("SDKSettings.json", allocator, .limited(std.math.maxInt(u32)));
     const parsed_json = try std.json.parseFromSlice(struct {
         DefaultProperties: struct { MACOSX_DEPLOYMENT_TARGET: []const u8 },
@@ -198,7 +197,8 @@ fn fetchTarget(
     var dirs = std.StringHashMap(fs.Dir).init(arena);
     try dirs.putNoClobber(".", dest_dir);
-    const headers_list_str = try headers_list_file.deprecatedReader().readAllAlloc(arena, std.math.maxInt(usize));
+    var headers_list_file_reader = headers_list_file.reader(&.{});
+    const headers_list_str = try headers_list_file_reader.interface.allocRemaining(arena, .unlimited);
     const prefix = "/usr/include";
     var it = mem.splitScalar(u8, headers_list_str, '\n');

View file

@@ -136,7 +136,7 @@ fn readExtRegistry(exts: *std.array_list.Managed(Extension), dir: std.fs.Dir, su
 }
 fn readRegistry(comptime RegistryType: type, dir: std.fs.Dir, path: []const u8) !RegistryType {
-    const spec = try dir.readFileAlloc(allocator, path, std.math.maxInt(usize));
+    const spec = try dir.readFileAlloc(path, allocator, .unlimited);
     // Required for json parsing.
     // TODO: ALI
     @setEvalBranchQuota(10000);
@@ -189,7 +189,7 @@ fn tagPriorityScore(tag: []const u8) usize {
 }
 fn render(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     registry: CoreRegistry,
     extensions: []const Extension,
 ) !void {
@@ -214,7 +214,7 @@ fn render(
         \\ none,
         \\ _,
         \\
-        \\ pub fn format(self: Id, writer: *std.io.Writer) std.io.Writer.Error!void {
+        \\ pub fn format(self: Id, writer: *std.Io.Writer) std.Io.Writer.Error!void {
         \\ switch (self) {
         \\ .none => try writer.writeAll("(none)"),
         \\ else => try writer.print("%{d}", .{@intFromEnum(self)}),
@@ -327,7 +327,7 @@ fn render(
 }
 fn renderInstructionSet(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     core: CoreRegistry,
     extensions: []const Extension,
     all_operand_kinds: OperandKindMap,
@@ -362,7 +362,7 @@ fn renderInstructionSet(
 }
 fn renderInstructionsCase(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     set_name: []const u8,
     instructions: []const Instruction,
     all_operand_kinds: OperandKindMap,
@@ -409,7 +409,7 @@ fn renderInstructionsCase(
     );
 }
-fn renderClass(writer: *std.io.Writer, instructions: []const Instruction) !void {
+fn renderClass(writer: *std.Io.Writer, instructions: []const Instruction) !void {
     var class_map = std.StringArrayHashMap(void).init(allocator);
     for (instructions) |inst| {
@@ -427,7 +427,7 @@ fn renderClass(writer: *std.io.Writer, instructions: []const Instruction) !void
 const Formatter = struct {
     data: []const u8,
-    fn format(f: Formatter, writer: *std.Io.Writer) std.io.Writer.Error!void {
+    fn format(f: Formatter, writer: *std.Io.Writer) std.Io.Writer.Error!void {
         var id_buf: [128]u8 = undefined;
         var fw: std.Io.Writer = .fixed(&id_buf);
         for (f.data, 0..) |c, i| {
@@ -457,7 +457,7 @@ fn formatId(identifier: []const u8) std.fmt.Alt(Formatter, Formatter.format) {
     return .{ .data = .{ .data = identifier } };
 }
-fn renderOperandKind(writer: *std.io.Writer, operands: []const OperandKind) !void {
+fn renderOperandKind(writer: *std.Io.Writer, operands: []const OperandKind) !void {
     try writer.writeAll(
         \\pub const OperandKind = enum {
         \\ opcode,
@@ -513,7 +513,7 @@ fn renderOperandKind(writer: *std.io.Writer, operands: []const OperandKind) !voi
     try writer.writeAll("};\n}\n};\n");
 }
-fn renderEnumerant(writer: *std.io.Writer, enumerant: Enumerant) !void {
+fn renderEnumerant(writer: *std.Io.Writer, enumerant: Enumerant) !void {
     try writer.print(".{{.name = \"{s}\", .value = ", .{enumerant.enumerant});
     switch (enumerant.value) {
         .bitflag => |flag| try writer.writeAll(flag),
@@ -530,7 +530,7 @@ fn renderEnumerant(writer: *std.io.Writer, enumerant: Enumerant) !void {
 }
 fn renderOpcodes(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     opcode_type_name: []const u8,
     want_operands: bool,
     instructions: []const Instruction,
@@ -629,7 +629,7 @@ fn renderOpcodes(
 }
 fn renderOperandKinds(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     kinds: []const OperandKind,
     extended_structs: ExtendedStructSet,
 ) !void {
@@ -643,7 +643,7 @@ fn renderOperandKinds(
 }
 fn renderValueEnum(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     enumeration: OperandKind,
     extended_structs: ExtendedStructSet,
 ) !void {
@@ -721,7 +721,7 @@ fn renderValueEnum(
 }
 fn renderBitEnum(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     enumeration: OperandKind,
     extended_structs: ExtendedStructSet,
 ) !void {
@@ -804,7 +804,7 @@ fn renderBitEnum(
 }
 fn renderOperand(
-    writer: *std.io.Writer,
+    writer: *std.Io.Writer,
     kind: enum {
         @"union",
         instruction,
@@ -888,7 +888,7 @@ fn renderOperand(
     try writer.writeAll(",\n");
 }
-fn renderFieldName(writer: *std.io.Writer, operands: []const Operand, field_index: usize) !void {
+fn renderFieldName(writer: *std.Io.Writer, operands: []const Operand, field_index: usize) !void {
     const operand = operands[field_index];
     derive_from_kind: {

View file

@@ -299,10 +299,9 @@ pub fn main() !void {
     // Read the ELF header.
    const elf_bytes = build_all_dir.readFileAllocOptions(
-        arena,
         libc_so_path,
-        100 * 1024 * 1024,
-        1 * 1024 * 1024,
+        arena,
+        .limited(100 * 1024 * 1024),
         .of(elf.Elf64_Ehdr),
         null,
     ) catch |err| {

View file

@@ -32,7 +32,7 @@ pub fn main() !void {
     }).lessThan);
     for (names.items) |name| {
-        const contents = try std.fs.cwd().readFileAlloc(allocator, name, 250001);
+        const contents = try std.fs.cwd().readFileAlloc(name, allocator, .limited(250001));
         try output.writeAll("test ");
         try writeString(output, name);
         try output.writeAll(" {\n try ");

View file

@@ -248,7 +248,7 @@ pub fn main() !void {
     try Io.Writer.flush(stdout);
 }
-fn usage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void {
+fn usage(w: *std.Io.Writer, arg0: []const u8) std.Io.Writer.Error!void {
     try w.print(
         \\Usage: {s} /path/to/zig /path/to/linux
         \\Alternative Usage: zig run /path/to/git/zig/tools/generate_linux_syscalls.zig -- /path/to/zig /path/to/linux

View file

@@ -52,7 +52,7 @@ pub fn main() !void {
     const zig_exe = opt_zig_exe orelse fatal("missing path to zig\n{s}", .{usage});
     const input_file_name = opt_input_file_name orelse fatal("missing input file\n{s}", .{usage});
-    const input_file_bytes = try std.fs.cwd().readFileAlloc(arena, input_file_name, std.math.maxInt(u32));
+    const input_file_bytes = try std.fs.cwd().readFileAlloc(input_file_name, arena, .limited(std.math.maxInt(u32)));
     const case = try Case.parse(arena, input_file_bytes);
     // Check now: if there are any targets using the `cbe` backend, we need the lib dir.
@@ -226,7 +226,7 @@ const Eval = struct {
     cc_child_args: *std.ArrayListUnmanaged([]const u8),
     const StreamEnum = enum { stdout, stderr };
-    const Poller = std.io.Poller(StreamEnum);
+    const Poller = std.Io.Poller(StreamEnum);
     /// Currently this function assumes the previous updates have already been written.
     fn write(eval: *Eval, update: Case.Update) void {

View file

@@ -1,6 +1,5 @@
 const std = @import("std");
 const builtin = @import("builtin");
-const io = std.io;
 const fs = std.fs;
 const print = std.debug.print;
 const mem = std.mem;
@@ -29,7 +28,8 @@ pub fn main() !void {
     var out_dir = try fs.cwd().openDir(fs.path.dirname(output_file).?, .{});
     defer out_dir.close();
-    const input_file_bytes = try in_file.deprecatedReader().readAllAlloc(arena, std.math.maxInt(u32));
+    var in_file_reader = in_file.reader(&.{});
+    const input_file_bytes = try in_file_reader.interface.allocRemaining(arena, .unlimited);
     var tokenizer = Tokenizer.init(input_file, input_file_bytes);

View file

@@ -254,7 +254,7 @@ pub fn main() !void {
     .file, .sym_link => {
         const rel_path = try std.fs.path.relative(allocator, target_include_dir, full_path);
         const max_size = 2 * 1024 * 1024 * 1024;
-        const raw_bytes = try std.fs.cwd().readFileAlloc(allocator, full_path, max_size);
+        const raw_bytes = try std.fs.cwd().readFileAlloc(full_path, allocator, .limited(max_size));
         const trimmed = std.mem.trim(u8, raw_bytes, " \r\n\t");
         total_bytes += raw_bytes.len;
         const hash = try allocator.alloc(u8, 32);

View file

@@ -206,7 +206,7 @@ pub fn main() !void {
     .file => {
         const rel_path = try std.fs.path.relative(arena, target_include_dir, full_path);
         const max_size = 2 * 1024 * 1024 * 1024;
-        const raw_bytes = try std.fs.cwd().readFileAlloc(arena, full_path, max_size);
+        const raw_bytes = try std.fs.cwd().readFileAlloc(full_path, arena, .limited(max_size));
         const trimmed = std.mem.trim(u8, raw_bytes, " \r\n\t");
         total_bytes += raw_bytes.len;
         const hash = try arena.alloc(u8, 32);

View file

@@ -965,7 +965,7 @@ fn printUsageAndExit(arg0: []const u8) noreturn {
     std.process.exit(1);
 }
-fn printUsage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void {
+fn printUsage(w: *std.Io.Writer, arg0: []const u8) std.Io.Writer.Error!void {
     try w.print(
         \\Usage: {s} /path/to/llvm-tblgen /path/to/git/llvm/llvm-project
         \\Alternative Usage: zig run /path/to/git/zig/tools/update_clang_options.zig -- /path/to/llvm-tblgen /path/to/git/llvm/llvm-project

View file

@@ -194,7 +194,7 @@ fn printUsageAndExit(arg0: []const u8) noreturn {
     std.process.exit(1);
 }
-fn printUsage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void {
+fn printUsage(w: *std.Io.Writer, arg0: []const u8) std.Io.Writer.Error!void {
     return w.print(
         \\Usage: {s} /path/git/zig
         \\

View file

@@ -116,9 +116,9 @@ pub fn main() !void {
     const max_file_size = 10 * 1024 * 1024;
     const generic_glibc_contents = generic_glibc_dir.readFileAlloc(
-        arena,
         entry.path,
-        max_file_size,
+        arena,
+        .limited(max_file_size),
     ) catch |err| switch (err) {
         error.FileNotFound => continue,
         else => |e| fatal("unable to load '{s}/include/{s}': {s}", .{
@@ -126,9 +126,9 @@ pub fn main() !void {
         }),
     };
     const glibc_include_contents = include_dir.readFileAlloc(
-        arena,
         entry.path,
-        max_file_size,
+        arena,
+        .limited(max_file_size),
     ) catch |err| {
         fatal("unable to load '{s}/include/{s}': {s}", .{
             dest_dir_path, entry.path, @errorName(err),