//! Driver for the "zig rc" resource-compiler tool: parses the command line,
//! optionally preprocesses the input .rc, compiles it to .res, and can
//! convert the result to a COFF object (see the pipeline in `main`).

const builtin = @import("builtin");
const std = @import("std");
const Io = std.Io;
const Allocator = std.mem.Allocator;
const removeComments = @import("comments.zig").removeComments;
const parseAndRemoveLineCommands = @import("source_mapping.zig").parseAndRemoveLineCommands;
const compile = @import("compile.zig").compile;
const Dependencies = @import("compile.zig").Dependencies;
const Diagnostics = @import("errors.zig").Diagnostics;
const cli = @import("cli.zig");
const preprocess = @import("preprocess.zig");
const renderErrorMessage = @import("utils.zig").renderErrorMessage;
const openFileNotDir = @import("utils.zig").openFileNotDir;
const cvtres = @import("cvtres.zig");
const hasDisjointCodePage = @import("disjoint_code_page.zig").hasDisjointCodePage;
const fmtResourceType = @import("res.zig").NameOrOrdinal.fmtResourceType;
const aro = @import("aro");
const compiler_util = @import("../util.zig");

/// Expected invocation: `<exe> <zig lib dir> [--zig-integration] <rc args...>`.
/// Exits with status 1 (after emitting diagnostics) on any user-facing error.
pub fn main() !void {
    var debug_allocator: std.heap.DebugAllocator(.{}) = .init;
    // Leak check on exit: deinit returns something other than .ok on leaks.
    defer std.debug.assert(debug_allocator.deinit() == .ok);
    const gpa = debug_allocator.allocator();

    // Arena for allocations that live for the whole process (args, resolved
    // include paths).
    var arena_state = std.heap.ArenaAllocator.init(gpa);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    const args = try std.process.argsAlloc(arena);

    if (args.len < 2) {
        const w, const ttyconf = std.debug.lockStderrWriter(&.{});
        try renderErrorMessage(w, ttyconf, .err, "expected zig lib dir as first argument", .{});
        std.process.exit(1);
    }
    const zig_lib_dir = args[1];
    var cli_args = args[2..];
    // `--zig-integration` switches diagnostics from human-readable stderr
    // output to the std.zig.Server protocol over stdout.
    var zig_integration = false;
    if (cli_args.len > 0 and std.mem.eql(u8, cli_args[0], "--zig-integration")) {
        zig_integration = true;
        cli_args = args[3..];
    }

    var stdout_buffer: [1024]u8 = undefined;
    var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
    const stdout = &stdout_writer.interface;

    var error_handler: ErrorHandler = switch (zig_integration) {
        true => .{
            .server = .{
                .out = stdout,
                .in = undefined, // won't be receiving messages
            },
        },
        false => .stderr,
    };

    var options =
options: {
    // Parse the rc-style CLI args; on hard parse errors the diagnostics are
    // emitted (stderr or server protocol) and the process exits.
    var cli_diagnostics = cli.Diagnostics.init(gpa);
    defer cli_diagnostics.deinit();
    var options = cli.parse(gpa, cli_args, &cli_diagnostics) catch |err| switch (err) {
        error.ParseError => {
            try error_handler.emitCliDiagnostics(gpa, cli_args, &cli_diagnostics);
            std.process.exit(1);
        },
        else => |e| return e,
    };
    try options.maybeAppendRC(std.fs.cwd());

    if (!zig_integration) {
        // print any warnings/notes
        cli_diagnostics.renderToStdErr(cli_args);

        // If there was something printed, then add an extra newline separator
        // so that there is a clear separation between the cli diagnostics and whatever
        // gets printed after
        if (cli_diagnostics.errors.items.len > 0) {
            const stderr, _ = std.debug.lockStderrWriter(&.{});
            defer std.debug.unlockStderrWriter();
            try stderr.writeByte('\n');
        }
    }

    break :options options;
};
defer options.deinit();

var threaded: std.Io.Threaded = .init(gpa);
defer threaded.deinit();
const io = threaded.io();

if (options.print_help_and_exit) {
    try cli.writeUsage(stdout, "zig rc");
    try stdout.flush();
    return;
}

// Don't allow verbose when integrating with Zig via stdout, since stdout is
// reserved for the server protocol in that mode.
// FIX: this was an unconditional `options.verbose = false;`, which made the
// verbose dump below dead code even for plain (non-integration) invocations.
if (zig_integration) options.verbose = false;

if (options.verbose) {
    try options.dumpVerbose(stdout);
    try stdout.writeByte('\n');
    try stdout.flush();
}

// Dependency tracking is only wired through when a depfile was requested.
var dependencies = Dependencies.init(gpa);
defer dependencies.deinit();
const maybe_dependencies: ?*Dependencies = if (options.depfile_path != null) &dependencies else null;

// System include paths are resolved lazily on first use (see LazyIncludePaths).
var include_paths = LazyIncludePaths{
    .arena = arena,
    .io = io,
    .auto_includes_option = options.auto_includes,
    .zig_lib_dir = zig_lib_dir,
    .target_machine_type = options.coff_options.target,
};

const full_input = full_input: {
    if (options.input_format == .rc and options.preprocess != .no) {
        var preprocessed_buf: std.Io.Writer.Allocating = .init(gpa);
        errdefer preprocessed_buf.deinit();

        // We're going to throw away everything except the final preprocessed output anyway,
        // so we can use a scoped arena for everything else.
var aro_arena_state = std.heap.ArenaAllocator.init(gpa);
defer aro_arena_state.deinit();
const aro_arena = aro_arena_state.allocator();

var stderr_buf: [512]u8 = undefined;
// In integration mode aro diagnostics are collected into a list (so they can
// be converted to an ErrorBundle later); otherwise they go straight to stderr.
var diagnostics: aro.Diagnostics = .{ .output = output: {
    if (zig_integration) break :output .{ .to_list = .{ .arena = .init(gpa) } };
    const w, const ttyconf = std.debug.lockStderrWriter(&stderr_buf);
    break :output .{ .to_writer = .{
        .writer = w,
        .color = ttyconf,
    } };
} };
// NOTE: when not integrating, stderr stays locked for the whole duration of
// preprocessing and is released here together with the diagnostics.
defer {
    diagnostics.deinit();
    if (!zig_integration) std.debug.unlockStderrWriter();
}
var comp = aro.Compilation.init(aro_arena, aro_arena, io, &diagnostics, std.fs.cwd());
defer comp.deinit();

var argv: std.ArrayList([]const u8) = .empty;
defer argv.deinit(aro_arena);

try argv.append(aro_arena, "arocc"); // dummy command name
const resolved_include_paths = try include_paths.get(&error_handler);
try preprocess.appendAroArgs(aro_arena, &argv, options, resolved_include_paths);
try argv.append(aro_arena, switch (options.input_source) {
    .stdio => "-",
    .filename => |filename| filename,
});

if (options.verbose) {
    try stdout.writeAll("Preprocessor: arocc (built-in)\n");
    for (argv.items[0 .. argv.items.len - 1]) |arg| {
        try stdout.print("{s} ", .{arg});
    }
    try stdout.print("{s}\n\n", .{argv.items[argv.items.len - 1]});
    try stdout.flush();
}

preprocess.preprocess(&comp, &preprocessed_buf.writer, argv.items, maybe_dependencies) catch |err| switch (err) {
    error.GeneratedSourceError => {
        try error_handler.emitAroDiagnostics(gpa, "failed during preprocessor setup (this is always a bug)", &comp);
        std.process.exit(1);
    },
    // ArgError can occur if e.g. the .rc file is not found
    error.ArgError, error.PreprocessError => {
        try error_handler.emitAroDiagnostics(gpa, "failed during preprocessing", &comp);
        std.process.exit(1);
    },
    error.FileTooBig => {
        try error_handler.emitMessage(gpa, .err, "failed during preprocessing: maximum file size exceeded", .{});
        std.process.exit(1);
    },
    error.WriteFailed => {
        try error_handler.emitMessage(gpa, .err, "failed during preprocessing: error writing the preprocessed output", .{});
        std.process.exit(1);
    },
    error.OutOfMemory => |e| return e,
};

break :full_input try preprocessed_buf.toOwnedSlice();
} else {
    // No preprocessing: read the input verbatim (stdin or a file path).
    switch (options.input_source) {
        .stdio => |file| {
            var file_reader = file.reader(io, &.{});
            break :full_input file_reader.interface.allocRemaining(gpa, .unlimited) catch |err| {
                try error_handler.emitMessage(gpa, .err, "unable to read input from stdin: {s}", .{@errorName(err)});
                std.process.exit(1);
            };
        },
        .filename => |input_filename| {
            break :full_input std.fs.cwd().readFileAlloc(input_filename, gpa, .unlimited) catch |err| {
                try error_handler.emitMessage(gpa, .err, "unable to read input file path '{s}': {s}", .{ input_filename, @errorName(err) });
                std.process.exit(1);
            };
        },
    }
}
};
defer gpa.free(full_input);

// /p mode: dump the preprocessed source and stop before compiling.
if (options.preprocess == .only) {
    switch (options.output_source) {
        .stdio => |output_file| {
            try output_file.writeAll(full_input);
        },
        .filename => |output_filename| {
            try std.fs.cwd().writeFile(.{ .sub_path = output_filename, .data = full_input });
        },
    }
    return;
}

var resources = resources: {
    // When producing COFF from a non-.res input, the .res is built into an
    // in-memory intermediate instead of a file on disk.
    const need_intermediate_res = options.output_format == .coff and options.input_format != .res;
    var res_stream = if (need_intermediate_res)
        IoStream{
            .name = "",
            .intermediate = true,
            .source = .{ .memory = .empty },
        }
    else if (options.input_format == .res)
        // Input is already a .res: open it for reading.
        IoStream.fromIoSource(options.input_source, .input) catch |err| {
            try error_handler.emitMessage(gpa, .err, "unable to read res file path '{s}': {s}", .{ options.input_source.filename, @errorName(err) });
            std.process.exit(1);
        }
    else
// Final output is the .res itself: write it directly to the output source.
IoStream.fromIoSource(options.output_source, .output) catch |err| {
    try error_handler.emitMessage(gpa, .err, "unable to create output file '{s}': {s}", .{ options.output_source.filename, @errorName(err) });
    std.process.exit(1);
};
defer res_stream.deinit(gpa);

const res_data = res_data: {
    if (options.input_format != .res) {
        // Note: We still want to run this when no-preprocess is set because:
        // 1. We want to print accurate line numbers after removing multiline comments
        // 2. We want to be able to handle an already-preprocessed input with #line commands in it
        var mapping_results = parseAndRemoveLineCommands(gpa, full_input, full_input, .{ .initial_filename = options.input_source.filename }) catch |err| switch (err) {
            error.InvalidLineCommand => {
                // TODO: Maybe output the invalid line command
                try error_handler.emitMessage(gpa, .err, "invalid line command in the preprocessed source", .{});
                if (options.preprocess == .no) {
                    try error_handler.emitMessage(gpa, .note, "line commands must be of the format: #line \"\"", .{});
                } else {
                    try error_handler.emitMessage(gpa, .note, "this is likely to be a bug, please report it", .{});
                }
                std.process.exit(1);
            },
            error.LineNumberOverflow => {
                // TODO: Better error message
                try error_handler.emitMessage(gpa, .err, "line number count exceeded maximum of {}", .{std.math.maxInt(usize)});
                std.process.exit(1);
            },
            error.OutOfMemory => |e| return e,
        };
        defer mapping_results.mappings.deinit(gpa);

        // Code page used when the input doesn't specify one via pragma.
        const default_code_page = options.default_code_page orelse .windows1252;
        const has_disjoint_code_page = hasDisjointCodePage(mapping_results.result, &mapping_results.mappings, default_code_page);

        // Comment removal happens in-place on the line-command-stripped source.
        const final_input = try removeComments(mapping_results.result, mapping_results.result, &mapping_results.mappings);

        var diagnostics = Diagnostics.init(gpa, io);
        defer diagnostics.deinit();

        var output_buffer: [4096]u8 = undefined;
        var res_stream_writer = res_stream.source.writer(gpa, &output_buffer);
        // For the in-memory case, deinit moves the written bytes back into
        // res_stream.source (see IoStream.Source.Writer.deinit).
        defer res_stream_writer.deinit(&res_stream.source);
        const
output_buffered_stream = res_stream_writer.interface();

// Compile the preprocessed .rc source into .res data on the stream.
compile(gpa, io, final_input, output_buffered_stream, .{
    .cwd = std.fs.cwd(),
    .diagnostics = &diagnostics,
    .source_mappings = &mapping_results.mappings,
    .dependencies = maybe_dependencies,
    .ignore_include_env_var = options.ignore_include_env_var,
    .extra_include_paths = options.extra_include_paths.items,
    .system_include_paths = try include_paths.get(&error_handler),
    .default_language_id = options.default_language_id,
    .default_code_page = default_code_page,
    .disjoint_code_page = has_disjoint_code_page,
    .verbose = options.verbose,
    .null_terminate_string_table_strings = options.null_terminate_string_table_strings,
    .max_string_literal_codepoints = options.max_string_literal_codepoints,
    .silent_duplicate_control_ids = options.silent_duplicate_control_ids,
    .warn_instead_of_error_on_invalid_code_page = options.warn_instead_of_error_on_invalid_code_page,
}) catch |err| switch (err) {
    error.ParseError, error.CompileError => {
        try error_handler.emitDiagnostics(gpa, std.fs.cwd(), final_input, &diagnostics, mapping_results.mappings);
        // Delete the output file on error
        res_stream.cleanupAfterError();
        std.process.exit(1);
    },
    else => |e| return e,
};

try output_buffered_stream.flush();

// print any warnings/notes
if (!zig_integration) {
    diagnostics.renderToStdErr(std.fs.cwd(), final_input, mapping_results.mappings);
}

// write the depfile
if (options.depfile_path) |depfile_path| {
    var depfile = std.fs.cwd().createFile(depfile_path, .{}) catch |err| {
        try error_handler.emitMessage(gpa, .err, "unable to create depfile '{s}': {s}", .{ depfile_path, @errorName(err) });
        std.process.exit(1);
    };
    defer depfile.close();

    var depfile_buffer: [1024]u8 = undefined;
    var depfile_writer = depfile.writer(&depfile_buffer);
    switch (options.depfile_fmt) {
        // Dependencies are emitted as a JSON array of path strings.
        .json => {
            var write_stream: std.json.Stringify = .{
                .writer = &depfile_writer.interface,
                .options = .{ .whitespace = .indent_2 },
            };

            try write_stream.beginArray();
            for
(dependencies.list.items) |dep_path| {
    try write_stream.write(dep_path);
}
try write_stream.endArray();
},
}
try depfile_writer.interface.flush();
}
}

// .res output was the final goal; nothing left to do.
if (options.output_format != .coff) return;

// Read the .res bytes back (from the in-memory intermediate or the input
// .res file) so they can be parsed into individual resources below.
break :res_data res_stream.source.readAll(gpa, io) catch |err| {
    try error_handler.emitMessage(gpa, .err, "unable to read res from '{s}': {s}", .{ res_stream.name, @errorName(err) });
    std.process.exit(1);
};
};
// No need to keep the res_data around after parsing the resources from it
defer res_data.deinit(gpa);

std.debug.assert(options.output_format == .coff);

// TODO: Maybe use a buffered file reader instead of reading file into memory -> fbs
var res_reader: std.Io.Reader = .fixed(res_data.bytes);
break :resources cvtres.parseRes(gpa, &res_reader, .{ .max_size = res_data.bytes.len }) catch |err| {
    // TODO: Better errors
    try error_handler.emitMessage(gpa, .err, "unable to parse res from '{s}': {s}", .{ res_stream.name, @errorName(err) });
    std.process.exit(1);
};
};
defer resources.deinit();

// cvtres stage: write the parsed resources out as a COFF object file.
var coff_stream = IoStream.fromIoSource(options.output_source, .output) catch |err| {
    try error_handler.emitMessage(gpa, .err, "unable to create output file '{s}': {s}", .{ options.output_source.filename, @errorName(err) });
    std.process.exit(1);
};
defer coff_stream.deinit(gpa);

var coff_output_buffer: [4096]u8 = undefined;
var coff_output_buffered_stream = coff_stream.source.writer(gpa, &coff_output_buffer);

var cvtres_diagnostics: cvtres.Diagnostics = .{ .none = {} };
cvtres.writeCoff(gpa, coff_output_buffered_stream.interface(), resources.list.items, options.coff_options, &cvtres_diagnostics) catch |err| {
    switch (err) {
        error.DuplicateResource => {
            const duplicate_resource = resources.list.items[cvtres_diagnostics.duplicate_resource];
            try error_handler.emitMessage(gpa, .err, "duplicate resource [id: {f}, type: {f}, language: {f}]", .{
                duplicate_resource.name_value,
                fmtResourceType(duplicate_resource.type_value),
                duplicate_resource.language,
            });
        },
        error.ResourceDataTooLong => {
// NOTE(review): this (and the arm below) reads the `.duplicate_resource`
// payload for a non-duplicate error — presumably cvtres.Diagnostics reuses
// that field as a generic "index of the offending resource"; confirm in
// cvtres.zig.
const overflow_resource = resources.list.items[cvtres_diagnostics.duplicate_resource];
try error_handler.emitMessage(gpa, .err, "resource has a data length that is too large to be written into a coff section", .{});
try error_handler.emitMessage(gpa, .note, "the resource with the invalid size is [id: {f}, type: {f}, language: {f}]", .{
    overflow_resource.name_value,
    fmtResourceType(overflow_resource.type_value),
    overflow_resource.language,
});
},
error.TotalResourceDataTooLong => {
    const overflow_resource = resources.list.items[cvtres_diagnostics.duplicate_resource];
    try error_handler.emitMessage(gpa, .err, "total resource data exceeds the maximum of the coff 'size of raw data' field", .{});
    try error_handler.emitMessage(gpa, .note, "size overflow occurred when attempting to write this resource: [id: {f}, type: {f}, language: {f}]", .{
        overflow_resource.name_value,
        fmtResourceType(overflow_resource.type_value),
        overflow_resource.language,
    });
},
else => {
    try error_handler.emitMessage(gpa, .err, "unable to write coff output file '{s}': {s}", .{ coff_stream.name, @errorName(err) });
},
}
// Delete the output file on error
coff_stream.cleanupAfterError();
std.process.exit(1);
};

try coff_output_buffered_stream.interface().flush();
}

/// A named input/output stream that can be a real file, stdin/stdout, or an
/// in-memory buffer (used for the intermediate .res when producing COFF).
const IoStream = struct {
    // Display name used in error messages (and for deleting the file on error).
    name: []const u8,
    // NOTE(review): `intermediate` is set by `main` but never read in this
    // file; possibly vestigial.
    intermediate: bool,
    source: Source,

    pub const IoDirection = enum { input, output };

    pub fn fromIoSource(source: cli.Options.IoSource, io: IoDirection) !IoStream {
        return .{
            .name = switch (source) {
                .filename => |filename| filename,
                .stdio => switch (io) {
                    .input => "",
                    .output => "",
                },
            },
            .intermediate = false,
            .source = try Source.fromIoSource(source, io),
        };
    }

    pub fn deinit(self: *IoStream, allocator: Allocator) void {
        self.source.deinit(allocator);
    }

    /// Closes and deletes a file-backed output so a failed run doesn't leave
    /// a partial file behind. No-op for stdio/memory/closed sources.
    pub fn cleanupAfterError(self: *IoStream) void {
        switch (self.source) {
            .file => |file| {
                // Delete the output file on error
                file.close();
                // Failing to delete is not really a big deal, so swallow any errors
                std.fs.cwd().deleteFile(self.name)
catch {};
},
.stdio, .memory, .closed => return,
}
}

/// The backing storage/transport for an IoStream.
pub const Source = union(enum) {
    file: std.fs.File,
    stdio: std.fs.File,
    memory: std.ArrayList(u8),
    /// The source has been closed and any usage of the Source in this state is illegal (except deinit).
    closed: void,

    pub fn fromIoSource(source: cli.Options.IoSource, io: IoDirection) !Source {
        switch (source) {
            .filename => |filename| return .{
                .file = switch (io) {
                    .input => try openFileNotDir(std.fs.cwd(), filename, .{}),
                    .output => try std.fs.cwd().createFile(filename, .{}),
                },
            },
            .stdio => |file| return .{ .stdio = file },
        }
    }

    pub fn deinit(self: *Source, allocator: Allocator) void {
        switch (self.*) {
            .file => |file| file.close(),
            .stdio => {},
            .memory => |*list| list.deinit(allocator),
            .closed => {},
        }
    }

    /// Bytes read from a Source; `needs_free` records whether `bytes` was
    /// heap-allocated for the caller or borrows the Source's own buffer.
    pub const Data = struct {
        bytes: []const u8,
        needs_free: bool,

        pub fn deinit(self: Data, allocator: Allocator) void {
            if (self.needs_free) {
                allocator.free(self.bytes);
            }
        }
    };

    /// Reads the entire contents. For `.memory`, the returned bytes borrow
    /// the list's items (valid only while the Source is alive and unmodified).
    pub fn readAll(self: Source, allocator: Allocator, io: Io) !Data {
        return switch (self) {
            inline .file, .stdio => |file| .{
                .bytes = b: {
                    var file_reader = file.reader(io, &.{});
                    break :b try file_reader.interface.allocRemaining(allocator, .unlimited);
                },
                .needs_free = true,
            },
            .memory => |list| .{ .bytes = list.items, .needs_free = false },
            .closed => unreachable,
        };
    }

    /// Writer adapter over either a file or an allocating in-memory buffer.
    pub const Writer = union(enum) {
        file: std.fs.File.Writer,
        allocating: std.Io.Writer.Allocating,

        pub const Error = Allocator.Error || std.fs.File.WriteError;

        pub fn interface(this: *@This()) *std.Io.Writer {
            return switch (this.*) {
                .file => |*fw| &fw.interface,
                .allocating => |*a| &a.writer,
            };
        }

        /// For the allocating case, moves the written bytes back into
        /// `source.memory` so the Source owns them again.
        pub fn deinit(this: *@This(), source: *Source) void {
            switch (this.*) {
                .file => {},
                .allocating => |*a| source.memory = a.toArrayList(),
            }
            this.* = undefined;
        }
    };

    pub fn writer(source: *Source, allocator: Allocator, buffer: []u8) Writer {
        return switch (source.*) {
            .file, .stdio => |file| .{ .file = file.writer(buffer) },
            .memory => |*list| .{ .allocating =
.fromArrayList(allocator, list) },
            .closed => unreachable,
        };
    }
};
};

/// Resolves the system include paths for the auto-includes option at most
/// once, caching the result for subsequent calls (they are needed both for
/// preprocessing and for compilation).
const LazyIncludePaths = struct {
    arena: Allocator,
    io: Io,
    auto_includes_option: cli.Options.AutoIncludes,
    zig_lib_dir: []const u8,
    target_machine_type: std.coff.IMAGE.FILE.MACHINE,
    // Memoized result of the first successful `get` call (arena-allocated).
    resolved_include_paths: ?[]const []const u8 = null,

    /// Returns the resolved include paths, emitting diagnostics and exiting
    /// the process if detection fails for a non-OOM reason.
    pub fn get(self: *LazyIncludePaths, error_handler: *ErrorHandler) ![]const []const u8 {
        const io = self.io;
        if (self.resolved_include_paths) |include_paths|
            return include_paths;

        const resolved = getIncludePaths(self.arena, io, self.auto_includes_option, self.zig_lib_dir, self.target_machine_type) catch |err| switch (err) {
            error.OutOfMemory => |e| return e,
            else => |e| {
                switch (e) {
                    error.UnsupportedAutoIncludesMachineType => {
                        try error_handler.emitMessage(self.arena, .err, "automatic include path detection is not supported for target '{s}'", .{@tagName(self.target_machine_type)});
                    },
                    error.MsvcIncludesNotFound => {
                        try error_handler.emitMessage(self.arena, .err, "MSVC include paths could not be automatically detected", .{});
                    },
                    error.MingwIncludesNotFound => {
                        try error_handler.emitMessage(self.arena, .err, "MinGW include paths could not be automatically detected", .{});
                    },
                }
                try error_handler.emitMessage(self.arena, .note, "to disable auto includes, use the option /:auto-includes none", .{});
                std.process.exit(1);
            },
        };
        // FIX: previously the result was returned without being stored, so the
        // `resolved_include_paths` cache field was never populated and libc
        // detection re-ran on every call.
        self.resolved_include_paths = resolved;
        return resolved;
    }
};

/// Detects the libc include directories matching `auto_includes_option` for
/// the given COFF machine type, falling back from MSVC to MinGW when the
/// option is `.any`.
fn getIncludePaths(
    arena: Allocator,
    io: Io,
    auto_includes_option: cli.Options.AutoIncludes,
    zig_lib_dir: []const u8,
    target_machine_type: std.coff.IMAGE.FILE.MACHINE,
) ![]const []const u8 {
    if (auto_includes_option == .none) return &[_][]const u8{};

    const includes_arch: std.Target.Cpu.Arch = switch (target_machine_type) {
        .AMD64 => .x86_64,
        .I386 => .x86,
        .ARMNT => .thumb,
        .ARM64 => .aarch64,
        .ARM64EC => .aarch64,
        .ARM64X => .aarch64,
        .IA64, .EBC => {
            return error.UnsupportedAutoIncludesMachineType;
        },
        // The above cases are exhaustive of all the `MachineType`s supported (see supported_targets in cvtres.zig)
        // This is enforced by the argument parser in cli.zig.
        else => unreachable,
    };

    var includes = auto_includes_option;
    if (builtin.target.os.tag != .windows) {
        switch (includes) {
            .none => unreachable,
            // MSVC can't be found when the host isn't Windows, so short-circuit.
            .msvc => return error.MsvcIncludesNotFound,
            // Skip straight to gnu since we won't be able to detect MSVC on non-Windows hosts.
            .any => includes = .gnu,
            .gnu => {},
        }
    }

    while (true) {
        switch (includes) {
            .none => unreachable,
            .any, .msvc => {
                // MSVC is only detectable on Windows targets. This unreachable is to signify
                // that .any and .msvc should be dealt with on non-Windows targets before this point,
                // since getting MSVC include paths uses Windows-only APIs.
                if (builtin.target.os.tag != .windows) unreachable;

                const target_query: std.Target.Query = .{
                    .os_tag = .windows,
                    .cpu_arch = includes_arch,
                    .abi = .msvc,
                };
                const target = std.zig.resolveTargetQueryOrFatal(io, target_query);
                const is_native_abi = target_query.isNativeAbi();
                const detected_libc = std.zig.LibCDirs.detect(arena, zig_lib_dir, &target, is_native_abi, true, null) catch {
                    if (includes == .any) {
                        // fall back to mingw
                        includes = .gnu;
                        continue;
                    }
                    return error.MsvcIncludesNotFound;
                };
                if (detected_libc.libc_include_dir_list.len == 0) {
                    if (includes == .any) {
                        // fall back to mingw
                        includes = .gnu;
                        continue;
                    }
                    return error.MsvcIncludesNotFound;
                }
                return detected_libc.libc_include_dir_list;
            },
            .gnu => {
                const target_query: std.Target.Query = .{
                    .os_tag = .windows,
                    .cpu_arch = includes_arch,
                    .abi = .gnu,
                };
                const target = std.zig.resolveTargetQueryOrFatal(io, target_query);
                const is_native_abi = target_query.isNativeAbi();
                const detected_libc = std.zig.LibCDirs.detect(arena, zig_lib_dir, &target, is_native_abi, true, null) catch |err| switch (err) {
                    error.OutOfMemory => |e| return e,
                    else => return error.MingwIncludesNotFound,
                };
                return detected_libc.libc_include_dir_list;
            },
        }
    }
}

const ErrorBundle = std.zig.ErrorBundle;
const SourceMappings =
@import("source_mapping.zig").SourceMappings;

/// Routes diagnostics either to the Zig compiler server protocol (as
/// ErrorBundles over stdout, when --zig-integration is active) or to
/// human-readable stderr output.
const ErrorHandler = union(enum) {
    server: std.zig.Server,
    stderr,

    /// Emits CLI parse diagnostics (as an ErrorBundle or rendered to stderr).
    pub fn emitCliDiagnostics(
        self: *ErrorHandler,
        allocator: Allocator,
        args: []const []const u8,
        diagnostics: *cli.Diagnostics,
    ) !void {
        switch (self.*) {
            .server => |*server| {
                var error_bundle = try cliDiagnosticsToErrorBundle(allocator, diagnostics);
                defer error_bundle.deinit(allocator);

                try server.serveErrorBundle(error_bundle);
            },
            .stderr => diagnostics.renderToStdErr(args),
        }
    }

    /// Emits aro (preprocessor) diagnostics plus a summarizing failure message.
    pub fn emitAroDiagnostics(
        self: *ErrorHandler,
        allocator: Allocator,
        fail_msg: []const u8,
        comp: *aro.Compilation,
    ) !void {
        switch (self.*) {
            .server => |*server| {
                var error_bundle = try compiler_util.aroDiagnosticsToErrorBundle(
                    comp.diagnostics,
                    allocator,
                    fail_msg,
                );
                defer error_bundle.deinit(allocator);

                try server.serveErrorBundle(error_bundle);
            },
            .stderr => {
                // aro errors have already been emitted
                const stderr, const ttyconf = std.debug.lockStderrWriter(&.{});
                defer std.debug.unlockStderrWriter();
                try renderErrorMessage(stderr, ttyconf, .err, "{s}", .{fail_msg});
            },
        }
    }

    /// Emits resource-compilation diagnostics, mapped back through the
    /// source mappings to the original file/line locations.
    pub fn emitDiagnostics(
        self: *ErrorHandler,
        allocator: Allocator,
        cwd: std.fs.Dir,
        source: []const u8,
        diagnostics: *Diagnostics,
        mappings: SourceMappings,
    ) !void {
        switch (self.*) {
            .server => |*server| {
                var error_bundle = try diagnosticsToErrorBundle(allocator, source, diagnostics, mappings);
                defer error_bundle.deinit(allocator);

                try server.serveErrorBundle(error_bundle);
            },
            .stderr => diagnostics.renderToStdErr(cwd, source, mappings),
        }
    }

    /// Emits a single formatted message. In server mode, non-error messages
    /// are dropped (the protocol only carries errors).
    pub fn emitMessage(
        self: *ErrorHandler,
        allocator: Allocator,
        msg_type: @import("utils.zig").ErrorMessageType,
        comptime format: []const u8,
        args: anytype,
    ) !void {
        switch (self.*) {
            .server => |*server| {
                // only emit errors
                if (msg_type != .err) return;

                var error_bundle = try errorStringToErrorBundle(allocator, format, args);
                defer error_bundle.deinit(allocator);

                try server.serveErrorBundle(error_bundle);
            },
            .stderr => {
                const stderr, const ttyconf =
std.debug.lockStderrWriter(&.{});
defer std.debug.unlockStderrWriter();
try renderErrorMessage(stderr, ttyconf, msg_type, format, args);
},
}
}
};

/// Converts CLI diagnostics into an ErrorBundle with a fixed root message and
/// one error (plus attached notes) per reported CLI error. Warnings are not
/// included, and notes following a warning are dropped with it.
fn cliDiagnosticsToErrorBundle(
    gpa: Allocator,
    diagnostics: *cli.Diagnostics,
) !ErrorBundle {
    @branchHint(.cold);

    var bundle: ErrorBundle.Wip = undefined;
    try bundle.init(gpa);
    errdefer bundle.deinit();

    try bundle.addRootErrorMessage(.{
        .msg = try bundle.addString("invalid command line option(s)"),
    });

    // Errors are flushed one behind: an error is added once the next error
    // arrives (so its notes can be gathered first), then once more at the end.
    var cur_err: ?ErrorBundle.ErrorMessage = null;
    var cur_notes: std.ArrayList(ErrorBundle.ErrorMessage) = .empty;
    defer cur_notes.deinit(gpa);
    for (diagnostics.errors.items) |err_details| {
        switch (err_details.type) {
            .err => {
                if (cur_err) |err| {
                    try bundle.addRootErrorMessageWithNotes(err, cur_notes.items);
                }
                cur_err = .{
                    .msg = try bundle.addString(err_details.msg.items),
                };
                cur_notes.clearRetainingCapacity();
            },
            .warning => cur_err = null,
            .note => {
                if (cur_err == null) continue;
                cur_err.?.notes_len += 1;
                try cur_notes.append(gpa, .{
                    .msg = try bundle.addString(err_details.msg.items),
                });
            },
        }
    }
    if (cur_err) |err| {
        try bundle.addRootErrorMessageWithNotes(err, cur_notes.items);
    }

    return try bundle.toOwnedBundle("");
}

/// Converts resource-compiler diagnostics into an ErrorBundle, translating
/// token locations back to original file/line/column via the source mappings.
/// Hints are skipped entirely; warnings are skipped and break error/note
/// association (same flush-one-behind scheme as cliDiagnosticsToErrorBundle).
fn diagnosticsToErrorBundle(
    gpa: Allocator,
    source: []const u8,
    diagnostics: *Diagnostics,
    mappings: SourceMappings,
) !ErrorBundle {
    @branchHint(.cold);

    var bundle: ErrorBundle.Wip = undefined;
    try bundle.init(gpa);
    errdefer bundle.deinit();

    var msg_buf: std.Io.Writer.Allocating = .init(gpa);
    defer msg_buf.deinit();
    var cur_err: ?ErrorBundle.ErrorMessage = null;
    var cur_notes: std.ArrayList(ErrorBundle.ErrorMessage) = .empty;
    defer cur_notes.deinit(gpa);
    for (diagnostics.errors.items) |err_details| {
        switch (err_details.type) {
            .hint => continue,
            // Clear the current error so that notes don't bleed into unassociated errors
            .warning => {
                cur_err = null;
                continue;
            },
            .note => if (cur_err == null) continue,
            .err => {},
        }
        const corresponding_span =
mappings.getCorrespondingSpan(err_details.token.line_number).?;
const err_line = corresponding_span.start_line;
const err_filename = mappings.files.get(corresponding_span.filename_offset);

const source_line_start = err_details.token.getLineStartForErrorDisplay(source);
// Treat tab stops as 1 column wide for error display purposes,
// and add one to get a 1-based column
const column = err_details.token.calculateColumn(source, 1, source_line_start) + 1;

// msg_buf is reused across iterations; the rendered text is copied into the
// bundle's string table below.
msg_buf.clearRetainingCapacity();
try err_details.render(&msg_buf.writer, source, diagnostics.strings.items);

const src_loc = src_loc: {
    var src_loc: ErrorBundle.SourceLocation = .{
        .src_path = try bundle.addString(err_filename),
        .line = @intCast(err_line - 1), // 1-based -> 0-based
        .column = @intCast(column - 1), // 1-based -> 0-based
        .span_start = 0,
        .span_main = 0,
        .span_end = 0,
    };
    if (err_details.print_source_line) {
        const source_line = err_details.token.getLineForErrorDisplay(source, source_line_start);
        const visual_info = err_details.visualTokenInfo(source_line_start, source_line_start + source_line.len, source);
        src_loc.span_start = @intCast(visual_info.point_offset - visual_info.before_len);
        src_loc.span_main = @intCast(visual_info.point_offset);
        src_loc.span_end = @intCast(visual_info.point_offset + 1 + visual_info.after_len);
        src_loc.source_line = try bundle.addString(source_line);
    }
    break :src_loc try bundle.addSourceLocation(src_loc);
};

switch (err_details.type) {
    .err => {
        // Flush the previous error (with its notes) before starting this one.
        if (cur_err) |err| {
            try bundle.addRootErrorMessageWithNotes(err, cur_notes.items);
        }
        cur_err = .{
            .msg = try bundle.addString(msg_buf.written()),
            .src_loc = src_loc,
        };
        cur_notes.clearRetainingCapacity();
    },
    .note => {
        cur_err.?.notes_len += 1;
        try cur_notes.append(gpa, .{
            .msg = try bundle.addString(msg_buf.written()),
            .src_loc = src_loc,
        });
    },
    // Filtered out by the switch at the top of the loop.
    .warning, .hint => unreachable,
}
}
if (cur_err) |err| {
    try bundle.addRootErrorMessageWithNotes(err, cur_notes.items);
}

return try bundle.toOwnedBundle("");
}

fn
errorStringToErrorBundle(allocator: Allocator, comptime format: []const u8, args: anytype) !ErrorBundle {
    // Builds an ErrorBundle whose single root error message is `format`
    // rendered with `args`. Caller owns the returned bundle and must deinit
    // it with the same allocator.
    @branchHint(.cold);

    var wip: ErrorBundle.Wip = undefined;
    try wip.init(allocator);
    errdefer wip.deinit();

    // Render the message into the bundle's string table first, then attach
    // it as the sole root error.
    const msg_index = try wip.printString(format, args);
    try wip.addRootErrorMessage(.{ .msg = msg_index });

    return try wip.toOwnedBundle("");
}