link.Elf: avoid needless file system reads in flush()
flush() must not do anything more than necessary. Determining the type of input files must be done only once, before flush. Fortunately, we don't even need any file system accesses to do this, since that information is statically known in most cases, and in the rest of the cases it can be determined by file extension alone.

This commit also updates the nearby code to conform to the convention for error handling where there is exactly one error code to represent the fact that error messages have already been emitted. This had the side effect of improving the error message for a linker script parse error.

"positionals" is not a linker concept; it is a command line interface concept. Zig's linker implementation should not mention "positionals". This commit deletes that array list in favor of directly making function calls, eliminating that heap allocation during flush().
parent 2c41c453b6
commit 31d70cb1e1
9 changed files with 168 additions and 232 deletions
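Before the diff itself, here is a minimal sketch of the error-handling convention the commit message describes. This is a simplified, self-contained illustration, not the actual linker code: the real addParseError/failParse/parseObjectReportingFailure in the diff below hang off *Elf in src/link/Elf.zig and append diagnostics to Compilation.link_errors rather than printing. The point is that exactly one error code, error.LinkFailure, means "a message was already emitted", so callers either swallow it and keep parsing the remaining inputs or propagate genuinely unexpected errors.

// Sketch only: names mirror the real functions, signatures are simplified.
const std = @import("std");

const ParseError = error{ LinkFailure, OutOfMemory };

/// Record a diagnostic. Stand-in for appending to Compilation.link_errors.
fn addParseError(path: []const u8, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void {
    std.debug.print("error: " ++ fmt ++ "\n", args);
    std.debug.print("note: while parsing {s}\n", .{path});
}

/// Record a diagnostic and return the one error code that means "already
/// reported"; callers never need to re-report it.
fn failParse(path: []const u8, comptime fmt: []const u8, args: anytype) ParseError {
    try addParseError(path, fmt, args);
    return error.LinkFailure;
}

fn parseObject(path: []const u8) ParseError!void {
    // Classification by file extension alone -- no file system access needed.
    if (!std.mem.endsWith(u8, path, ".o"))
        return failParse(path, "unexpected file extension for an object", .{});
    // ... actual parsing would go here ...
}

/// Wrapper used during flush(): swallow LinkFailure (the message is already
/// recorded) so the remaining inputs still get parsed and reported.
fn parseObjectReportingFailure(path: []const u8) error{OutOfMemory}!void {
    parseObject(path) catch |err| switch (err) {
        error.LinkFailure => {},
        error.OutOfMemory => return error.OutOfMemory,
    };
}

pub fn main() !void {
    try parseObjectReportingFailure("libfoo.so"); // reports, but does not abort the flush
    try parseObjectReportingFailure("bar.o");
}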
@@ -280,6 +280,13 @@ pub const CRTFile = struct {
     lock: Cache.Lock,
     full_object_path: []const u8,
 
+    pub fn isObject(cf: CRTFile) bool {
+        return switch (classifyFileExt(cf.full_object_path)) {
+            .object => true,
+            else => false,
+        };
+    }
+
     pub fn deinit(self: *CRTFile, gpa: Allocator) void {
         self.lock.release();
         gpa.free(self.full_object_path);
@@ -1018,6 +1025,13 @@ pub const LinkObject = struct {
     //
     // Consistent with `withLOption` variable name in lld ELF driver.
     loption: bool = false,
+
+    pub fn isObject(lo: LinkObject) bool {
+        return switch (classifyFileExt(lo.path)) {
+            .object => true,
+            else => false,
+        };
+    }
 };
 
 pub const CreateOptions = struct {

@@ -2433,7 +2447,7 @@ fn flush(
     if (comp.bin_file) |lf| {
         // This is needed before reading the error flags.
         lf.flush(arena, tid, prog_node) catch |err| switch (err) {
-            error.FlushFailure => {}, // error reported through link_error_flags
+            error.FlushFailure, error.LinkFailure => {}, // error reported through link_error_flags
             error.LLDReportedFailure => {}, // error reported via lockAndParseLldStderr
             else => |e| return e,
         };
@@ -533,7 +533,10 @@ pub const File = struct {
         FailedToEmit,
         FileSystem,
         FilesOpenedWithWrongFlags,
+        /// Indicates an error will be present in `Compilation.link_errors`.
         FlushFailure,
+        /// Indicates an error will be present in `Compilation.link_errors`.
+        LinkFailure,
         FunctionSignatureMismatch,
         GlobalTypeMismatch,
         HotSwapUnavailableOnHostOperatingSystem,
src/link/Elf.zig | 182
@@ -791,8 +791,8 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
     const target = self.getTarget();
     const link_mode = comp.config.link_mode;
     const directory = self.base.emit.root_dir; // Just an alias to make it shorter to type.
-    const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path});
     const module_obj_path: ?[]const u8 = if (self.base.zcu_object_sub_path) |path| blk: {
+        const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path});
         if (fs.path.dirname(full_out_path)) |dirname| {
             break :blk try fs.path.join(arena, &.{ dirname, path });
         } else {
@@ -808,61 +808,37 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
     if (self.base.isObject()) return relocatable.flushObject(self, comp, module_obj_path);
 
     const csu = try CsuObjects.init(arena, comp);
-    const compiler_rt_path: ?[]const u8 = blk: {
-        if (comp.compiler_rt_lib) |x| break :blk x.full_object_path;
-        if (comp.compiler_rt_obj) |x| break :blk x.full_object_path;
-        break :blk null;
-    };
 
-    // Here we will parse input positional and library files (if referenced).
-    // This will roughly match in any linker backend we support.
-    var positionals = std.ArrayList(Compilation.LinkObject).init(arena);
+    // Here we will parse object and library files (if referenced).
 
     // csu prelude
-    if (csu.crt0) |v| try positionals.append(.{ .path = v });
-    if (csu.crti) |v| try positionals.append(.{ .path = v });
-    if (csu.crtbegin) |v| try positionals.append(.{ .path = v });
+    if (csu.crt0) |path| try parseObjectReportingFailure(self, path);
+    if (csu.crti) |path| try parseObjectReportingFailure(self, path);
+    if (csu.crtbegin) |path| try parseObjectReportingFailure(self, path);
 
-    try positionals.ensureUnusedCapacity(comp.objects.len);
-    positionals.appendSliceAssumeCapacity(comp.objects);
+    for (comp.objects) |obj| {
+        if (obj.isObject()) {
+            try parseObjectReportingFailure(self, obj.path);
+        } else {
+            try parseLibraryReportingFailure(self, .{ .path = obj.path }, obj.must_link);
+        }
+    }
 
     // This is a set of object files emitted by clang in a single `build-exe` invocation.
     // For instance, the implicit `a.o` as compiled by `zig build-exe a.c` will end up
     // in this set.
     for (comp.c_object_table.keys()) |key| {
-        try positionals.append(.{ .path = key.status.success.object_path });
+        try parseObjectReportingFailure(self, key.status.success.object_path);
     }
 
-    if (module_obj_path) |path| try positionals.append(.{ .path = path });
+    if (module_obj_path) |path| try parseObjectReportingFailure(self, path);
 
-    if (comp.config.any_sanitize_thread) {
-        try positionals.append(.{ .path = comp.tsan_lib.?.full_object_path });
-    }
-
-    if (comp.config.any_fuzz) {
-        try positionals.append(.{ .path = comp.fuzzer_lib.?.full_object_path });
-    }
+    if (comp.config.any_sanitize_thread) try parseCrtFileReportingFailure(self, comp.tsan_lib.?);
+    if (comp.config.any_fuzz) try parseCrtFileReportingFailure(self, comp.fuzzer_lib.?);
 
     // libc
     if (!comp.skip_linker_dependencies and !comp.config.link_libc) {
-        if (comp.libc_static_lib) |lib| {
-            try positionals.append(.{ .path = lib.full_object_path });
-        }
-    }
-
-    for (positionals.items) |obj| {
-        self.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
-            error.MalformedObject,
-            error.MalformedArchive,
-            error.MismatchedEflags,
-            error.InvalidMachineType,
-            => continue, // already reported
-            else => |e| try self.reportParseError(
-                obj.path,
-                "unexpected error: parsing input file failed with error {s}",
-                .{@errorName(e)},
-            ),
-        };
+        if (comp.libc_static_lib) |lib| try parseCrtFileReportingFailure(self, lib);
     }
 
     var system_libs = std.ArrayList(SystemLib).init(arena);
@@ -945,42 +921,23 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
     }
 
     for (system_libs.items) |lib| {
-        self.parseLibrary(lib, false) catch |err| switch (err) {
-            error.MalformedObject, error.MalformedArchive, error.InvalidMachineType => continue, // already reported
-            else => |e| try self.reportParseError(
-                lib.path,
-                "unexpected error: parsing library failed with error {s}",
-                .{@errorName(e)},
-            ),
-        };
+        try self.parseLibraryReportingFailure(lib, false);
     }
 
     // Finally, as the last input objects we add compiler_rt and CSU postlude (if any).
-    positionals.clearRetainingCapacity();
 
     // compiler-rt. Since compiler_rt exports symbols like `memset`, it needs
     // to be after the shared libraries, so they are picked up from the shared
     // libraries, not libcompiler_rt.
-    if (compiler_rt_path) |path| try positionals.append(.{ .path = path });
+    if (comp.compiler_rt_lib) |crt_file| {
+        try parseLibraryReportingFailure(self, .{ .path = crt_file.full_object_path }, false);
+    } else if (comp.compiler_rt_obj) |crt_file| {
+        try parseObjectReportingFailure(self, crt_file.full_object_path);
+    }
 
     // csu postlude
-    if (csu.crtend) |v| try positionals.append(.{ .path = v });
-    if (csu.crtn) |v| try positionals.append(.{ .path = v });
-
-    for (positionals.items) |obj| {
-        self.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
-            error.MalformedObject,
-            error.MalformedArchive,
-            error.MismatchedEflags,
-            error.InvalidMachineType,
-            => continue, // already reported
-            else => |e| try self.reportParseError(
-                obj.path,
-                "unexpected error: parsing input file failed with error {s}",
-                .{@errorName(e)},
-            ),
-        };
-    }
+    if (csu.crtend) |path| try parseObjectReportingFailure(self, path);
+    if (csu.crtn) |path| try parseObjectReportingFailure(self, path);
 
     if (self.base.hasErrors()) return error.FlushFailure;
 
@@ -1022,7 +979,9 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
     self.markEhFrameAtomsDead();
     try self.resolveMergeSections();
 
-    try self.convertCommonSymbols();
+    for (self.objects.items) |index| {
+        try self.file(index).?.object.convertCommonSymbols(self);
+    }
     self.markImportsExports();
 
     if (self.base.gc_sections) {

@@ -1402,10 +1361,9 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
 }
 
 pub const ParseError = error{
-    MalformedObject,
-    MalformedArchive,
-    InvalidMachineType,
-    MismatchedEflags,
+    /// Indicates the error is already reported on `Compilation.link_errors`.
+    LinkFailure,
+
     OutOfMemory,
     Overflow,
     InputOutput,
@@ -1416,16 +1374,30 @@ pub const ParseError = error{
     UnknownFileType,
 } || LdScript.Error || fs.Dir.AccessError || fs.File.SeekError || fs.File.OpenError || fs.File.ReadError;
 
-pub fn parsePositional(self: *Elf, path: []const u8, must_link: bool) ParseError!void {
-    const tracy = trace(@src());
-    defer tracy.end();
-    if (try Object.isObject(path)) {
-        try self.parseObject(path);
+fn parseCrtFileReportingFailure(self: *Elf, crt_file: Compilation.CRTFile) error{OutOfMemory}!void {
+    if (crt_file.isObject()) {
+        try parseObjectReportingFailure(self, crt_file.full_object_path);
     } else {
-        try self.parseLibrary(.{ .path = path }, must_link);
+        try parseLibraryReportingFailure(self, .{ .path = crt_file.full_object_path }, false);
     }
 }
 
+pub fn parseObjectReportingFailure(self: *Elf, path: []const u8) error{OutOfMemory}!void {
+    self.parseObject(path) catch |err| switch (err) {
+        error.LinkFailure => return, // already reported
+        error.OutOfMemory => return error.OutOfMemory,
+        else => |e| try self.addParseError(path, "unable to parse object: {s}", .{@errorName(e)}),
+    };
+}
+
+pub fn parseLibraryReportingFailure(self: *Elf, lib: SystemLib, must_link: bool) error{OutOfMemory}!void {
+    self.parseLibrary(lib, must_link) catch |err| switch (err) {
+        error.LinkFailure => return, // already reported
+        error.OutOfMemory => return error.OutOfMemory,
+        else => |e| try self.addParseError(lib.path, "unable to parse library: {s}", .{@errorName(e)}),
+    };
+}
+
 fn parseLibrary(self: *Elf, lib: SystemLib, must_link: bool) ParseError!void {
     const tracy = trace(@src());
     defer tracy.end();
@@ -1575,8 +1547,8 @@ fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
             .needed = scr_obj.needed,
             .path = full_path,
         }, false) catch |err| switch (err) {
-            error.MalformedObject, error.MalformedArchive, error.InvalidMachineType => continue, // already reported
-            else => |e| try self.reportParseError(
+            error.LinkFailure => continue, // already reported
+            else => |e| try self.addParseError(
                 full_path,
                 "unexpected error: parsing library failed with error {s}",
                 .{@errorName(e)},

@@ -1601,24 +1573,24 @@ pub fn validateEFlags(self: *Elf, file_index: File.Index, e_flags: elf.Elf64_Wor
             self_riscv_eflags.rvc = self_riscv_eflags.rvc or riscv_eflags.rvc;
             self_riscv_eflags.tso = self_riscv_eflags.tso or riscv_eflags.tso;
 
-            var is_error: bool = false;
+            var any_errors: bool = false;
             if (self_riscv_eflags.fabi != riscv_eflags.fabi) {
-                is_error = true;
-                _ = try self.reportParseError2(
+                any_errors = true;
+                try self.addFileError(
                     file_index,
                     "cannot link object files with different float-point ABIs",
                     .{},
                 );
             }
             if (self_riscv_eflags.rve != riscv_eflags.rve) {
-                is_error = true;
-                _ = try self.reportParseError2(
+                any_errors = true;
+                try self.addFileError(
                     file_index,
                     "cannot link object files with different RVEs",
                     .{},
                 );
             }
-            if (is_error) return error.MismatchedEflags;
+            if (any_errors) return error.LinkFailure;
         }
     },
     else => {},

@@ -1740,12 +1712,6 @@ pub fn markEhFrameAtomsDead(self: *Elf) void {
     }
 }
 
-fn convertCommonSymbols(self: *Elf) !void {
-    for (self.objects.items) |index| {
-        try self.file(index).?.object.convertCommonSymbols(self);
-    }
-}
-
 fn markImportsExports(self: *Elf) void {
     if (self.zigObjectPtr()) |zo| {
         zo.markImportsExports(self);
@@ -2838,7 +2804,7 @@ pub fn resolveMergeSections(self: *Elf) !void {
         const file_ptr = self.file(index).?;
         if (!file_ptr.isAlive()) continue;
         file_ptr.object.initInputMergeSections(self) catch |err| switch (err) {
-            error.MalformedObject => has_errors = true,
+            error.LinkFailure => has_errors = true,
             else => |e| return e,
         };
     }

@@ -2855,12 +2821,12 @@ pub fn resolveMergeSections(self: *Elf) !void {
         const file_ptr = self.file(index).?;
         if (!file_ptr.isAlive()) continue;
         file_ptr.object.resolveMergeSubsections(self) catch |err| switch (err) {
-            error.MalformedObject => has_errors = true,
+            error.LinkFailure => has_errors = true,
             else => |e| return e,
         };
     }
 
-    if (has_errors) return error.FlushFailure;
+    if (has_errors) return error.LinkFailure;
 }
 
 pub fn finalizeMergeSections(self: *Elf) !void {
@@ -5192,7 +5158,7 @@ fn reportUnsupportedCpuArch(self: *Elf) error{OutOfMemory}!void {
     });
 }
 
-pub fn reportParseError(
+pub fn addParseError(
     self: *Elf,
     path: []const u8,
     comptime format: []const u8,

@@ -5203,7 +5169,7 @@ pub fn reportParseError(
     try err.addNote("while parsing {s}", .{path});
 }
 
-pub fn reportParseError2(
+pub fn addFileError(
     self: *Elf,
     file_index: File.Index,
     comptime format: []const u8,

@@ -5214,6 +5180,26 @@ pub fn reportParseError2(
     try err.addNote("while parsing {}", .{self.file(file_index).?.fmtPath()});
 }
 
+pub fn failFile(
+    self: *Elf,
+    file_index: File.Index,
+    comptime format: []const u8,
+    args: anytype,
+) error{ OutOfMemory, LinkFailure } {
+    try addFileError(self, file_index, format, args);
+    return error.LinkFailure;
+}
+
+pub fn failParse(
+    self: *Elf,
+    path: []const u8,
+    comptime format: []const u8,
+    args: anytype,
+) error{ OutOfMemory, LinkFailure } {
+    try addParseError(self, path, format, args);
+    return error.LinkFailure;
+}
+
 const FormatShdrCtx = struct {
     elf_file: *Elf,
     shdr: elf.Elf64_Shdr,
@@ -35,10 +35,9 @@ pub fn parse(self: *Archive, elf_file: *Elf, path: []const u8, handle_index: Fil
         pos += @sizeOf(elf.ar_hdr);
 
         if (!mem.eql(u8, &hdr.ar_fmag, elf.ARFMAG)) {
-            try elf_file.reportParseError(path, "invalid archive header delimiter: {s}", .{
+            return elf_file.failParse(path, "invalid archive header delimiter: {s}", .{
                 std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag),
             });
-            return error.MalformedArchive;
         }
 
         const obj_size = try hdr.size();
@@ -7,7 +7,7 @@ pub fn deinit(scr: *LdScript, allocator: Allocator) void {
 }
 
 pub const Error = error{
-    InvalidLdScript,
+    LinkFailure,
     UnexpectedToken,
     UnknownCpuArch,
     OutOfMemory,

@@ -32,12 +32,9 @@ pub fn parse(scr: *LdScript, data: []const u8, elf_file: *Elf) Error!void {
         try line_col.append(.{ .line = line, .column = column });
         switch (tok.id) {
             .invalid => {
-                try elf_file.reportParseError(scr.path, "invalid token in LD script: '{s}' ({d}:{d})", .{
-                    std.fmt.fmtSliceEscapeLower(tok.get(data)),
-                    line,
-                    column,
+                return elf_file.failParse(scr.path, "invalid token in LD script: '{s}' ({d}:{d})", .{
+                    std.fmt.fmtSliceEscapeLower(tok.get(data)), line, column,
                 });
-                return error.InvalidLdScript;
             },
             .new_line => {
                 line += 1;

@@ -59,13 +56,12 @@ pub fn parse(scr: *LdScript, data: []const u8, elf_file: *Elf) Error!void {
             const last_token_id = parser.it.pos - 1;
             const last_token = parser.it.get(last_token_id);
             const lcol = line_col.items[last_token_id];
-            try elf_file.reportParseError(scr.path, "unexpected token in LD script: {s}: '{s}' ({d}:{d})", .{
+            return elf_file.failParse(scr.path, "unexpected token in LD script: {s}: '{s}' ({d}:{d})", .{
                 @tagName(last_token.id),
                 last_token.get(data),
                 lcol.line,
                 lcol.column,
             });
-            return error.InvalidLdScript;
         },
         else => |e| return e,
     };
@@ -34,18 +34,6 @@ num_dynrelocs: u32 = 0,
 output_symtab_ctx: Elf.SymtabCtx = .{},
 output_ar_state: Archive.ArState = .{},
 
-pub fn isObject(path: []const u8) !bool {
-    const file = try std.fs.cwd().openFile(path, .{});
-    defer file.close();
-    const reader = file.reader();
-    const header = reader.readStruct(elf.Elf64_Ehdr) catch return false;
-    if (!mem.eql(u8, header.e_ident[0..4], "\x7fELF")) return false;
-    if (header.e_ident[elf.EI_VERSION] != 1) return false;
-    if (header.e_type != elf.ET.REL) return false;
-    if (header.e_version != 1) return false;
-    return true;
-}
-
 pub fn deinit(self: *Object, allocator: Allocator) void {
     if (self.archive) |*ar| allocator.free(ar.path);
     allocator.free(self.path);
@@ -107,12 +95,9 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil
 
     const em = elf_file.base.comp.root_mod.resolved_target.result.toElfMachine();
     if (em != self.header.?.e_machine) {
-        try elf_file.reportParseError2(
-            self.index,
-            "invalid ELF machine type: {s}",
-            .{@tagName(self.header.?.e_machine)},
-        );
-        return error.InvalidMachineType;
+        return elf_file.failFile(self.index, "invalid ELF machine type: {s}", .{
+            @tagName(self.header.?.e_machine),
+        });
     }
     try elf_file.validateEFlags(self.index, self.header.?.e_flags);
 

@@ -122,12 +107,7 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil
     const shnum = math.cast(usize, self.header.?.e_shnum) orelse return error.Overflow;
     const shsize = shnum * @sizeOf(elf.Elf64_Shdr);
     if (file_size < offset + shoff or file_size < offset + shoff + shsize) {
-        try elf_file.reportParseError2(
-            self.index,
-            "corrupt header: section header table extends past the end of file",
-            .{},
-        );
-        return error.MalformedObject;
+        return elf_file.failFile(self.index, "corrupt header: section header table extends past the end of file", .{});
     }
 
     const shdrs_buffer = try Elf.preadAllAlloc(allocator, handle, offset + shoff, shsize);

@@ -138,8 +118,7 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil
     for (self.shdrs.items) |shdr| {
         if (shdr.sh_type != elf.SHT_NOBITS) {
             if (file_size < offset + shdr.sh_offset or file_size < offset + shdr.sh_offset + shdr.sh_size) {
-                try elf_file.reportParseError2(self.index, "corrupt section: extends past the end of file", .{});
-                return error.MalformedObject;
+                return elf_file.failFile(self.index, "corrupt section: extends past the end of file", .{});
             }
         }
     }

@@ -148,8 +127,7 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil
     defer allocator.free(shstrtab);
     for (self.shdrs.items) |shdr| {
         if (shdr.sh_name >= shstrtab.len) {
-            try elf_file.reportParseError2(self.index, "corrupt section name offset", .{});
-            return error.MalformedObject;
+            return elf_file.failFile(self.index, "corrupt section name offset", .{});
         }
     }
     try self.strtab.appendSlice(allocator, shstrtab);

@@ -166,8 +144,7 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil
         const raw_symtab = try self.preadShdrContentsAlloc(allocator, handle, index);
         defer allocator.free(raw_symtab);
         const nsyms = math.divExact(usize, raw_symtab.len, @sizeOf(elf.Elf64_Sym)) catch {
-            try elf_file.reportParseError2(self.index, "symbol table not evenly divisible", .{});
-            return error.MalformedObject;
+            return elf_file.failFile(self.index, "symbol table not evenly divisible", .{});
        };
         const symtab = @as([*]align(1) const elf.Elf64_Sym, @ptrCast(raw_symtab.ptr))[0..nsyms];
 
@@ -221,30 +198,15 @@ fn initAtoms(self: *Object, allocator: Allocator, handle: std.fs.File, elf_file:
             const group_raw_data = try self.preadShdrContentsAlloc(allocator, handle, shndx);
             defer allocator.free(group_raw_data);
             const group_nmembers = math.divExact(usize, group_raw_data.len, @sizeOf(u32)) catch {
-                try elf_file.reportParseError2(
-                    self.index,
-                    "corrupt section group: not evenly divisible ",
-                    .{},
-                );
-                return error.MalformedObject;
+                return elf_file.failFile(self.index, "corrupt section group: not evenly divisible ", .{});
             };
             if (group_nmembers == 0) {
-                try elf_file.reportParseError2(
-                    self.index,
-                    "corrupt section group: empty section",
-                    .{},
-                );
-                return error.MalformedObject;
+                return elf_file.failFile(self.index, "corrupt section group: empty section", .{});
             }
             const group_members = @as([*]align(1) const u32, @ptrCast(group_raw_data.ptr))[0..group_nmembers];
 
             if (group_members[0] != elf.GRP_COMDAT) {
-                try elf_file.reportParseError2(
-                    self.index,
-                    "corrupt section group: unknown SHT_GROUP format",
-                    .{},
-                );
-                return error.MalformedObject;
+                return elf_file.failFile(self.index, "corrupt section group: unknown SHT_GROUP format", .{});
             }
 
             const group_start = @as(u32, @intCast(self.comdat_group_data.items.len));
@@ -722,7 +684,7 @@ pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void {
                     var err = try elf_file.base.addErrorWithNotes(1);
                     try err.addMsg("string not null terminated", .{});
                     try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
-                    return error.MalformedObject;
+                    return error.LinkFailure;
                 }
                 end += sh_entsize;
                 const string = data[start..end];

@@ -737,7 +699,7 @@ pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void {
                 var err = try elf_file.base.addErrorWithNotes(1);
                 try err.addMsg("size not a multiple of sh_entsize", .{});
                 try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
-                return error.MalformedObject;
+                return error.LinkFailure;
             }
 
             var pos: u32 = 0;
@@ -765,7 +727,12 @@ pub fn initOutputMergeSections(self: *Object, elf_file: *Elf) !void {
     }
 }
 
-pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) !void {
+pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) error{
+    LinkFailure,
+    OutOfMemory,
+    /// TODO report the error and remove this
+    Overflow,
+}!void {
     const gpa = elf_file.base.comp.gpa;
 
     for (self.input_merge_sections_indexes.items) |index| {

@@ -809,7 +776,7 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) !void {
                 try err.addMsg("invalid symbol value: {x}", .{esym.st_value});
                 try err.addNote("for symbol {s}", .{sym.name(elf_file)});
                 try err.addNote("in {}", .{self.fmtPath()});
-                return error.MalformedObject;
+                return error.LinkFailure;
             };
 
             sym.ref = .{ .index = res.msub_index, .file = imsec.merge_section_index };

@@ -834,7 +801,7 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) !void {
                 var err = try elf_file.base.addErrorWithNotes(1);
                 try err.addMsg("invalid relocation at offset 0x{x}", .{rel.r_offset});
                 try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
-                return error.MalformedObject;
+                return error.LinkFailure;
             };
 
             const sym_index = try self.addSymbol(gpa);
@@ -58,24 +58,16 @@ pub fn parse(self: *SharedObject, elf_file: *Elf, handle: std.fs.File) !void {
 
     const em = elf_file.base.comp.root_mod.resolved_target.result.toElfMachine();
     if (em != self.header.?.e_machine) {
-        try elf_file.reportParseError2(
-            self.index,
-            "invalid ELF machine type: {s}",
-            .{@tagName(self.header.?.e_machine)},
-        );
-        return error.InvalidMachineType;
+        return elf_file.failFile(self.index, "invalid ELF machine type: {s}", .{
+            @tagName(self.header.?.e_machine),
+        });
     }
 
     const shoff = std.math.cast(usize, self.header.?.e_shoff) orelse return error.Overflow;
     const shnum = std.math.cast(usize, self.header.?.e_shnum) orelse return error.Overflow;
     const shsize = shnum * @sizeOf(elf.Elf64_Shdr);
     if (file_size < shoff or file_size < shoff + shsize) {
-        try elf_file.reportParseError2(
-            self.index,
-            "corrupted header: section header table extends past the end of file",
-            .{},
-        );
-        return error.MalformedObject;
+        return elf_file.failFile(self.index, "corrupted header: section header table extends past the end of file", .{});
     }
 
     const shdrs_buffer = try Elf.preadAllAlloc(gpa, handle, shoff, shsize);

@@ -90,8 +82,7 @@ pub fn parse(self: *SharedObject, elf_file: *Elf, handle: std.fs.File) !void {
     for (self.shdrs.items, 0..) |shdr, i| {
         if (shdr.sh_type != elf.SHT_NOBITS) {
             if (file_size < shdr.sh_offset or file_size < shdr.sh_offset + shdr.sh_size) {
-                try elf_file.reportParseError2(self.index, "corrupted section header", .{});
-                return error.MalformedObject;
+                return elf_file.failFile(self.index, "corrupted section header", .{});
             }
         }
         switch (shdr.sh_type) {
@@ -1,36 +1,24 @@
 pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void {
     const gpa = comp.gpa;
 
-    var positionals = std.ArrayList(Compilation.LinkObject).init(gpa);
-    defer positionals.deinit();
-
-    try positionals.ensureUnusedCapacity(comp.objects.len);
-    positionals.appendSliceAssumeCapacity(comp.objects);
+    for (comp.objects) |obj| {
+        switch (Compilation.classifyFileExt(obj.path)) {
+            .object => try parseObjectStaticLibReportingFailure(elf_file, obj.path),
+            .static_library => try parseArchiveStaticLibReportingFailure(elf_file, obj.path),
+            else => try elf_file.addParseError(obj.path, "unrecognized file extension", .{}),
+        }
+    }
 
     for (comp.c_object_table.keys()) |key| {
-        try positionals.append(.{ .path = key.status.success.object_path });
+        try parseObjectStaticLibReportingFailure(elf_file, key.status.success.object_path);
     }
 
-    if (module_obj_path) |path| try positionals.append(.{ .path = path });
+    if (module_obj_path) |path| {
+        try parseObjectStaticLibReportingFailure(elf_file, path);
+    }
 
     if (comp.include_compiler_rt) {
-        try positionals.append(.{ .path = comp.compiler_rt_obj.?.full_object_path });
-    }
-
-    for (positionals.items) |obj| {
-        parsePositionalStaticLib(elf_file, obj.path) catch |err| switch (err) {
-            error.MalformedObject,
-            error.MalformedArchive,
-            error.InvalidMachineType,
-            error.MismatchedEflags,
-            => continue, // already reported
-            error.UnknownFileType => try elf_file.reportParseError(obj.path, "unknown file type for an object file", .{}),
-            else => |e| try elf_file.reportParseError(
-                obj.path,
-                "unexpected error: parsing input file failed with error {s}",
-                .{@errorName(e)},
-            ),
-        };
+        try parseObjectStaticLibReportingFailure(elf_file, comp.compiler_rt_obj.?.full_object_path);
     }
 
     if (elf_file.base.hasErrors()) return error.FlushFailure;
@@ -153,36 +141,22 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]co
 }
 
 pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void {
-    const gpa = elf_file.base.comp.gpa;
-
-    var positionals = std.ArrayList(Compilation.LinkObject).init(gpa);
-    defer positionals.deinit();
-    try positionals.ensureUnusedCapacity(comp.objects.len);
-    positionals.appendSliceAssumeCapacity(comp.objects);
+    for (comp.objects) |obj| {
+        if (obj.isObject()) {
+            try elf_file.parseObjectReportingFailure(obj.path);
+        } else {
+            try elf_file.parseLibraryReportingFailure(.{ .path = obj.path }, obj.must_link);
+        }
+    }
 
     // This is a set of object files emitted by clang in a single `build-exe` invocation.
     // For instance, the implicit `a.o` as compiled by `zig build-exe a.c` will end up
     // in this set.
     for (comp.c_object_table.keys()) |key| {
-        try positionals.append(.{ .path = key.status.success.object_path });
+        try elf_file.parseObjectReportingFailure(key.status.success.object_path);
     }
 
-    if (module_obj_path) |path| try positionals.append(.{ .path = path });
-
-    for (positionals.items) |obj| {
-        elf_file.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
-            error.MalformedObject,
-            error.MalformedArchive,
-            error.InvalidMachineType,
-            error.MismatchedEflags,
-            => continue, // already reported
-            else => |e| try elf_file.reportParseError(
-                obj.path,
-                "unexpected error: parsing input file failed with error {s}",
-                .{@errorName(e)},
-            ),
-        };
-    }
+    if (module_obj_path) |path| try elf_file.parseObjectReportingFailure(path);
 
     if (elf_file.base.hasErrors()) return error.FlushFailure;
 
@@ -224,14 +198,20 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]const
     if (elf_file.base.hasErrors()) return error.FlushFailure;
 }
 
-fn parsePositionalStaticLib(elf_file: *Elf, path: []const u8) Elf.ParseError!void {
-    if (try Object.isObject(path)) {
-        try parseObjectStaticLib(elf_file, path);
-    } else if (try Archive.isArchive(path)) {
-        try parseArchiveStaticLib(elf_file, path);
-    } else return error.UnknownFileType;
-    // TODO: should we check for LD script?
-    // Actually, should we even unpack an archive?
+fn parseObjectStaticLibReportingFailure(elf_file: *Elf, path: []const u8) error{OutOfMemory}!void {
+    parseObjectStaticLib(elf_file, path) catch |err| switch (err) {
+        error.LinkFailure => return,
+        error.OutOfMemory => return error.OutOfMemory,
+        else => |e| try elf_file.addParseError(path, "parsing object failed: {s}", .{@errorName(e)}),
+    };
+}
+
+fn parseArchiveStaticLibReportingFailure(elf_file: *Elf, path: []const u8) error{OutOfMemory}!void {
+    parseArchiveStaticLib(elf_file, path) catch |err| switch (err) {
+        error.LinkFailure => return,
+        error.OutOfMemory => return error.OutOfMemory,
+        else => |e| try elf_file.addParseError(path, "parsing static library failed: {s}", .{@errorName(e)}),
+    };
 }
 
 fn parseObjectStaticLib(elf_file: *Elf, path: []const u8) Elf.ParseError!void {
@@ -3916,7 +3916,7 @@ fn testUnknownFileTypeError(b: *Build, opts: Options) *Step {
     // "note: while parsing /?/liba.dylib",
     // } });
     expectLinkErrors(exe, test_step, .{
-        .contains = "error: unexpected error: parsing input file failed with error InvalidLdScript",
+        .contains = "error: invalid token in LD script: '\\x00\\x00\\x00\\x0c\\x00\\x00\\x00/usr/lib/dyld\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x0d' (0:1069)",
     });
 
     return test_step;