mirror of
https://codeberg.org/ziglang/zig.git
synced 2025-12-06 05:44:20 +00:00
WIP: move many global settings to become per-Module
Much of the logic from Compilation.create() is extracted into Compilation.Config.resolve() which accepts many optional settings and produces concrete settings. This separate step is needed by API users of Compilation so that they can pass the resolved global settings to the Module creation function, which itself needs to resolve per-Module settings. Since the target and other things are no longer global settings, I did not want them stored in link.File (in the `options` field). That options field was already a kludge; those options should be resolved into concrete settings. This commit also starts to work on that, deleting link.Options, moving the fields into Compilation and ObjectFormat-specific structs instead. Some fields were ephemeral and should not have been stored at all, such as symbol_size_hint. The link.File object of Compilation is now a `?*link.File` and `null` when -fno-emit-bin is passed. It is now arena-allocated along with Compilation itself, avoiding some messy cleanup code that was there before. On the command line, it is now possible to configure the standard library itself by using `--mod std` just like any other module. This meant that the CLI needed to create the standard library module rather than having Compilation create it. There are a lot of changes in this commit and it's still not done. I didn't realize how quickly this changeset was going to balloon out of control, and there are still many lines that need to be changed before it even compiles successfully. * introduce std.Build.Cache.HashHelper.oneShot * add error_tracing to std.Build.Module * extract build.zig file generation into src/Builtin.zig * each CSourceFile and RcSourceFile now has a Module owner, which determines some of the C compiler flags.
This commit is contained in:
parent
b92e30ff0b
commit
12de7e3472
16 changed files with 3265 additions and 2709 deletions
|
|
@ -521,6 +521,7 @@ set(ZIG_STAGE2_SOURCES
|
|||
"${CMAKE_SOURCE_DIR}/src/Air.zig"
|
||||
"${CMAKE_SOURCE_DIR}/src/AstGen.zig"
|
||||
"${CMAKE_SOURCE_DIR}/src/Compilation.zig"
|
||||
"${CMAKE_SOURCE_DIR}/src/Compilation/Config.zig"
|
||||
"${CMAKE_SOURCE_DIR}/src/Liveness.zig"
|
||||
"${CMAKE_SOURCE_DIR}/src/Module.zig"
|
||||
"${CMAKE_SOURCE_DIR}/src/Package.zig"
|
||||
|
|
|
|||
|
|
@ -312,6 +312,20 @@ pub const HashHelper = struct {
|
|||
) catch unreachable;
|
||||
return out_digest;
|
||||
}
|
||||
|
||||
pub fn oneShot(bytes: []const u8) [hex_digest_len]u8 {
|
||||
var hasher: Hasher = hasher_init;
|
||||
hasher.update(bytes);
|
||||
var bin_digest: BinDigest = undefined;
|
||||
hasher.final(&bin_digest);
|
||||
var out_digest: [hex_digest_len]u8 = undefined;
|
||||
_ = fmt.bufPrint(
|
||||
&out_digest,
|
||||
"{s}",
|
||||
.{fmt.fmtSliceHexLower(&bin_digest)},
|
||||
) catch unreachable;
|
||||
return out_digest;
|
||||
}
|
||||
};
|
||||
|
||||
pub const Lock = struct {
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ valgrind: ?bool,
|
|||
pic: ?bool,
|
||||
red_zone: ?bool,
|
||||
omit_frame_pointer: ?bool,
|
||||
error_tracing: ?bool,
|
||||
link_libc: ?bool,
|
||||
link_libcpp: ?bool,
|
||||
|
||||
|
|
@ -177,6 +178,7 @@ pub const CreateOptions = struct {
|
|||
/// Whether to omit the stack frame pointer. Frees up a register and makes it
|
||||
/// more difficult to obtain stack traces. Has target-dependent effects.
|
||||
omit_frame_pointer: ?bool = null,
|
||||
error_tracing: ?bool = null,
|
||||
};
|
||||
|
||||
pub const Import = struct {
|
||||
|
|
@ -216,6 +218,7 @@ pub fn init(m: *Module, owner: *std.Build, options: CreateOptions, compile: ?*St
|
|||
.pic = options.pic,
|
||||
.red_zone = options.red_zone,
|
||||
.omit_frame_pointer = options.omit_frame_pointer,
|
||||
.error_tracing = options.error_tracing,
|
||||
.export_symbol_names = &.{},
|
||||
};
|
||||
|
||||
|
|
@ -601,6 +604,7 @@ pub fn appendZigProcessFlags(
|
|||
try addFlag(zig_args, m.stack_check, "-fstack-check", "-fno-stack-check");
|
||||
try addFlag(zig_args, m.stack_protector, "-fstack-protector", "-fno-stack-protector");
|
||||
try addFlag(zig_args, m.omit_frame_pointer, "-fomit-frame-pointer", "-fno-omit-frame-pointer");
|
||||
try addFlag(zig_args, m.error_tracing, "-ferror-tracing", "-fno-error-tracing");
|
||||
try addFlag(zig_args, m.sanitize_c, "-fsanitize-c", "-fno-sanitize-c");
|
||||
try addFlag(zig_args, m.sanitize_thread, "-fsanitize-thread", "-fno-sanitize-thread");
|
||||
try addFlag(zig_args, m.valgrind, "-fvalgrind", "-fno-valgrind");
|
||||
|
|
|
|||
240
src/Builtin.zig
Normal file
240
src/Builtin.zig
Normal file
|
|
@ -0,0 +1,240 @@
|
|||
target: std.Target,
|
||||
zig_backend: std.builtin.CompilerBackend,
|
||||
output_mode: std.builtin.OutputMode,
|
||||
link_mode: std.builtin.LinkMode,
|
||||
is_test: bool,
|
||||
test_evented_io: bool,
|
||||
single_threaded: bool,
|
||||
link_libc: bool,
|
||||
link_libcpp: bool,
|
||||
optimize_mode: std.builtin.OptimizeMode,
|
||||
error_tracing: bool,
|
||||
valgrind: bool,
|
||||
sanitize_thread: bool,
|
||||
pic: bool,
|
||||
pie: bool,
|
||||
strip: bool,
|
||||
code_model: std.builtin.CodeModel,
|
||||
omit_frame_pointer: bool,
|
||||
wasi_exec_model: std.builtin.WasiExecModel,
|
||||
|
||||
pub fn generate(opts: @This(), allocator: Allocator) Allocator.Error![:0]u8 {
|
||||
var buffer = std.ArrayList(u8).init(allocator);
|
||||
defer buffer.deinit();
|
||||
|
||||
const target = opts.target;
|
||||
const generic_arch_name = target.cpu.arch.genericName();
|
||||
const zig_backend = opts.zig_backend;
|
||||
|
||||
@setEvalBranchQuota(4000);
|
||||
try buffer.writer().print(
|
||||
\\const std = @import("std");
|
||||
\\/// Zig version. When writing code that supports multiple versions of Zig, prefer
|
||||
\\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks.
|
||||
\\pub const zig_version = std.SemanticVersion.parse(zig_version_string) catch unreachable;
|
||||
\\pub const zig_version_string = "{s}";
|
||||
\\pub const zig_backend = std.builtin.CompilerBackend.{};
|
||||
\\
|
||||
\\pub const output_mode = std.builtin.OutputMode.{};
|
||||
\\pub const link_mode = std.builtin.LinkMode.{};
|
||||
\\pub const is_test = {};
|
||||
\\pub const single_threaded = {};
|
||||
\\pub const abi = std.Target.Abi.{};
|
||||
\\pub const cpu: std.Target.Cpu = .{{
|
||||
\\ .arch = .{},
|
||||
\\ .model = &std.Target.{}.cpu.{},
|
||||
\\ .features = std.Target.{}.featureSet(&[_]std.Target.{}.Feature{{
|
||||
\\
|
||||
, .{
|
||||
build_options.version,
|
||||
std.zig.fmtId(@tagName(zig_backend)),
|
||||
std.zig.fmtId(@tagName(opts.output_mode)),
|
||||
std.zig.fmtId(@tagName(opts.link_mode)),
|
||||
opts.is_test,
|
||||
opts.single_threaded,
|
||||
std.zig.fmtId(@tagName(target.abi)),
|
||||
std.zig.fmtId(@tagName(target.cpu.arch)),
|
||||
std.zig.fmtId(generic_arch_name),
|
||||
std.zig.fmtId(target.cpu.model.name),
|
||||
std.zig.fmtId(generic_arch_name),
|
||||
std.zig.fmtId(generic_arch_name),
|
||||
});
|
||||
|
||||
for (target.cpu.arch.allFeaturesList(), 0..) |feature, index_usize| {
|
||||
const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize));
|
||||
const is_enabled = target.cpu.features.isEnabled(index);
|
||||
if (is_enabled) {
|
||||
try buffer.writer().print(" .{},\n", .{std.zig.fmtId(feature.name)});
|
||||
}
|
||||
}
|
||||
try buffer.writer().print(
|
||||
\\ }}),
|
||||
\\}};
|
||||
\\pub const os = std.Target.Os{{
|
||||
\\ .tag = .{},
|
||||
\\ .version_range = .{{
|
||||
,
|
||||
.{std.zig.fmtId(@tagName(target.os.tag))},
|
||||
);
|
||||
|
||||
switch (target.os.getVersionRange()) {
|
||||
.none => try buffer.appendSlice(" .none = {} },\n"),
|
||||
.semver => |semver| try buffer.writer().print(
|
||||
\\ .semver = .{{
|
||||
\\ .min = .{{
|
||||
\\ .major = {},
|
||||
\\ .minor = {},
|
||||
\\ .patch = {},
|
||||
\\ }},
|
||||
\\ .max = .{{
|
||||
\\ .major = {},
|
||||
\\ .minor = {},
|
||||
\\ .patch = {},
|
||||
\\ }},
|
||||
\\ }}}},
|
||||
\\
|
||||
, .{
|
||||
semver.min.major,
|
||||
semver.min.minor,
|
||||
semver.min.patch,
|
||||
|
||||
semver.max.major,
|
||||
semver.max.minor,
|
||||
semver.max.patch,
|
||||
}),
|
||||
.linux => |linux| try buffer.writer().print(
|
||||
\\ .linux = .{{
|
||||
\\ .range = .{{
|
||||
\\ .min = .{{
|
||||
\\ .major = {},
|
||||
\\ .minor = {},
|
||||
\\ .patch = {},
|
||||
\\ }},
|
||||
\\ .max = .{{
|
||||
\\ .major = {},
|
||||
\\ .minor = {},
|
||||
\\ .patch = {},
|
||||
\\ }},
|
||||
\\ }},
|
||||
\\ .glibc = .{{
|
||||
\\ .major = {},
|
||||
\\ .minor = {},
|
||||
\\ .patch = {},
|
||||
\\ }},
|
||||
\\ }}}},
|
||||
\\
|
||||
, .{
|
||||
linux.range.min.major,
|
||||
linux.range.min.minor,
|
||||
linux.range.min.patch,
|
||||
|
||||
linux.range.max.major,
|
||||
linux.range.max.minor,
|
||||
linux.range.max.patch,
|
||||
|
||||
linux.glibc.major,
|
||||
linux.glibc.minor,
|
||||
linux.glibc.patch,
|
||||
}),
|
||||
.windows => |windows| try buffer.writer().print(
|
||||
\\ .windows = .{{
|
||||
\\ .min = {s},
|
||||
\\ .max = {s},
|
||||
\\ }}}},
|
||||
\\
|
||||
,
|
||||
.{ windows.min, windows.max },
|
||||
),
|
||||
}
|
||||
try buffer.appendSlice(
|
||||
\\};
|
||||
\\pub const target: std.Target = .{
|
||||
\\ .cpu = cpu,
|
||||
\\ .os = os,
|
||||
\\ .abi = abi,
|
||||
\\ .ofmt = object_format,
|
||||
\\
|
||||
);
|
||||
|
||||
if (target.dynamic_linker.get()) |dl| {
|
||||
try buffer.writer().print(
|
||||
\\ .dynamic_linker = std.Target.DynamicLinker.init("{s}"),
|
||||
\\}};
|
||||
\\
|
||||
, .{dl});
|
||||
} else {
|
||||
try buffer.appendSlice(
|
||||
\\ .dynamic_linker = std.Target.DynamicLinker.none,
|
||||
\\};
|
||||
\\
|
||||
);
|
||||
}
|
||||
|
||||
// This is so that compiler_rt and libc.zig libraries know whether they
|
||||
// will eventually be linked with libc. They make different decisions
|
||||
// about what to export depending on whether another libc will be linked
|
||||
// in. For example, compiler_rt will not export the __chkstk symbol if it
|
||||
// knows libc will provide it, and likewise c.zig will not export memcpy.
|
||||
const link_libc = opts.link_libc;
|
||||
|
||||
try buffer.writer().print(
|
||||
\\pub const object_format = std.Target.ObjectFormat.{};
|
||||
\\pub const mode = std.builtin.OptimizeMode.{};
|
||||
\\pub const link_libc = {};
|
||||
\\pub const link_libcpp = {};
|
||||
\\pub const have_error_return_tracing = {};
|
||||
\\pub const valgrind_support = {};
|
||||
\\pub const sanitize_thread = {};
|
||||
\\pub const position_independent_code = {};
|
||||
\\pub const position_independent_executable = {};
|
||||
\\pub const strip_debug_info = {};
|
||||
\\pub const code_model = std.builtin.CodeModel.{};
|
||||
\\pub const omit_frame_pointer = {};
|
||||
\\
|
||||
, .{
|
||||
std.zig.fmtId(@tagName(target.ofmt)),
|
||||
std.zig.fmtId(@tagName(opts.optimize_mode)),
|
||||
link_libc,
|
||||
opts.link_libcpp,
|
||||
opts.error_tracing,
|
||||
opts.valgrind,
|
||||
opts.sanitize_thread,
|
||||
opts.pic,
|
||||
opts.pie,
|
||||
opts.strip,
|
||||
std.zig.fmtId(@tagName(opts.code_model)),
|
||||
opts.omit_frame_pointer,
|
||||
});
|
||||
|
||||
if (target.os.tag == .wasi) {
|
||||
const wasi_exec_model_fmt = std.zig.fmtId(@tagName(opts.wasi_exec_model));
|
||||
try buffer.writer().print(
|
||||
\\pub const wasi_exec_model = std.builtin.WasiExecModel.{};
|
||||
\\
|
||||
, .{wasi_exec_model_fmt});
|
||||
}
|
||||
|
||||
if (opts.is_test) {
|
||||
try buffer.appendSlice(
|
||||
\\pub var test_functions: []const std.builtin.TestFn = undefined; // overwritten later
|
||||
\\
|
||||
);
|
||||
if (opts.test_evented_io) {
|
||||
try buffer.appendSlice(
|
||||
\\pub const test_io_mode = .evented;
|
||||
\\
|
||||
);
|
||||
} else {
|
||||
try buffer.appendSlice(
|
||||
\\pub const test_io_mode = .blocking;
|
||||
\\
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return buffer.toOwnedSliceSentinel(0);
|
||||
}
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const build_options = @import("build_options");
|
||||
1457
src/Compilation.zig
1457
src/Compilation.zig
File diff suppressed because it is too large
Load diff
382
src/Compilation/Config.zig
Normal file
382
src/Compilation/Config.zig
Normal file
|
|
@ -0,0 +1,382 @@
|
|||
//! User-specified settings that have all the defaults resolved into concrete values.
|
||||
|
||||
have_zcu: bool,
|
||||
output_mode: std.builtin.OutputMode,
|
||||
link_mode: std.builtin.LinkMode,
|
||||
link_libc: bool,
|
||||
link_libcpp: bool,
|
||||
link_libunwind: bool,
|
||||
any_unwind_tables: bool,
|
||||
pie: bool,
|
||||
/// If this is true then linker code is responsible for making an LLVM IR
|
||||
/// Module, outputting it to an object file, and then linking that together
|
||||
/// with link options and other objects. Otherwise (depending on `use_lld`)
|
||||
/// linker code directly outputs and updates the final binary.
|
||||
use_llvm: bool,
|
||||
/// Whether or not the LLVM library API will be used by the LLVM backend.
|
||||
use_lib_llvm: bool,
|
||||
/// If this is true then linker code is responsible for outputting an object
|
||||
/// file and then using LLD to link it together with the link options and other
|
||||
/// objects. Otherwise (depending on `use_llvm`) linker code directly outputs
|
||||
/// and updates the final binary.
|
||||
use_lld: bool,
|
||||
c_frontend: CFrontend,
|
||||
lto: bool,
|
||||
/// WASI-only. Type of WASI execution model ("command" or "reactor").
|
||||
/// Always set to `command` for non-WASI targets.
|
||||
wasi_exec_model: std.builtin.WasiExecModel,
|
||||
import_memory: bool,
|
||||
export_memory: bool,
|
||||
shared_memory: bool,
|
||||
is_test: bool,
|
||||
test_evented_io: bool,
|
||||
entry: ?[]const u8,
|
||||
|
||||
pub const CFrontend = enum { clang, aro };
|
||||
|
||||
pub const Options = struct {
|
||||
output_mode: std.builtin.OutputMode,
|
||||
resolved_target: Module.ResolvedTarget,
|
||||
is_test: bool,
|
||||
have_zcu: bool,
|
||||
emit_bin: bool,
|
||||
root_optimize_mode: ?std.builtin.OptimizeMode = null,
|
||||
link_mode: ?std.builtin.LinkMode = null,
|
||||
ensure_libc_on_non_freestanding: bool = false,
|
||||
ensure_libcpp_on_non_freestanding: bool = false,
|
||||
any_non_single_threaded: bool = false,
|
||||
any_sanitize_thread: bool = false,
|
||||
any_unwind_tables: bool = false,
|
||||
any_dyn_libs: bool = false,
|
||||
c_source_files_len: usize = 0,
|
||||
emit_llvm_ir: bool = false,
|
||||
emit_llvm_bc: bool = false,
|
||||
link_libc: ?bool = null,
|
||||
link_libcpp: ?bool = null,
|
||||
link_libunwind: ?bool = null,
|
||||
pie: ?bool = null,
|
||||
use_llvm: ?bool = null,
|
||||
use_lib_llvm: ?bool = null,
|
||||
use_lld: ?bool = null,
|
||||
use_clang: ?bool = null,
|
||||
lto: ?bool = null,
|
||||
entry: union(enum) {
|
||||
default,
|
||||
disabled,
|
||||
enabled,
|
||||
named: []const u8,
|
||||
} = .default,
|
||||
/// WASI-only. Type of WASI execution model ("command" or "reactor").
|
||||
wasi_exec_model: ?std.builtin.WasiExecModel = null,
|
||||
import_memory: ?bool = null,
|
||||
export_memory: ?bool = null,
|
||||
shared_memory: ?bool = null,
|
||||
test_evented_io: bool = false,
|
||||
};
|
||||
|
||||
pub fn resolve(options: Options) !Config {
|
||||
const target = options.resolved_target.result;
|
||||
|
||||
// WASI-only. Resolve the optional exec-model option, defaults to command.
|
||||
if (target.os.tag != .wasi and options.wasi_exec_model != null)
|
||||
return error.WasiExecModelRequiresWasi;
|
||||
const wasi_exec_model = options.wasi_exec_model orelse .command;
|
||||
|
||||
const shared_memory = b: {
|
||||
if (!target.cpu.arch.isWasm()) {
|
||||
if (options.shared_memory == true) return error.SharedMemoryIsWasmOnly;
|
||||
break :b false;
|
||||
}
|
||||
if (options.output_mode == .Obj) {
|
||||
if (options.shared_memory == true) return error.ObjectFilesCannotShareMemory;
|
||||
break :b false;
|
||||
}
|
||||
if (!std.Target.wasm.featureSetHasAll(target.cpu.features, .{ .atomics, .bulk_memory })) {
|
||||
if (options.shared_memory == true)
|
||||
return error.SharedMemoryRequiresAtomicsAndBulkMemory;
|
||||
break :b false;
|
||||
}
|
||||
if (options.any_non_single_threaded) {
|
||||
if (options.shared_memory == false)
|
||||
return error.ThreadsRequireSharedMemory;
|
||||
break :b true;
|
||||
}
|
||||
break :b options.shared_memory orelse false;
|
||||
};
|
||||
|
||||
const entry: ?[]const u8 = switch (options.entry) {
|
||||
.disabled => null,
|
||||
.default => b: {
|
||||
if (options.output_mode != .Exe) break :b null;
|
||||
break :b target_util.defaultEntrySymbolName(target, wasi_exec_model) orelse
|
||||
return error.UnknownTargetEntryPoint;
|
||||
},
|
||||
.enabled => target_util.defaultEntrySymbolName(target, wasi_exec_model) orelse
|
||||
return error.UnknownTargetEntryPoint,
|
||||
.named => |name| name,
|
||||
};
|
||||
if (entry != null and options.output_mode != .Exe)
|
||||
return error.NonExecutableEntryPoint;
|
||||
|
||||
// *If* the LLVM backend were to be selected, should Zig use the LLVM
|
||||
// library to build the LLVM module?
|
||||
const use_lib_llvm = b: {
|
||||
if (!build_options.have_llvm) {
|
||||
if (options.use_lib_llvm == true) return error.LlvmLibraryUnavailable;
|
||||
break :b false;
|
||||
}
|
||||
break :b options.use_lib_llvm orelse true;
|
||||
};
|
||||
|
||||
const root_optimize_mode = options.root_optimize_mode orelse .Debug;
|
||||
|
||||
// Make a decision on whether to use LLVM backend for machine code generation.
|
||||
// Note that using the LLVM backend does not necessarily mean using LLVM libraries.
|
||||
// For example, Zig can emit .bc and .ll files directly, and this is still considered
|
||||
// using "the LLVM backend".
|
||||
const use_llvm = b: {
|
||||
// If emitting to LLVM bitcode object format, must use LLVM backend.
|
||||
if (options.emit_llvm_ir or options.emit_llvm_bc) {
|
||||
if (options.use_llvm == false) return error.EmittingLlvmModuleRequiresLlvmBackend;
|
||||
break :b true;
|
||||
}
|
||||
|
||||
// If LLVM does not support the target, then we can't use it.
|
||||
if (!target_util.hasLlvmSupport(target, target.ofmt)) {
|
||||
if (options.use_llvm == true) return error.LlvmLacksTargetSupport;
|
||||
break :b false;
|
||||
}
|
||||
|
||||
if (options.use_llvm) |x| break :b x;
|
||||
|
||||
// If we have no zig code to compile, no need for LLVM.
|
||||
if (!options.have_zcu) break :b false;
|
||||
|
||||
// If we cannot use LLVM libraries, then our own backends will be a
|
||||
// better default since the LLVM backend can only produce bitcode
|
||||
// and not an object file or executable.
|
||||
if (!use_lib_llvm) break :b false;
|
||||
|
||||
// Prefer LLVM for release builds.
|
||||
if (root_optimize_mode != .Debug) break :b true;
|
||||
|
||||
// At this point we would prefer to use our own self-hosted backend,
|
||||
// because the compilation speed is better than LLVM. But only do it if
|
||||
// we are confident in the robustness of the backend.
|
||||
break :b !target_util.selfHostedBackendIsAsRobustAsLlvm(target);
|
||||
};
|
||||
|
||||
if (!use_lib_llvm and use_llvm and options.emit_bin) {
|
||||
// Explicit request to use LLVM to produce an object file, but without
|
||||
// using LLVM libraries. Impossible.
|
||||
return error.EmittingBinaryRequiresLlvmLibrary;
|
||||
}
|
||||
|
||||
// Make a decision on whether to use LLD or our own linker.
|
||||
const use_lld = b: {
|
||||
if (target.isDarwin()) {
|
||||
if (options.use_lld == true) return error.LldIncompatibleOs;
|
||||
break :b false;
|
||||
}
|
||||
|
||||
if (!build_options.have_llvm) {
|
||||
if (options.use_lld == true) return error.LldUnavailable;
|
||||
break :b false;
|
||||
}
|
||||
|
||||
if (target.ofmt == .c) {
|
||||
if (options.use_lld == true) return error.LldIncompatibleObjectFormat;
|
||||
break :b false;
|
||||
}
|
||||
|
||||
if (options.lto == true) {
|
||||
if (options.use_lld == false) return error.LtoRequiresLld;
|
||||
break :b true;
|
||||
}
|
||||
|
||||
if (options.use_lld) |x| break :b x;
|
||||
break :b true;
|
||||
};
|
||||
|
||||
// Make a decision on whether to use Clang or Aro for translate-c and compiling C files.
|
||||
const c_frontend: CFrontend = b: {
|
||||
if (!build_options.have_llvm) {
|
||||
if (options.use_clang == true) return error.ClangUnavailable;
|
||||
break :b .aro;
|
||||
}
|
||||
if (options.use_clang) |clang| {
|
||||
break :b if (clang) .clang else .aro;
|
||||
}
|
||||
break :b .clang;
|
||||
};
|
||||
|
||||
const lto = b: {
|
||||
if (!use_lld) {
|
||||
// zig ld LTO support is tracked by
|
||||
// https://github.com/ziglang/zig/issues/8680
|
||||
if (options.lto == true) return error.LtoRequiresLld;
|
||||
break :b false;
|
||||
}
|
||||
|
||||
if (options.lto) |x| break :b x;
|
||||
if (options.c_source_files_len == 0) break :b false;
|
||||
|
||||
if (target.cpu.arch.isRISCV()) {
|
||||
// Clang and LLVM currently don't support RISC-V target-abi for LTO.
|
||||
// Compiling with LTO may fail or produce undesired results.
|
||||
// See https://reviews.llvm.org/D71387
|
||||
// See https://reviews.llvm.org/D102582
|
||||
break :b false;
|
||||
}
|
||||
|
||||
break :b switch (options.output_mode) {
|
||||
.Lib, .Obj => false,
|
||||
.Exe => switch (root_optimize_mode) {
|
||||
.Debug => false,
|
||||
.ReleaseSafe, .ReleaseFast, .ReleaseSmall => true,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const link_libcpp = b: {
|
||||
if (options.link_libcpp == true) break :b true;
|
||||
if (options.any_sanitize_thread) {
|
||||
// TSAN is (for now...) implemented in C++ so it requires linking libc++.
|
||||
if (options.link_libcpp == false) return error.SanitizeThreadRequiresLibCpp;
|
||||
break :b true;
|
||||
}
|
||||
if (options.ensure_libcpp_on_non_freestanding and target.os.tag != .freestanding)
|
||||
break :b true;
|
||||
|
||||
break :b false;
|
||||
};
|
||||
|
||||
const link_libunwind = b: {
|
||||
if (link_libcpp and target_util.libcNeedsLibUnwind(target)) {
|
||||
if (options.link_libunwind == false) return error.LibCppRequiresLibUnwind;
|
||||
break :b true;
|
||||
}
|
||||
break :b options.link_libunwind orelse false;
|
||||
};
|
||||
|
||||
const link_libc = b: {
|
||||
if (target_util.osRequiresLibC(target)) {
|
||||
if (options.link_libc == false) return error.OsRequiresLibC;
|
||||
break :b true;
|
||||
}
|
||||
if (link_libcpp) {
|
||||
if (options.link_libc == false) return error.LibCppRequiresLibC;
|
||||
break :b true;
|
||||
}
|
||||
if (link_libunwind) {
|
||||
if (options.link_libc == false) return error.LibUnwindRequiresLibC;
|
||||
break :b true;
|
||||
}
|
||||
if (options.link_libc) |x| break :b x;
|
||||
if (options.ensure_libc_on_non_freestanding and target.os.tag != .freestanding)
|
||||
break :b true;
|
||||
|
||||
break :b false;
|
||||
};
|
||||
|
||||
const any_unwind_tables = options.any_unwind_tables or
|
||||
link_libunwind or target_util.needUnwindTables(target);
|
||||
|
||||
const link_mode = b: {
|
||||
const explicitly_exe_or_dyn_lib = switch (options.output_mode) {
|
||||
.Obj => false,
|
||||
.Lib => (options.link_mode orelse .Static) == .Dynamic,
|
||||
.Exe => true,
|
||||
};
|
||||
|
||||
if (target_util.cannotDynamicLink(target)) {
|
||||
if (options.link_mode == .Dynamic) return error.TargetCannotDynamicLink;
|
||||
break :b .Static;
|
||||
}
|
||||
if (explicitly_exe_or_dyn_lib and link_libc and
|
||||
(target.isGnuLibC() or target_util.osRequiresLibC(target)))
|
||||
{
|
||||
if (options.link_mode == .Static) return error.LibCRequiresDynamicLinking;
|
||||
break :b .Dynamic;
|
||||
}
|
||||
// When creating a executable that links to system libraries, we
|
||||
// require dynamic linking, but we must not link static libraries
|
||||
// or object files dynamically!
|
||||
if (options.any_dyn_libs and options.output_mode == .Exe) {
|
||||
if (options.link_mode == .Static) return error.SharedLibrariesRequireDynamicLinking;
|
||||
break :b .Dynamic;
|
||||
}
|
||||
|
||||
if (options.link_mode) |link_mode| break :b link_mode;
|
||||
|
||||
if (explicitly_exe_or_dyn_lib and link_libc and
|
||||
options.resolved_target.is_native_abi and target.abi.isMusl())
|
||||
{
|
||||
// If targeting the system's native ABI and the system's libc is
|
||||
// musl, link dynamically by default.
|
||||
break :b .Dynamic;
|
||||
}
|
||||
|
||||
// Static is generally a better default. Fight me.
|
||||
break :b .Static;
|
||||
};
|
||||
|
||||
const import_memory = options.import_memory orelse false;
|
||||
const export_memory = b: {
|
||||
if (link_mode == .Dynamic) {
|
||||
if (options.export_memory == true) return error.ExportMemoryAndDynamicIncompatible;
|
||||
break :b false;
|
||||
}
|
||||
if (options.export_memory) |x| break :b x;
|
||||
break :b !import_memory;
|
||||
};
|
||||
|
||||
const pie: bool = b: {
|
||||
switch (options.output_mode) {
|
||||
.Obj, .Exe => {},
|
||||
.Lib => if (link_mode == .Dynamic) {
|
||||
if (options.pie == true) return error.DynamicLibraryPrecludesPie;
|
||||
break :b false;
|
||||
},
|
||||
}
|
||||
if (target_util.requiresPIE(target)) {
|
||||
if (options.pie == false) return error.TargetRequiresPie;
|
||||
break :b true;
|
||||
}
|
||||
if (options.any_sanitize_thread) {
|
||||
if (options.pie == false) return error.SanitizeThreadRequiresPie;
|
||||
break :b true;
|
||||
}
|
||||
if (options.pie) |pie| break :b pie;
|
||||
break :b false;
|
||||
};
|
||||
|
||||
return .{
|
||||
.output_mode = options.output_mode,
|
||||
.have_zcu = options.have_zcu,
|
||||
.is_test = options.is_test,
|
||||
.test_evented_io = options.test_evented_io,
|
||||
.link_mode = link_mode,
|
||||
.link_libc = link_libc,
|
||||
.link_libcpp = link_libcpp,
|
||||
.link_libunwind = link_libunwind,
|
||||
.any_unwind_tables = any_unwind_tables,
|
||||
.pie = pie,
|
||||
.lto = lto,
|
||||
.import_memory = import_memory,
|
||||
.export_memory = export_memory,
|
||||
.shared_memory = shared_memory,
|
||||
.c_frontend = c_frontend,
|
||||
.use_llvm = use_llvm,
|
||||
.use_lib_llvm = use_lib_llvm,
|
||||
.use_lld = use_lld,
|
||||
.entry = entry,
|
||||
.wasi_exec_model = wasi_exec_model,
|
||||
};
|
||||
}
|
||||
|
||||
const std = @import("std");
|
||||
const Module = @import("../Package.zig").Module;
|
||||
const Config = @This();
|
||||
const target_util = @import("../target.zig");
|
||||
const build_options = @import("build_options");
|
||||
|
|
@ -59,6 +59,7 @@ root_mod: *Package.Module,
|
|||
/// Normally, `main_mod` and `root_mod` are the same. The exception is `zig test`, in which
|
||||
/// `root_mod` is the test runner, and `main_mod` is the user's source file which has the tests.
|
||||
main_mod: *Package.Module,
|
||||
std_mod: *Package.Module,
|
||||
sema_prog_node: std.Progress.Node = undefined,
|
||||
|
||||
/// Used by AstGen worker to load and store ZIR cache.
|
||||
|
|
@ -3599,7 +3600,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
|
|||
|
||||
// TODO: figure out how this works under incremental changes to builtin.zig!
|
||||
const builtin_type_target_index: InternPool.Index = blk: {
|
||||
const std_mod = mod.main_mod.deps.get("std").?;
|
||||
const std_mod = mod.std_mod;
|
||||
if (decl.getFileScope(mod).mod != std_mod) break :blk .none;
|
||||
// We're in the std module.
|
||||
const std_file = (try mod.importPkg(std_mod)).file;
|
||||
|
|
@ -3924,10 +3925,7 @@ pub fn importFile(
|
|||
import_string: []const u8,
|
||||
) !ImportFileResult {
|
||||
if (std.mem.eql(u8, import_string, "std")) {
|
||||
return mod.importPkg(mod.main_mod.deps.get("std").?);
|
||||
}
|
||||
if (std.mem.eql(u8, import_string, "builtin")) {
|
||||
return mod.importPkg(mod.main_mod.deps.get("builtin").?);
|
||||
return mod.importPkg(mod.std_mod);
|
||||
}
|
||||
if (std.mem.eql(u8, import_string, "root")) {
|
||||
return mod.importPkg(mod.root_mod);
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
//! Corresponds to something that Zig source code can `@import`.
|
||||
//! Not to be confused with src/Module.zig which should be renamed
|
||||
//! to something else. https://github.com/ziglang/zig/issues/14307
|
||||
//! Not to be confused with src/Module.zig which will be renamed
|
||||
//! to Zcu. https://github.com/ziglang/zig/issues/14307
|
||||
|
||||
/// Only files inside this directory can be imported.
|
||||
root: Package.Path,
|
||||
|
|
@ -14,6 +14,26 @@ fully_qualified_name: []const u8,
|
|||
/// responsible for detecting these names and using the correct package.
|
||||
deps: Deps = .{},
|
||||
|
||||
resolved_target: ResolvedTarget,
|
||||
optimize_mode: std.builtin.OptimizeMode,
|
||||
code_model: std.builtin.CodeModel,
|
||||
single_threaded: bool,
|
||||
error_tracing: bool,
|
||||
valgrind: bool,
|
||||
pic: bool,
|
||||
strip: bool,
|
||||
omit_frame_pointer: bool,
|
||||
stack_check: bool,
|
||||
stack_protector: u32,
|
||||
red_zone: bool,
|
||||
sanitize_c: bool,
|
||||
sanitize_thread: bool,
|
||||
unwind_tables: bool,
|
||||
cc_argv: []const []const u8,
|
||||
|
||||
/// The contents of `@import("builtin")` for this module.
|
||||
generated_builtin_source: []const u8,
|
||||
|
||||
pub const Deps = std.StringArrayHashMapUnmanaged(*Module);
|
||||
|
||||
pub const Tree = struct {
|
||||
|
|
@ -21,10 +41,382 @@ pub const Tree = struct {
|
|||
build_module_table: std.AutoArrayHashMapUnmanaged(MultiHashHexDigest, *Module),
|
||||
};
|
||||
|
||||
pub fn create(allocator: Allocator, m: Module) Allocator.Error!*Module {
|
||||
const new = try allocator.create(Module);
|
||||
new.* = m;
|
||||
return new;
|
||||
pub const CreateOptions = struct {
|
||||
/// Where to store builtin.zig. The global cache directory is used because
|
||||
/// it is a pure function based on CLI flags.
|
||||
global_cache_directory: Cache.Directory,
|
||||
paths: Paths,
|
||||
fully_qualified_name: []const u8,
|
||||
|
||||
cc_argv: []const []const u8,
|
||||
inherited: Inherited,
|
||||
global: Compilation.Config,
|
||||
/// If this is null then `resolved_target` must be non-null.
|
||||
parent: ?*Package.Module,
|
||||
|
||||
builtin_mod: ?*Package.Module,
|
||||
|
||||
pub const Paths = struct {
|
||||
root: Package.Path,
|
||||
/// Relative to `root`. May contain path separators.
|
||||
root_src_path: []const u8,
|
||||
};
|
||||
|
||||
pub const Inherited = struct {
|
||||
/// If this is null then `parent` must be non-null.
|
||||
resolved_target: ?ResolvedTarget = null,
|
||||
optimize_mode: ?std.builtin.OptimizeMode = null,
|
||||
code_model: ?std.builtin.CodeModel = null,
|
||||
single_threaded: ?bool = null,
|
||||
error_tracing: ?bool = null,
|
||||
valgrind: ?bool = null,
|
||||
pic: ?bool = null,
|
||||
strip: ?bool = null,
|
||||
omit_frame_pointer: ?bool = null,
|
||||
stack_check: ?bool = null,
|
||||
/// null means default.
|
||||
/// 0 means no stack protector.
|
||||
/// other number means stack protection with that buffer size.
|
||||
stack_protector: ?u32 = null,
|
||||
red_zone: ?bool = null,
|
||||
unwind_tables: ?bool = null,
|
||||
sanitize_c: ?bool = null,
|
||||
sanitize_thread: ?bool = null,
|
||||
};
|
||||
};
|
||||
|
||||
pub const ResolvedTarget = struct {
|
||||
result: std.Target,
|
||||
is_native_os: bool,
|
||||
is_native_abi: bool,
|
||||
llvm_cpu_features: ?[*:0]const u8 = null,
|
||||
};
|
||||
|
||||
/// Creates a `Package.Module`, resolving every per-module setting.
/// Resolution priority for each setting is: explicit value in
/// `options.inherited`, then the parent module's resolved value, then a
/// default derived from the target, optimize mode, and global config.
///
/// At least one of `parent` and `resolved_target` must be non-null.
///
/// Returns an error when an explicitly requested setting is impossible,
/// e.g. `error.ValgrindUnsupportedOnTarget` or `error.TargetRequiresPic`.
/// All allocations are made in `arena`.
pub fn create(arena: Allocator, options: CreateOptions) !*Package.Module {
    // Safe `.?` per the precondition: when no explicit target is given,
    // the parent's already-resolved target is inherited.
    const resolved_target = options.inherited.resolved_target orelse options.parent.?.resolved_target;
    const target = resolved_target.result;

    const optimize_mode = options.inherited.optimize_mode orelse
        if (options.parent) |p| p.optimize_mode else .Debug;

    const unwind_tables = options.inherited.unwind_tables orelse
        if (options.parent) |p| p.unwind_tables else options.global.any_unwind_tables;

    const strip = b: {
        if (options.inherited.strip) |x| break :b x;
        if (options.parent) |p| break :b p.strip;
        // Strip by default when optimizing for size or when the target
        // cannot carry debug info anyway.
        if (optimize_mode == .ReleaseSmall) break :b true;
        if (!target_util.hasDebugInfo(target)) break :b true;
        break :b false;
    };

    const valgrind = b: {
        if (!target_util.hasValgrindSupport(target)) {
            if (options.inherited.valgrind == true)
                return error.ValgrindUnsupportedOnTarget;
            break :b false;
        }
        if (options.inherited.valgrind) |x| break :b x;
        if (options.parent) |p| break :b p.valgrind;
        // Valgrind integration is pointless without debug info.
        if (strip) break :b false;
        break :b optimize_mode == .Debug;
    };

    const zig_backend = target_util.zigBackend(target, options.global.use_llvm);

    const single_threaded = b: {
        if (target_util.alwaysSingleThreaded(target)) {
            if (options.inherited.single_threaded == false)
                return error.TargetRequiresSingleThreaded;
            break :b true;
        }

        // Only consult backend thread support when Zig code is being
        // compiled; pure object linking does not involve the backend.
        if (options.global.have_zcu) {
            if (!target_util.supportsThreads(target, zig_backend)) {
                if (options.inherited.single_threaded == false)
                    return error.BackendRequiresSingleThreaded;
                break :b true;
            }
        }

        if (options.inherited.single_threaded) |x| break :b x;
        if (options.parent) |p| break :b p.single_threaded;
        break :b target_util.defaultSingleThreaded(target);
    };

    const error_tracing = b: {
        if (options.inherited.error_tracing) |x| break :b x;
        if (options.parent) |p| break :b p.error_tracing;
        // Error traces require debug info.
        if (strip) break :b false;
        break :b switch (optimize_mode) {
            .Debug => true,
            .ReleaseSafe, .ReleaseFast, .ReleaseSmall => false,
        };
    };

    const pic = b: {
        if (target_util.requiresPIC(target, options.global.link_libc)) {
            if (options.inherited.pic == false)
                return error.TargetRequiresPic;
            break :b true;
        }
        if (options.global.pie) {
            if (options.inherited.pic == false)
                return error.PieRequiresPic;
            break :b true;
        }
        if (options.global.link_mode == .Dynamic) {
            if (options.inherited.pic == false)
                return error.DynamicLinkingRequiresPic;
            break :b true;
        }
        if (options.inherited.pic) |x| break :b x;
        if (options.parent) |p| break :b p.pic;
        break :b false;
    };

    const red_zone = b: {
        if (!target_util.hasRedZone(target)) {
            if (options.inherited.red_zone == true)
                return error.TargetHasNoRedZone;
            // Fixed: previously `break :b true`, which reported the red
            // zone as enabled on a target that has none, contradicting the
            // error check directly above (and the unsupported-feature
            // handling in the valgrind/stack_check blocks).
            break :b false;
        }
        if (options.inherited.red_zone) |x| break :b x;
        if (options.parent) |p| break :b p.red_zone;
        break :b true;
    };

    const omit_frame_pointer = b: {
        if (options.inherited.omit_frame_pointer) |x| break :b x;
        if (options.parent) |p| break :b p.omit_frame_pointer;
        // Keep frame pointers in Debug for better stack traces.
        if (optimize_mode == .Debug) break :b false;
        break :b true;
    };

    const sanitize_thread = b: {
        if (options.inherited.sanitize_thread) |x| break :b x;
        if (options.parent) |p| break :b p.sanitize_thread;
        break :b false;
    };

    const code_model = b: {
        if (options.inherited.code_model) |x| break :b x;
        if (options.parent) |p| break :b p.code_model;
        break :b .default;
    };

    const is_safe_mode = switch (optimize_mode) {
        .Debug, .ReleaseSafe => true,
        .ReleaseFast, .ReleaseSmall => false,
    };

    const sanitize_c = b: {
        if (options.inherited.sanitize_c) |x| break :b x;
        if (options.parent) |p| break :b p.sanitize_c;
        break :b is_safe_mode;
    };

    const stack_check = b: {
        if (!target_util.supportsStackProbing(target)) {
            if (options.inherited.stack_check == true)
                return error.StackCheckUnsupportedByTarget;
            break :b false;
        }
        if (options.inherited.stack_check) |x| break :b x;
        if (options.parent) |p| break :b p.stack_check;
        break :b is_safe_mode;
    };

    // 0 means no stack protector; any other value is the protected buffer
    // size threshold.
    const stack_protector: u32 = sp: {
        if (!target_util.supportsStackProtector(target, zig_backend)) {
            if (options.inherited.stack_protector) |x| {
                if (x > 0) return error.StackProtectorUnsupportedByTarget;
            }
            break :sp 0;
        }

        // This logic is checking for linking libc because otherwise our start code
        // which is trying to set up TLS (i.e. the fs/gs registers) but the stack
        // protection code depends on fs/gs registers being already set up.
        // If we were able to annotate start code, or perhaps the entire std lib,
        // as being exempt from stack protection checks, we could change this logic
        // to supporting stack protection even when not linking libc.
        // TODO file issue about this
        if (!options.global.link_libc) {
            if (options.inherited.stack_protector) |x| {
                if (x > 0) return error.StackProtectorUnavailableWithoutLibC;
            }
            break :sp 0;
        }

        if (options.inherited.stack_protector) |x| break :sp x;
        if (options.parent) |p| break :sp p.stack_protector;
        if (!is_safe_mode) break :sp 0;

        break :sp target_util.default_stack_protector_buffer_size;
    };

    // Lazily build the comma-separated "+feat,-feat" string LLVM expects,
    // unless the resolved target already carries one.
    const llvm_cpu_features: ?[*:0]const u8 = b: {
        if (resolved_target.llvm_cpu_features) |x| break :b x;
        if (!options.global.use_llvm) break :b null;

        var buf = std.ArrayList(u8).init(arena);
        for (target.cpu.arch.allFeaturesList(), 0..) |feature, index_usize| {
            const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize));
            const is_enabled = target.cpu.features.isEnabled(index);

            if (feature.llvm_name) |llvm_name| {
                const plus_or_minus = "-+"[@intFromBool(is_enabled)];
                try buf.ensureUnusedCapacity(2 + llvm_name.len);
                buf.appendAssumeCapacity(plus_or_minus);
                buf.appendSliceAssumeCapacity(llvm_name);
                buf.appendSliceAssumeCapacity(",");
            }
        }
        if (buf.items.len == 0) break :b "";
        // Replace the trailing comma with the null terminator.
        assert(std.mem.endsWith(u8, buf.items, ","));
        buf.items[buf.items.len - 1] = 0;
        buf.shrinkAndFree(buf.items.len);
        break :b buf.items[0 .. buf.items.len - 1 :0].ptr;
    };

    // Every module depends on a "builtin" module carrying the generated
    // builtin.zig source. Reuse the caller-provided one if any; otherwise
    // synthesize a module with the same resolved settings.
    const builtin_mod = options.builtin_mod orelse b: {
        const generated_builtin_source = try Builtin.generate(.{
            .target = target,
            .zig_backend = zig_backend,
            .output_mode = options.global.output_mode,
            .link_mode = options.global.link_mode,
            .is_test = options.global.is_test,
            .test_evented_io = options.global.test_evented_io,
            .single_threaded = single_threaded,
            .link_libc = options.global.link_libc,
            .link_libcpp = options.global.link_libcpp,
            .optimize_mode = optimize_mode,
            .error_tracing = error_tracing,
            .valgrind = valgrind,
            .sanitize_thread = sanitize_thread,
            .pic = pic,
            .pie = options.global.pie,
            .strip = strip,
            .code_model = code_model,
            .omit_frame_pointer = omit_frame_pointer,
            .wasi_exec_model = options.global.wasi_exec_model,
        }, arena);

        // Content-address the generated source in the global cache so that
        // identical configurations share one builtin module directory.
        const digest = Cache.HashHelper.oneShot(generated_builtin_source);
        const builtin_sub_path = try arena.dupe(u8, "b" ++ std.fs.path.sep_str ++ digest);
        const new = try arena.create(Module);
        new.* = .{
            .root = .{
                .root_dir = options.global_cache_directory,
                .sub_path = builtin_sub_path,
            },
            .root_src_path = "builtin.zig",
            .fully_qualified_name = if (options.parent == null)
                "builtin"
            else
                try std.fmt.allocPrint(arena, "{s}.builtin", .{options.fully_qualified_name}),
            .resolved_target = .{
                .result = target,
                .is_native_os = resolved_target.is_native_os,
                .is_native_abi = resolved_target.is_native_abi,
                .llvm_cpu_features = llvm_cpu_features,
            },
            .optimize_mode = optimize_mode,
            .single_threaded = single_threaded,
            .error_tracing = error_tracing,
            .valgrind = valgrind,
            .pic = pic,
            .strip = strip,
            .omit_frame_pointer = omit_frame_pointer,
            .stack_check = stack_check,
            .stack_protector = stack_protector,
            .code_model = code_model,
            .red_zone = red_zone,
            .generated_builtin_source = generated_builtin_source,
            .sanitize_c = sanitize_c,
            .sanitize_thread = sanitize_thread,
            .unwind_tables = unwind_tables,
            .cc_argv = &.{},
        };
        break :b new;
    };

    const mod = try arena.create(Module);
    mod.* = .{
        .root = options.paths.root,
        .root_src_path = options.paths.root_src_path,
        .fully_qualified_name = options.fully_qualified_name,
        .resolved_target = .{
            .result = target,
            .is_native_os = resolved_target.is_native_os,
            .is_native_abi = resolved_target.is_native_abi,
            .llvm_cpu_features = llvm_cpu_features,
        },
        .optimize_mode = optimize_mode,
        .single_threaded = single_threaded,
        .error_tracing = error_tracing,
        .valgrind = valgrind,
        .pic = pic,
        .strip = strip,
        .omit_frame_pointer = omit_frame_pointer,
        .stack_check = stack_check,
        .stack_protector = stack_protector,
        .code_model = code_model,
        .red_zone = red_zone,
        .generated_builtin_source = builtin_mod.generated_builtin_source,
        .sanitize_c = sanitize_c,
        .sanitize_thread = sanitize_thread,
        .unwind_tables = unwind_tables,
        .cc_argv = options.cc_argv,
    };

    // Register the builtin module as the first dependency; see
    // `getBuiltinDependency`.
    try mod.deps.ensureUnusedCapacity(arena, 1);
    mod.deps.putAssumeCapacityNoClobber("builtin", builtin_mod);

    return mod;
}
|
||||
|
||||
/// All fields correspond to `CreateOptions`.
pub const LimitedOptions = struct {
    /// Corresponds to `CreateOptions.paths.root`.
    root: Package.Path,
    /// Corresponds to `CreateOptions.paths.root_src_path`.
    root_src_path: []const u8,
    fully_qualified_name: []const u8,
};
|
||||
|
||||
/// This one can only be used if the Module will only be used for AstGen and
/// earlier in the pipeline. Illegal behavior occurs if a limited module
/// touches Sema.
pub fn createLimited(gpa: Allocator, options: LimitedOptions) Allocator.Error!*Package.Module {
    const module = try gpa.create(Module);
    module.* = .{
        // The only fields AstGen and earlier stages consult: where the
        // source lives and what the module is called.
        .root = options.root,
        .root_src_path = options.root_src_path,
        .fully_qualified_name = options.fully_qualified_name,

        // Everything below is deliberately left uninitialized; per the doc
        // comment, reading any of it (i.e. reaching Sema) is illegal
        // behavior for a limited module.
        .resolved_target = undefined,
        .optimize_mode = undefined,
        .code_model = undefined,
        .single_threaded = undefined,
        .error_tracing = undefined,
        .valgrind = undefined,
        .pic = undefined,
        .strip = undefined,
        .omit_frame_pointer = undefined,
        .stack_check = undefined,
        .stack_protector = undefined,
        .red_zone = undefined,
        .sanitize_c = undefined,
        .sanitize_thread = undefined,
        .unwind_tables = undefined,
        .cc_argv = undefined,
        .generated_builtin_source = undefined,
    };
    return module;
}
|
||||
|
||||
/// Returns the "builtin" module of `m`. Relies on `create` having inserted
/// it as the first entry of `deps`; assumes `m` was made by `create`, not
/// `createLimited` — TODO confirm no other construction path exists.
pub fn getBuiltinDependency(m: *Module) *Module {
    const dep_modules = m.deps.values();
    return dep_modules[0];
}
|
||||
|
||||
/// This source file is itself the `Module` struct (file-as-struct idiom).
const Module = @This();
|
||||
|
|
@ -32,3 +424,8 @@ const Package = @import("../Package.zig");
|
|||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const MultiHashHexDigest = Package.Manifest.MultiHashHexDigest;
|
||||
const target_util = @import("../target.zig");
|
||||
const Cache = std.Build.Cache;
|
||||
const Builtin = @import("../Builtin.zig");
|
||||
const assert = std.debug.assert;
|
||||
const Compilation = @import("../Compilation.zig");
|
||||
|
|
|
|||
|
|
@ -36668,7 +36668,7 @@ fn getBuiltinDecl(sema: *Sema, block: *Block, name: []const u8) CompileError!Int
|
|||
|
||||
const mod = sema.mod;
|
||||
const ip = &mod.intern_pool;
|
||||
const std_mod = mod.main_mod.deps.get("std").?;
|
||||
const std_mod = mod.std_mod;
|
||||
const std_file = (mod.importPkg(std_mod) catch unreachable).file;
|
||||
const opt_builtin_inst = (try sema.namespaceLookupRef(
|
||||
block,
|
||||
|
|
|
|||
|
|
@ -853,16 +853,9 @@ pub const Object = struct {
|
|||
/// want to iterate over it while adding entries to it.
|
||||
pub const DITypeMap = std.AutoArrayHashMapUnmanaged(InternPool.Index, AnnotatedDITypePtr);
|
||||
|
||||
pub fn create(gpa: Allocator, options: link.Options) !*Object {
|
||||
const obj = try gpa.create(Object);
|
||||
errdefer gpa.destroy(obj);
|
||||
obj.* = try Object.init(gpa, options);
|
||||
return obj;
|
||||
}
|
||||
|
||||
pub fn init(gpa: Allocator, options: link.Options) !Object {
|
||||
const llvm_target_triple = try targetTriple(gpa, options.target);
|
||||
defer gpa.free(llvm_target_triple);
|
||||
pub fn create(arena: Allocator, options: link.File.OpenOptions) !*Object {
|
||||
const gpa = options.comp.gpa;
|
||||
const llvm_target_triple = try targetTriple(arena, options.target);
|
||||
|
||||
var builder = try Builder.init(.{
|
||||
.allocator = gpa,
|
||||
|
|
@ -899,19 +892,14 @@ pub const Object = struct {
|
|||
// TODO: the only concern I have with this is WASI as either host or target, should
|
||||
// we leave the paths as relative then?
|
||||
const compile_unit_dir_z = blk: {
|
||||
var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
|
||||
if (options.module) |mod| m: {
|
||||
const d = try mod.root_mod.root.joinStringZ(builder.gpa, "");
|
||||
const d = try mod.root_mod.root.joinStringZ(arena, "");
|
||||
if (d.len == 0) break :m;
|
||||
if (std.fs.path.isAbsolute(d)) break :blk d;
|
||||
const abs = std.fs.realpath(d, &buf) catch break :blk d;
|
||||
builder.gpa.free(d);
|
||||
break :blk try builder.gpa.dupeZ(u8, abs);
|
||||
break :blk std.fs.realpathAlloc(arena, d) catch d;
|
||||
}
|
||||
const cwd = try std.process.getCwd(&buf);
|
||||
break :blk try builder.gpa.dupeZ(u8, cwd);
|
||||
break :blk try std.process.getCwdAlloc(arena);
|
||||
};
|
||||
defer builder.gpa.free(compile_unit_dir_z);
|
||||
|
||||
builder.llvm.di_compile_unit = builder.llvm.di_builder.?.createCompileUnit(
|
||||
DW.LANG.C99,
|
||||
|
|
@ -989,7 +977,8 @@ pub const Object = struct {
|
|||
}
|
||||
}
|
||||
|
||||
return .{
|
||||
const obj = try arena.create(Object);
|
||||
obj.* = .{
|
||||
.gpa = gpa,
|
||||
.builder = builder,
|
||||
.module = options.module.?,
|
||||
|
|
@ -1009,9 +998,11 @@ pub const Object = struct {
|
|||
.null_opt_usize = .no_init,
|
||||
.struct_field_map = .{},
|
||||
};
|
||||
return obj;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Object, gpa: Allocator) void {
|
||||
pub fn deinit(self: *Object) void {
|
||||
const gpa = self.gpa;
|
||||
self.di_map.deinit(gpa);
|
||||
self.di_type_map.deinit(gpa);
|
||||
if (self.builder.useLibLlvm()) {
|
||||
|
|
@ -1028,11 +1019,6 @@ pub const Object = struct {
|
|||
self.* = undefined;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *Object, gpa: Allocator) void {
|
||||
self.deinit(gpa);
|
||||
gpa.destroy(self);
|
||||
}
|
||||
|
||||
fn locPath(
|
||||
arena: Allocator,
|
||||
opt_loc: ?Compilation.EmitLoc,
|
||||
|
|
@ -2899,7 +2885,7 @@ pub const Object = struct {
|
|||
fn getStackTraceType(o: *Object) Allocator.Error!Type {
|
||||
const mod = o.module;
|
||||
|
||||
const std_mod = mod.main_mod.deps.get("std").?;
|
||||
const std_mod = mod.std_mod;
|
||||
const std_file = (mod.importPkg(std_mod) catch unreachable).file;
|
||||
|
||||
const builtin_str = try mod.intern_pool.getOrPutString(mod.gpa, "builtin");
|
||||
|
|
|
|||
525
src/link.zig
525
src/link.zig
|
|
@ -66,237 +66,18 @@ pub fn hashAddFrameworks(man: *Cache.Manifest, hm: []const Framework) !void {
|
|||
|
||||
pub const producer_string = if (builtin.is_test) "zig test" else "zig " ++ build_options.version;
|
||||
|
||||
pub const Emit = struct {
|
||||
/// Where the output will go.
|
||||
directory: Compilation.Directory,
|
||||
/// Path to the output file, relative to `directory`.
|
||||
sub_path: []const u8,
|
||||
|
||||
/// Returns the full path to `basename` if it were in the same directory as the
|
||||
/// `Emit` sub_path.
|
||||
pub fn basenamePath(emit: Emit, arena: Allocator, basename: [:0]const u8) ![:0]const u8 {
|
||||
const full_path = if (emit.directory.path) |p|
|
||||
try fs.path.join(arena, &[_][]const u8{ p, emit.sub_path })
|
||||
else
|
||||
emit.sub_path;
|
||||
|
||||
if (fs.path.dirname(full_path)) |dirname| {
|
||||
return try fs.path.joinZ(arena, &.{ dirname, basename });
|
||||
} else {
|
||||
return basename;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const Options = struct {
|
||||
/// This is `null` when `-fno-emit-bin` is used.
|
||||
emit: ?Emit,
|
||||
/// This is `null` when not building a Windows DLL, or when `-fno-emit-implib` is used.
|
||||
implib_emit: ?Emit,
|
||||
/// This is non-null when `-femit-docs` is provided.
|
||||
docs_emit: ?Emit,
|
||||
target: std.Target,
|
||||
output_mode: std.builtin.OutputMode,
|
||||
link_mode: std.builtin.LinkMode,
|
||||
optimize_mode: std.builtin.OptimizeMode,
|
||||
machine_code_model: std.builtin.CodeModel,
|
||||
root_name: [:0]const u8,
|
||||
/// Not every Compilation compiles .zig code! For example you could do `zig build-exe foo.o`.
|
||||
module: ?*Module,
|
||||
/// The root path for the dynamic linker and system libraries (as well as frameworks on Darwin)
|
||||
sysroot: ?[]const u8,
|
||||
/// Used for calculating how much space to reserve for symbols in case the binary file
|
||||
/// does not already have a symbol table.
|
||||
symbol_count_hint: u64 = 32,
|
||||
/// Used for calculating how much space to reserve for executable program code in case
|
||||
/// the binary file does not already have such a section.
|
||||
program_code_size_hint: u64 = 256 * 1024,
|
||||
entry_addr: ?u64 = null,
|
||||
entry: ?[]const u8,
|
||||
stack_size_override: ?u64,
|
||||
image_base_override: ?u64,
|
||||
/// 0 means no stack protector
|
||||
/// other value means stack protector with that buffer size.
|
||||
stack_protector: u32,
|
||||
cache_mode: CacheMode,
|
||||
include_compiler_rt: bool,
|
||||
/// Set to `true` to omit debug info.
|
||||
strip: bool,
|
||||
/// If this is true then this link code is responsible for outputting an object
|
||||
/// file and then using LLD to link it together with the link options and other objects.
|
||||
/// Otherwise (depending on `use_llvm`) this link code directly outputs and updates the final binary.
|
||||
use_lld: bool,
|
||||
/// If this is true then this link code is responsible for making an LLVM IR Module,
|
||||
/// outputting it to an object file, and then linking that together with link options and
|
||||
/// other objects.
|
||||
/// Otherwise (depending on `use_lld`) this link code directly outputs and updates the final binary.
|
||||
use_llvm: bool,
|
||||
use_lib_llvm: bool,
|
||||
link_libc: bool,
|
||||
link_libcpp: bool,
|
||||
link_libunwind: bool,
|
||||
darwin_sdk_layout: ?DarwinSdkLayout,
|
||||
function_sections: bool,
|
||||
data_sections: bool,
|
||||
no_builtin: bool,
|
||||
eh_frame_hdr: bool,
|
||||
emit_relocs: bool,
|
||||
rdynamic: bool,
|
||||
z_nodelete: bool,
|
||||
z_notext: bool,
|
||||
z_defs: bool,
|
||||
z_origin: bool,
|
||||
z_nocopyreloc: bool,
|
||||
z_now: bool,
|
||||
z_relro: bool,
|
||||
z_common_page_size: ?u64,
|
||||
z_max_page_size: ?u64,
|
||||
tsaware: bool,
|
||||
nxcompat: bool,
|
||||
dynamicbase: bool,
|
||||
linker_optimization: u8,
|
||||
compress_debug_sections: CompressDebugSections,
|
||||
bind_global_refs_locally: bool,
|
||||
import_memory: bool,
|
||||
export_memory: bool,
|
||||
import_symbols: bool,
|
||||
import_table: bool,
|
||||
export_table: bool,
|
||||
initial_memory: ?u64,
|
||||
max_memory: ?u64,
|
||||
shared_memory: bool,
|
||||
export_symbol_names: []const []const u8,
|
||||
global_base: ?u64,
|
||||
is_native_os: bool,
|
||||
is_native_abi: bool,
|
||||
pic: bool,
|
||||
pie: bool,
|
||||
lto: bool,
|
||||
valgrind: bool,
|
||||
tsan: bool,
|
||||
stack_check: bool,
|
||||
red_zone: bool,
|
||||
omit_frame_pointer: bool,
|
||||
single_threaded: bool,
|
||||
verbose_link: bool,
|
||||
dll_export_fns: bool,
|
||||
error_return_tracing: bool,
|
||||
skip_linker_dependencies: bool,
|
||||
each_lib_rpath: bool,
|
||||
build_id: std.zig.BuildId,
|
||||
disable_lld_caching: bool,
|
||||
is_test: bool,
|
||||
hash_style: HashStyle,
|
||||
sort_section: ?SortSection,
|
||||
major_subsystem_version: ?u32,
|
||||
minor_subsystem_version: ?u32,
|
||||
gc_sections: ?bool = null,
|
||||
allow_shlib_undefined: ?bool,
|
||||
subsystem: ?std.Target.SubSystem,
|
||||
linker_script: ?[]const u8,
|
||||
version_script: ?[]const u8,
|
||||
soname: ?[]const u8,
|
||||
llvm_cpu_features: ?[*:0]const u8,
|
||||
print_gc_sections: bool,
|
||||
print_icf_sections: bool,
|
||||
print_map: bool,
|
||||
opt_bisect_limit: i32,
|
||||
|
||||
objects: []Compilation.LinkObject,
|
||||
framework_dirs: []const []const u8,
|
||||
frameworks: []const Framework,
|
||||
/// These are *always* dynamically linked. Static libraries will be
|
||||
/// provided as positional arguments.
|
||||
system_libs: std.StringArrayHashMapUnmanaged(SystemLib),
|
||||
wasi_emulated_libs: []const wasi_libc.CRTFile,
|
||||
// TODO: remove this. libraries are resolved by the frontend.
|
||||
lib_dirs: []const []const u8,
|
||||
rpath_list: []const []const u8,
|
||||
|
||||
/// List of symbols forced as undefined in the symbol table
|
||||
/// thus forcing their resolution by the linker.
|
||||
/// Corresponds to `-u <symbol>` for ELF/MachO and `/include:<symbol>` for COFF/PE.
|
||||
force_undefined_symbols: std.StringArrayHashMapUnmanaged(void),
|
||||
/// Use a wrapper function for symbol. Any undefined reference to symbol
|
||||
/// will be resolved to __wrap_symbol. Any undefined reference to
|
||||
/// __real_symbol will be resolved to symbol. This can be used to provide a
|
||||
/// wrapper for a system function. The wrapper function should be called
|
||||
/// __wrap_symbol. If it wishes to call the system function, it should call
|
||||
/// __real_symbol.
|
||||
symbol_wrap_set: std.StringArrayHashMapUnmanaged(void),
|
||||
|
||||
version: ?std.SemanticVersion,
|
||||
compatibility_version: ?std.SemanticVersion,
|
||||
libc_installation: ?*const LibCInstallation,
|
||||
|
||||
dwarf_format: ?std.dwarf.Format,
|
||||
|
||||
/// WASI-only. Type of WASI execution model ("command" or "reactor").
|
||||
wasi_exec_model: std.builtin.WasiExecModel = undefined,
|
||||
|
||||
/// (Zig compiler development) Enable dumping of linker's state as JSON.
|
||||
enable_link_snapshots: bool = false,
|
||||
|
||||
/// (Darwin) Install name for the dylib
|
||||
install_name: ?[]const u8 = null,
|
||||
|
||||
/// (Darwin) Path to entitlements file
|
||||
entitlements: ?[]const u8 = null,
|
||||
|
||||
/// (Darwin) size of the __PAGEZERO segment
|
||||
pagezero_size: ?u64 = null,
|
||||
|
||||
/// (Darwin) set minimum space for future expansion of the load commands
|
||||
headerpad_size: ?u32 = null,
|
||||
|
||||
/// (Darwin) set enough space as if all paths were MATPATHLEN
|
||||
headerpad_max_install_names: bool = false,
|
||||
|
||||
/// (Darwin) remove dylibs that are unreachable by the entry point or exported symbols
|
||||
dead_strip_dylibs: bool = false,
|
||||
|
||||
/// (Windows) PDB source path prefix to instruct the linker how to resolve relative
|
||||
/// paths when consolidating CodeView streams into a single PDB file.
|
||||
pdb_source_path: ?[]const u8 = null,
|
||||
|
||||
/// (Windows) PDB output path
|
||||
pdb_out_path: ?[]const u8 = null,
|
||||
|
||||
/// (Windows) .def file to specify when linking
|
||||
module_definition_file: ?[]const u8 = null,
|
||||
|
||||
/// (SPIR-V) whether to generate a structured control flow graph or not
|
||||
want_structured_cfg: ?bool = null,
|
||||
|
||||
pub fn effectiveOutputMode(options: Options) std.builtin.OutputMode {
|
||||
return if (options.use_lld) .Obj else options.output_mode;
|
||||
}
|
||||
|
||||
pub fn move(self: *Options) Options {
|
||||
const copied_state = self.*;
|
||||
self.system_libs = .{};
|
||||
self.force_undefined_symbols = .{};
|
||||
return copied_state;
|
||||
}
|
||||
};
|
||||
|
||||
pub const HashStyle = enum { sysv, gnu, both };
|
||||
|
||||
pub const CompressDebugSections = enum { none, zlib, zstd };
|
||||
|
||||
/// The filesystem layout of darwin SDK elements.
|
||||
pub const DarwinSdkLayout = enum {
|
||||
/// macOS SDK layout: TOP { /usr/include, /usr/lib, /System/Library/Frameworks }.
|
||||
sdk,
|
||||
/// Shipped libc layout: TOP { /lib/libc/include, /lib/libc/darwin, <NONE> }.
|
||||
vendored,
|
||||
};
|
||||
|
||||
pub const File = struct {
|
||||
tag: Tag,
|
||||
options: Options,
|
||||
|
||||
/// The owner of this output File.
|
||||
comp: *Compilation,
|
||||
emit: Compilation.Emit,
|
||||
|
||||
file: ?fs.File,
|
||||
allocator: Allocator,
|
||||
/// When linking with LLD, this linker code will output an object file only at
|
||||
/// this location, and then this path can be placed on the LLD linker line.
|
||||
intermediary_basename: ?[]const u8 = null,
|
||||
|
|
@ -307,103 +88,132 @@ pub const File = struct {
|
|||
|
||||
child_pid: ?std.ChildProcess.Id = null,
|
||||
|
||||
pub const OpenOptions = struct {
|
||||
comp: *Compilation,
|
||||
emit: Compilation.Emit,
|
||||
|
||||
symbol_count_hint: u64 = 32,
|
||||
program_code_size_hint: u64 = 256 * 1024,
|
||||
|
||||
/// Virtual address of the entry point procedure relative to image base.
|
||||
entry_addr: ?u64,
|
||||
stack_size_override: ?u64,
|
||||
image_base_override: ?u64,
|
||||
function_sections: bool,
|
||||
data_sections: bool,
|
||||
no_builtin: bool,
|
||||
eh_frame_hdr: bool,
|
||||
emit_relocs: bool,
|
||||
rdynamic: bool,
|
||||
optimization: u8,
|
||||
linker_script: ?[]const u8,
|
||||
z_nodelete: bool,
|
||||
z_notext: bool,
|
||||
z_defs: bool,
|
||||
z_origin: bool,
|
||||
z_nocopyreloc: bool,
|
||||
z_now: bool,
|
||||
z_relro: bool,
|
||||
z_common_page_size: ?u64,
|
||||
z_max_page_size: ?u64,
|
||||
tsaware: bool,
|
||||
nxcompat: bool,
|
||||
dynamicbase: bool,
|
||||
compress_debug_sections: CompressDebugSections,
|
||||
bind_global_refs_locally: bool,
|
||||
import_symbols: bool,
|
||||
import_table: bool,
|
||||
export_table: bool,
|
||||
initial_memory: ?u64,
|
||||
max_memory: ?u64,
|
||||
export_symbol_names: []const []const u8,
|
||||
global_base: ?u64,
|
||||
verbose_link: bool,
|
||||
dll_export_fns: bool,
|
||||
skip_linker_dependencies: bool,
|
||||
parent_compilation_link_libc: bool,
|
||||
each_lib_rpath: bool,
|
||||
build_id: std.zig.BuildId,
|
||||
disable_lld_caching: bool,
|
||||
hash_style: HashStyle,
|
||||
sort_section: ?SortSection,
|
||||
major_subsystem_version: ?u32,
|
||||
minor_subsystem_version: ?u32,
|
||||
gc_sections: ?bool = null,
|
||||
allow_shlib_undefined: ?bool,
|
||||
subsystem: ?std.Target.SubSystem,
|
||||
version_script: ?[]const u8,
|
||||
soname: ?[]const u8,
|
||||
print_gc_sections: bool,
|
||||
print_icf_sections: bool,
|
||||
print_map: bool,
|
||||
opt_bisect_limit: i32,
|
||||
|
||||
/// List of symbols forced as undefined in the symbol table
|
||||
/// thus forcing their resolution by the linker.
|
||||
/// Corresponds to `-u <symbol>` for ELF/MachO and `/include:<symbol>` for COFF/PE.
|
||||
force_undefined_symbols: std.StringArrayHashMapUnmanaged(void),
|
||||
/// Use a wrapper function for symbol. Any undefined reference to symbol
|
||||
/// will be resolved to __wrap_symbol. Any undefined reference to
|
||||
/// __real_symbol will be resolved to symbol. This can be used to provide a
|
||||
/// wrapper for a system function. The wrapper function should be called
|
||||
/// __wrap_symbol. If it wishes to call the system function, it should call
|
||||
/// __real_symbol.
|
||||
symbol_wrap_set: std.StringArrayHashMapUnmanaged(void),
|
||||
|
||||
compatibility_version: ?std.SemanticVersion,
|
||||
|
||||
dwarf_format: ?std.dwarf.Format,
|
||||
|
||||
// TODO: remove this. libraries are resolved by the frontend.
|
||||
lib_dirs: []const []const u8,
|
||||
rpath_list: []const []const u8,
|
||||
|
||||
/// (Zig compiler development) Enable dumping of linker's state as JSON.
|
||||
enable_link_snapshots: bool,
|
||||
|
||||
/// (Darwin) Install name for the dylib
|
||||
install_name: ?[]const u8,
|
||||
/// (Darwin) Path to entitlements file
|
||||
entitlements: ?[]const u8,
|
||||
/// (Darwin) size of the __PAGEZERO segment
|
||||
pagezero_size: ?u64,
|
||||
/// (Darwin) set minimum space for future expansion of the load commands
|
||||
headerpad_size: ?u32,
|
||||
/// (Darwin) set enough space as if all paths were MATPATHLEN
|
||||
headerpad_max_install_names: bool,
|
||||
/// (Darwin) remove dylibs that are unreachable by the entry point or exported symbols
|
||||
dead_strip_dylibs: bool,
|
||||
framework_dirs: []const []const u8,
|
||||
frameworks: []const Framework,
|
||||
darwin_sdk_layout: ?MachO.SdkLayout,
|
||||
|
||||
/// (Windows) PDB source path prefix to instruct the linker how to resolve relative
|
||||
/// paths when consolidating CodeView streams into a single PDB file.
|
||||
pdb_source_path: ?[]const u8,
|
||||
/// (Windows) PDB output path
|
||||
pdb_out_path: ?[]const u8,
|
||||
/// (Windows) .def file to specify when linking
|
||||
module_definition_file: ?[]const u8,
|
||||
|
||||
/// (SPIR-V) whether to generate a structured control flow graph or not
|
||||
want_structured_cfg: ?bool,
|
||||
|
||||
wasi_emulated_libs: []const wasi_libc.CRTFile,
|
||||
};
|
||||
|
||||
/// Attempts incremental linking, if the file already exists. If
|
||||
/// incremental linking fails, falls back to truncating the file and
|
||||
/// rewriting it. A malicious file is detected as incremental link failure
|
||||
/// and does not cause Illegal Behavior. This operation is not atomic.
|
||||
pub fn openPath(allocator: Allocator, options: Options) !*File {
|
||||
const have_macho = !build_options.only_c;
|
||||
if (have_macho and options.target.ofmt == .macho) {
|
||||
return &(try MachO.openPath(allocator, options)).base;
|
||||
/// `arena` is used for allocations with the same lifetime as the created File.
|
||||
pub fn open(arena: Allocator, options: OpenOptions) !*File {
|
||||
switch (Tag.fromObjectFormat(options.comp.root_mod.resolved_target.result.ofmt)) {
|
||||
inline else => |tag| {
|
||||
const ptr = try tag.Type().open(arena, options);
|
||||
return &ptr.base;
|
||||
},
|
||||
}
|
||||
|
||||
if (options.emit == null) {
|
||||
return switch (options.target.ofmt) {
|
||||
.coff => &(try Coff.createEmpty(allocator, options)).base,
|
||||
.elf => &(try Elf.createEmpty(allocator, options)).base,
|
||||
.macho => unreachable,
|
||||
.wasm => &(try Wasm.createEmpty(allocator, options)).base,
|
||||
.plan9 => return &(try Plan9.createEmpty(allocator, options)).base,
|
||||
.c => unreachable, // Reported error earlier.
|
||||
.spirv => &(try SpirV.createEmpty(allocator, options)).base,
|
||||
.nvptx => &(try NvPtx.createEmpty(allocator, options)).base,
|
||||
.hex => return error.HexObjectFormatUnimplemented,
|
||||
.raw => return error.RawObjectFormatUnimplemented,
|
||||
.dxcontainer => return error.DirectXContainerObjectFormatUnimplemented,
|
||||
};
|
||||
}
|
||||
const emit = options.emit.?;
|
||||
const use_lld = build_options.have_llvm and options.use_lld; // comptime-known false when !have_llvm
|
||||
const sub_path = if (use_lld) blk: {
|
||||
if (options.module == null) {
|
||||
// No point in opening a file, we would not write anything to it.
|
||||
// Initialize with empty.
|
||||
return switch (options.target.ofmt) {
|
||||
.coff => &(try Coff.createEmpty(allocator, options)).base,
|
||||
.elf => &(try Elf.createEmpty(allocator, options)).base,
|
||||
.macho => unreachable,
|
||||
.plan9 => &(try Plan9.createEmpty(allocator, options)).base,
|
||||
.wasm => &(try Wasm.createEmpty(allocator, options)).base,
|
||||
.c => unreachable, // Reported error earlier.
|
||||
.spirv => &(try SpirV.createEmpty(allocator, options)).base,
|
||||
.nvptx => &(try NvPtx.createEmpty(allocator, options)).base,
|
||||
.hex => return error.HexObjectFormatUnimplemented,
|
||||
.raw => return error.RawObjectFormatUnimplemented,
|
||||
.dxcontainer => return error.DirectXContainerObjectFormatUnimplemented,
|
||||
};
|
||||
}
|
||||
// Open a temporary object file, not the final output file because we
|
||||
// want to link with LLD.
|
||||
break :blk try std.fmt.allocPrint(allocator, "{s}{s}", .{
|
||||
emit.sub_path, options.target.ofmt.fileExt(options.target.cpu.arch),
|
||||
});
|
||||
} else emit.sub_path;
|
||||
errdefer if (use_lld) allocator.free(sub_path);
|
||||
|
||||
const file: *File = f: {
|
||||
switch (options.target.ofmt) {
|
||||
.coff => {
|
||||
if (build_options.only_c) unreachable;
|
||||
break :f &(try Coff.openPath(allocator, sub_path, options)).base;
|
||||
},
|
||||
.elf => {
|
||||
if (build_options.only_c) unreachable;
|
||||
break :f &(try Elf.openPath(allocator, sub_path, options)).base;
|
||||
},
|
||||
.macho => unreachable,
|
||||
.plan9 => {
|
||||
if (build_options.only_c) unreachable;
|
||||
break :f &(try Plan9.openPath(allocator, sub_path, options)).base;
|
||||
},
|
||||
.wasm => {
|
||||
if (build_options.only_c) unreachable;
|
||||
break :f &(try Wasm.openPath(allocator, sub_path, options)).base;
|
||||
},
|
||||
.c => {
|
||||
break :f &(try C.openPath(allocator, sub_path, options)).base;
|
||||
},
|
||||
.spirv => {
|
||||
if (build_options.only_c) unreachable;
|
||||
break :f &(try SpirV.openPath(allocator, sub_path, options)).base;
|
||||
},
|
||||
.nvptx => {
|
||||
if (build_options.only_c) unreachable;
|
||||
break :f &(try NvPtx.openPath(allocator, sub_path, options)).base;
|
||||
},
|
||||
.hex => return error.HexObjectFormatUnimplemented,
|
||||
.raw => return error.RawObjectFormatUnimplemented,
|
||||
.dxcontainer => return error.DirectXContainerObjectFormatUnimplemented,
|
||||
}
|
||||
};
|
||||
|
||||
if (use_lld) {
|
||||
// TODO this intermediary_basename isn't enough; in the case of `zig build-exe`,
|
||||
// we also want to put the intermediary object file in the cache while the
|
||||
// main emit directory is the cwd.
|
||||
file.intermediary_basename = sub_path;
|
||||
}
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
pub fn cast(base: *File, comptime T: type) ?*T {
|
||||
|
|
@ -664,56 +474,45 @@ pub const File = struct {
|
|||
pub fn destroy(base: *File) void {
|
||||
base.releaseLock();
|
||||
if (base.file) |f| f.close();
|
||||
if (base.intermediary_basename) |sub_path| base.allocator.free(sub_path);
|
||||
base.options.system_libs.deinit(base.allocator);
|
||||
base.options.force_undefined_symbols.deinit(base.allocator);
|
||||
switch (base.tag) {
|
||||
.coff => {
|
||||
if (build_options.only_c) unreachable;
|
||||
const parent = @fieldParentPtr(Coff, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
.elf => {
|
||||
if (build_options.only_c) unreachable;
|
||||
const parent = @fieldParentPtr(Elf, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
.macho => {
|
||||
if (build_options.only_c) unreachable;
|
||||
const parent = @fieldParentPtr(MachO, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
.c => {
|
||||
const parent = @fieldParentPtr(C, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
.wasm => {
|
||||
if (build_options.only_c) unreachable;
|
||||
const parent = @fieldParentPtr(Wasm, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
.spirv => {
|
||||
if (build_options.only_c) unreachable;
|
||||
const parent = @fieldParentPtr(SpirV, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
.plan9 => {
|
||||
if (build_options.only_c) unreachable;
|
||||
const parent = @fieldParentPtr(Plan9, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
.nvptx => {
|
||||
if (build_options.only_c) unreachable;
|
||||
const parent = @fieldParentPtr(NvPtx, "base", base);
|
||||
parent.deinit();
|
||||
base.allocator.destroy(parent);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
@ -1197,6 +996,35 @@ pub const File = struct {
|
|||
spirv,
|
||||
plan9,
|
||||
nvptx,
|
||||
|
||||
pub fn Type(comptime tag: Tag) type {
|
||||
return switch (tag) {
|
||||
.coff => Coff,
|
||||
.elf => Elf,
|
||||
.macho => MachO,
|
||||
.c => C,
|
||||
.wasm => Wasm,
|
||||
.spirv => SpirV,
|
||||
.plan9 => Plan9,
|
||||
.nvptx => NvPtx,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn fromObjectFormat(ofmt: std.Target.ObjectFormat) Tag {
|
||||
return switch (ofmt) {
|
||||
.coff => .coff,
|
||||
.elf => .elf,
|
||||
.macho => .macho,
|
||||
.wasm => .wasm,
|
||||
.plan9 => .plan9,
|
||||
.c => .c,
|
||||
.spirv => .spirv,
|
||||
.nvptx => .nvptx,
|
||||
.hex => @panic("TODO implement hex object format"),
|
||||
.raw => @panic("TODO implement raw object format"),
|
||||
.dxcontainer => @panic("TODO implement dxcontainer object format"),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub const ErrorFlags = struct {
|
||||
|
|
@ -1235,6 +1063,33 @@ pub const File = struct {
|
|||
}
|
||||
};
|
||||
|
||||
pub fn effectiveOutputMode(
|
||||
use_lld: bool,
|
||||
output_mode: std.builtin.OutputMode,
|
||||
) std.builtin.OutputMode {
|
||||
return if (use_lld) .Obj else output_mode;
|
||||
}
|
||||
|
||||
pub fn determineMode(
|
||||
use_lld: bool,
|
||||
output_mode: std.builtin.OutputMode,
|
||||
link_mode: std.builtin.LinkMode,
|
||||
) fs.File.Mode {
|
||||
// On common systems with a 0o022 umask, 0o777 will still result in a file created
|
||||
// with 0o755 permissions, but it works appropriately if the system is configured
|
||||
// more leniently. As another data point, C's fopen seems to open files with the
|
||||
// 666 mode.
|
||||
const executable_mode = if (builtin.target.os.tag == .windows) 0 else 0o777;
|
||||
switch (effectiveOutputMode(use_lld, output_mode)) {
|
||||
.Lib => return switch (link_mode) {
|
||||
.Dynamic => executable_mode,
|
||||
.Static => fs.File.default_mode,
|
||||
},
|
||||
.Exe => return executable_mode,
|
||||
.Obj => return fs.File.default_mode,
|
||||
}
|
||||
}
|
||||
|
||||
pub const C = @import("link/C.zig");
|
||||
pub const Coff = @import("link/Coff.zig");
|
||||
pub const Plan9 = @import("link/Plan9.zig");
|
||||
|
|
@ -1245,19 +1100,3 @@ pub const File = struct {
|
|||
pub const NvPtx = @import("link/NvPtx.zig");
|
||||
pub const Dwarf = @import("link/Dwarf.zig");
|
||||
};
|
||||
|
||||
pub fn determineMode(options: Options) fs.File.Mode {
|
||||
// On common systems with a 0o022 umask, 0o777 will still result in a file created
|
||||
// with 0o755 permissions, but it works appropriately if the system is configured
|
||||
// more leniently. As another data point, C's fopen seems to open files with the
|
||||
// 666 mode.
|
||||
const executable_mode = if (builtin.target.os.tag == .windows) 0 else 0o777;
|
||||
switch (options.effectiveOutputMode()) {
|
||||
.Lib => return switch (options.link_mode) {
|
||||
.Dynamic => executable_mode,
|
||||
.Static => fs.File.default_mode,
|
||||
},
|
||||
.Exe => return executable_mode,
|
||||
.Obj => return fs.File.default_mode,
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -48,9 +48,6 @@ got_table_count_dirty: bool = true,
|
|||
got_table_contents_dirty: bool = true,
|
||||
imports_count_dirty: bool = true,
|
||||
|
||||
/// Virtual address of the entry point procedure relative to image base.
|
||||
entry_addr: ?u32 = null,
|
||||
|
||||
/// Table of tracked LazySymbols.
|
||||
lazy_syms: LazySymbolTable = .{},
|
||||
|
||||
|
|
@ -226,44 +223,150 @@ const ideal_factor = 3;
|
|||
const minimum_text_block_size = 64;
|
||||
pub const min_text_capacity = padToIdeal(minimum_text_block_size);
|
||||
|
||||
pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Options) !*Coff {
|
||||
assert(options.target.ofmt == .coff);
|
||||
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Coff {
|
||||
if (build_options.only_c) unreachable;
|
||||
const target = options.comp.root_mod.resolved_target.result;
|
||||
assert(target.ofmt == .coff);
|
||||
|
||||
if (options.use_llvm) {
|
||||
return createEmpty(allocator, options);
|
||||
}
|
||||
|
||||
const self = try createEmpty(allocator, options);
|
||||
const self = try createEmpty(arena, options);
|
||||
errdefer self.base.destroy();
|
||||
|
||||
const file = try options.emit.?.directory.handle.createFile(sub_path, .{
|
||||
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
|
||||
const use_llvm = build_options.have_llvm and options.comp.config.use_llvm;
|
||||
|
||||
if (use_lld and use_llvm) {
|
||||
// LLVM emits the object file; LLD links it into the final product.
|
||||
return self;
|
||||
}
|
||||
|
||||
const sub_path = if (!use_lld) options.emit.sub_path else p: {
|
||||
// Open a temporary object file, not the final output file because we
|
||||
// want to link with LLD.
|
||||
const o_file_path = try std.fmt.allocPrint(arena, "{s}{s}", .{
|
||||
options.emit.sub_path, target.ofmt.fileExt(target.cpu.arch),
|
||||
});
|
||||
self.base.intermediary_basename = o_file_path;
|
||||
break :p o_file_path;
|
||||
};
|
||||
|
||||
self.base.file = try options.emit.directory.handle.createFile(sub_path, .{
|
||||
.truncate = false,
|
||||
.read = true,
|
||||
.mode = link.determineMode(options),
|
||||
.mode = link.File.determineMode(
|
||||
use_lld,
|
||||
options.comp.config.output_mode,
|
||||
options.comp.config.link_mode,
|
||||
),
|
||||
});
|
||||
self.base.file = file;
|
||||
|
||||
try self.populateMissingMetadata();
|
||||
assert(self.llvm_object == null);
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
try self.strtab.buffer.ensureUnusedCapacity(gpa, @sizeOf(u32));
|
||||
self.strtab.buffer.appendNTimesAssumeCapacity(0, @sizeOf(u32));
|
||||
|
||||
try self.temp_strtab.buffer.append(gpa, 0);
|
||||
|
||||
// Index 0 is always a null symbol.
|
||||
try self.locals.append(gpa, .{
|
||||
.name = [_]u8{0} ** 8,
|
||||
.value = 0,
|
||||
.section_number = .UNDEFINED,
|
||||
.type = .{ .base_type = .NULL, .complex_type = .NULL },
|
||||
.storage_class = .NULL,
|
||||
.number_of_aux_symbols = 0,
|
||||
});
|
||||
|
||||
if (self.text_section_index == null) {
|
||||
const file_size: u32 = @intCast(options.program_code_size_hint);
|
||||
self.text_section_index = try self.allocateSection(".text", file_size, .{
|
||||
.CNT_CODE = 1,
|
||||
.MEM_EXECUTE = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.got_section_index == null) {
|
||||
const file_size = @as(u32, @intCast(options.symbol_count_hint)) * self.ptr_width.size();
|
||||
self.got_section_index = try self.allocateSection(".got", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.rdata_section_index == null) {
|
||||
const file_size: u32 = self.page_size;
|
||||
self.rdata_section_index = try self.allocateSection(".rdata", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.data_section_index == null) {
|
||||
const file_size: u32 = self.page_size;
|
||||
self.data_section_index = try self.allocateSection(".data", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
.MEM_WRITE = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.idata_section_index == null) {
|
||||
const file_size = @as(u32, @intCast(options.symbol_count_hint)) * self.ptr_width.size();
|
||||
self.idata_section_index = try self.allocateSection(".idata", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.reloc_section_index == null) {
|
||||
const file_size = @as(u32, @intCast(options.symbol_count_hint)) * @sizeOf(coff.BaseRelocation);
|
||||
self.reloc_section_index = try self.allocateSection(".reloc", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_DISCARDABLE = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.strtab_offset == null) {
|
||||
const file_size = @as(u32, @intCast(self.strtab.buffer.items.len));
|
||||
self.strtab_offset = self.findFreeSpace(file_size, @alignOf(u32)); // 4bytes aligned seems like a good idea here
|
||||
log.debug("found strtab free space 0x{x} to 0x{x}", .{ self.strtab_offset.?, self.strtab_offset.? + file_size });
|
||||
}
|
||||
|
||||
{
|
||||
// We need to find out what the max file offset is according to section headers.
|
||||
// Otherwise, we may end up with an COFF binary with file size not matching the final section's
|
||||
// offset + it's filesize.
|
||||
// TODO I don't like this here one bit
|
||||
var max_file_offset: u64 = 0;
|
||||
for (self.sections.items(.header)) |header| {
|
||||
if (header.pointer_to_raw_data + header.size_of_raw_data > max_file_offset) {
|
||||
max_file_offset = header.pointer_to_raw_data + header.size_of_raw_data;
|
||||
}
|
||||
}
|
||||
try self.base.file.?.pwriteAll(&[_]u8{0}, max_file_offset);
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createEmpty(gpa: Allocator, options: link.Options) !*Coff {
|
||||
const ptr_width: PtrWidth = switch (options.target.ptrBitWidth()) {
|
||||
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*Coff {
|
||||
const target = options.comp.root_mod.resolved_target.result;
|
||||
const ptr_width: PtrWidth = switch (target.ptrBitWidth()) {
|
||||
0...32 => .p32,
|
||||
33...64 => .p64,
|
||||
else => return error.UnsupportedCOFFArchitecture,
|
||||
};
|
||||
const page_size: u32 = switch (options.target.cpu.arch) {
|
||||
const page_size: u32 = switch (target.cpu.arch) {
|
||||
else => 0x1000,
|
||||
};
|
||||
const self = try gpa.create(Coff);
|
||||
errdefer gpa.destroy(self);
|
||||
const self = try arena.create(Coff);
|
||||
self.* = .{
|
||||
.base = .{
|
||||
.tag = .coff,
|
||||
.options = options,
|
||||
.allocator = gpa,
|
||||
.comp = options.comp,
|
||||
.emit = options.emit,
|
||||
.file = null,
|
||||
},
|
||||
.ptr_width = ptr_width,
|
||||
|
|
@ -271,16 +374,17 @@ pub fn createEmpty(gpa: Allocator, options: link.Options) !*Coff {
|
|||
.data_directories = comptime mem.zeroes([coff.IMAGE_NUMBEROF_DIRECTORY_ENTRIES]coff.ImageDataDirectory),
|
||||
};
|
||||
|
||||
if (options.use_llvm) {
|
||||
self.llvm_object = try LlvmObject.create(gpa, options);
|
||||
const use_llvm = build_options.have_llvm and options.comp.config.use_llvm;
|
||||
if (use_llvm and options.comp.config.have_zcu) {
|
||||
self.llvm_object = try LlvmObject.create(arena, options);
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Coff) void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
if (self.llvm_object) |llvm_object| llvm_object.destroy(gpa);
|
||||
if (self.llvm_object) |llvm_object| llvm_object.deinit();
|
||||
|
||||
for (self.objects.items) |*object| {
|
||||
object.deinit(gpa);
|
||||
|
|
@ -349,97 +453,6 @@ pub fn deinit(self: *Coff) void {
|
|||
self.base_relocs.deinit(gpa);
|
||||
}
|
||||
|
||||
fn populateMissingMetadata(self: *Coff) !void {
|
||||
assert(self.llvm_object == null);
|
||||
const gpa = self.base.allocator;
|
||||
|
||||
try self.strtab.buffer.ensureUnusedCapacity(gpa, @sizeOf(u32));
|
||||
self.strtab.buffer.appendNTimesAssumeCapacity(0, @sizeOf(u32));
|
||||
|
||||
try self.temp_strtab.buffer.append(gpa, 0);
|
||||
|
||||
// Index 0 is always a null symbol.
|
||||
try self.locals.append(gpa, .{
|
||||
.name = [_]u8{0} ** 8,
|
||||
.value = 0,
|
||||
.section_number = .UNDEFINED,
|
||||
.type = .{ .base_type = .NULL, .complex_type = .NULL },
|
||||
.storage_class = .NULL,
|
||||
.number_of_aux_symbols = 0,
|
||||
});
|
||||
|
||||
if (self.text_section_index == null) {
|
||||
const file_size = @as(u32, @intCast(self.base.options.program_code_size_hint));
|
||||
self.text_section_index = try self.allocateSection(".text", file_size, .{
|
||||
.CNT_CODE = 1,
|
||||
.MEM_EXECUTE = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.got_section_index == null) {
|
||||
const file_size = @as(u32, @intCast(self.base.options.symbol_count_hint)) * self.ptr_width.size();
|
||||
self.got_section_index = try self.allocateSection(".got", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.rdata_section_index == null) {
|
||||
const file_size: u32 = self.page_size;
|
||||
self.rdata_section_index = try self.allocateSection(".rdata", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.data_section_index == null) {
|
||||
const file_size: u32 = self.page_size;
|
||||
self.data_section_index = try self.allocateSection(".data", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
.MEM_WRITE = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.idata_section_index == null) {
|
||||
const file_size = @as(u32, @intCast(self.base.options.symbol_count_hint)) * self.ptr_width.size();
|
||||
self.idata_section_index = try self.allocateSection(".idata", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.reloc_section_index == null) {
|
||||
const file_size = @as(u32, @intCast(self.base.options.symbol_count_hint)) * @sizeOf(coff.BaseRelocation);
|
||||
self.reloc_section_index = try self.allocateSection(".reloc", file_size, .{
|
||||
.CNT_INITIALIZED_DATA = 1,
|
||||
.MEM_DISCARDABLE = 1,
|
||||
.MEM_READ = 1,
|
||||
});
|
||||
}
|
||||
|
||||
if (self.strtab_offset == null) {
|
||||
const file_size = @as(u32, @intCast(self.strtab.buffer.items.len));
|
||||
self.strtab_offset = self.findFreeSpace(file_size, @alignOf(u32)); // 4bytes aligned seems like a good idea here
|
||||
log.debug("found strtab free space 0x{x} to 0x{x}", .{ self.strtab_offset.?, self.strtab_offset.? + file_size });
|
||||
}
|
||||
|
||||
{
|
||||
// We need to find out what the max file offset is according to section headers.
|
||||
// Otherwise, we may end up with an COFF binary with file size not matching the final section's
|
||||
// offset + it's filesize.
|
||||
// TODO I don't like this here one bit
|
||||
var max_file_offset: u64 = 0;
|
||||
for (self.sections.items(.header)) |header| {
|
||||
if (header.pointer_to_raw_data + header.size_of_raw_data > max_file_offset) {
|
||||
max_file_offset = header.pointer_to_raw_data + header.size_of_raw_data;
|
||||
}
|
||||
}
|
||||
try self.base.file.?.pwriteAll(&[_]u8{0}, max_file_offset);
|
||||
}
|
||||
}
|
||||
|
||||
fn allocateSection(self: *Coff, name: []const u8, size: u32, flags: coff.SectionHeaderFlags) !u16 {
|
||||
const index = @as(u16, @intCast(self.sections.slice().len));
|
||||
const off = self.findFreeSpace(size, default_file_alignment);
|
||||
|
|
@ -471,8 +484,9 @@ fn allocateSection(self: *Coff, name: []const u8, size: u32, flags: coff.Section
|
|||
.number_of_linenumbers = 0,
|
||||
.flags = flags,
|
||||
};
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.setSectionName(&header, name);
|
||||
try self.sections.append(self.base.allocator, .{ .header = header });
|
||||
try self.sections.append(gpa, .{ .header = header });
|
||||
return index;
|
||||
}
|
||||
|
||||
|
|
@ -654,7 +668,7 @@ fn allocateAtom(self: *Coff, atom_index: Atom.Index, new_atom_size: u32, alignme
|
|||
}
|
||||
|
||||
pub fn allocateSymbol(self: *Coff) !u32 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.locals.ensureUnusedCapacity(gpa, 1);
|
||||
|
||||
const index = blk: {
|
||||
|
|
@ -682,7 +696,7 @@ pub fn allocateSymbol(self: *Coff) !u32 {
|
|||
}
|
||||
|
||||
fn allocateGlobal(self: *Coff) !u32 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.globals.ensureUnusedCapacity(gpa, 1);
|
||||
|
||||
const index = blk: {
|
||||
|
|
@ -706,15 +720,16 @@ fn allocateGlobal(self: *Coff) !u32 {
|
|||
}
|
||||
|
||||
fn addGotEntry(self: *Coff, target: SymbolWithLoc) !void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
if (self.got_table.lookup.contains(target)) return;
|
||||
const got_index = try self.got_table.allocateEntry(self.base.allocator, target);
|
||||
const got_index = try self.got_table.allocateEntry(gpa, target);
|
||||
try self.writeOffsetTableEntry(got_index);
|
||||
self.got_table_count_dirty = true;
|
||||
self.markRelocsDirtyByTarget(target);
|
||||
}
|
||||
|
||||
pub fn createAtom(self: *Coff) !Atom.Index {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const atom_index = @as(Atom.Index, @intCast(self.atoms.items.len));
|
||||
const atom = try self.atoms.addOne(gpa);
|
||||
const sym_index = try self.allocateSymbol();
|
||||
|
|
@ -759,7 +774,7 @@ fn writeAtom(self: *Coff, atom_index: Atom.Index, code: []u8) !void {
|
|||
file_offset + code.len,
|
||||
});
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
// Gather relocs which can be resolved.
|
||||
// We need to do this as we will be applying different slide values depending
|
||||
|
|
@ -870,7 +885,7 @@ fn writeOffsetTableEntry(self: *Coff, index: usize) !void {
|
|||
|
||||
if (is_hot_update_compatible) {
|
||||
if (self.base.child_pid) |handle| {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const slide = @intFromPtr(self.hot_state.loaded_base_address.?);
|
||||
const actual_vmaddr = vmaddr + slide;
|
||||
const pvaddr = @as(*anyopaque, @ptrFromInt(actual_vmaddr));
|
||||
|
|
@ -974,7 +989,7 @@ pub fn ptraceDetach(self: *Coff, handle: std.ChildProcess.Id) void {
|
|||
fn freeAtom(self: *Coff, atom_index: Atom.Index) void {
|
||||
log.debug("freeAtom {d}", .{atom_index});
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
// Remove any relocs and base relocs associated with this Atom
|
||||
Atom.freeRelocations(self, atom_index);
|
||||
|
|
@ -1061,7 +1076,8 @@ pub fn updateFunc(self: *Coff, mod: *Module, func_index: InternPool.Index, air:
|
|||
self.freeUnnamedConsts(decl_index);
|
||||
Atom.freeRelocations(self, atom_index);
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
|
||||
const gpa = self.base.comp.gpa;
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
||||
const res = try codegen.generateFunction(
|
||||
|
|
@ -1090,7 +1106,7 @@ pub fn updateFunc(self: *Coff, mod: *Module, func_index: InternPool.Index, air:
|
|||
}
|
||||
|
||||
pub fn lowerUnnamedConst(self: *Coff, tv: TypedValue, decl_index: InternPool.DeclIndex) !u32 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const decl = mod.declPtr(decl_index);
|
||||
const gop = try self.unnamed_const_atoms.getOrPut(gpa, decl_index);
|
||||
|
|
@ -1121,7 +1137,7 @@ const LowerConstResult = union(enum) {
|
|||
};
|
||||
|
||||
fn lowerConst(self: *Coff, name: []const u8, tv: TypedValue, required_alignment: InternPool.Alignment, sect_id: u16, src_loc: Module.SrcLoc) !LowerConstResult {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
|
@ -1174,13 +1190,14 @@ pub fn updateDecl(
|
|||
return;
|
||||
}
|
||||
|
||||
const gpa = self.base.comp.gpa;
|
||||
if (decl.isExtern(mod)) {
|
||||
// TODO make this part of getGlobalSymbol
|
||||
const variable = decl.getOwnedVariable(mod).?;
|
||||
const name = mod.intern_pool.stringToSlice(decl.name);
|
||||
const lib_name = mod.intern_pool.stringToSliceUnwrap(variable.lib_name);
|
||||
const global_index = try self.getGlobalSymbol(name, lib_name);
|
||||
try self.need_got_table.put(self.base.allocator, global_index, {});
|
||||
try self.need_got_table.put(gpa, global_index, {});
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -1188,7 +1205,7 @@ pub fn updateDecl(
|
|||
Atom.freeRelocations(self, atom_index);
|
||||
const atom = self.getAtom(atom_index);
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
||||
const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
|
||||
|
|
@ -1220,7 +1237,7 @@ fn updateLazySymbolAtom(
|
|||
atom_index: Atom.Index,
|
||||
section_index: u16,
|
||||
) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
|
||||
var required_alignment: InternPool.Alignment = .none;
|
||||
|
|
@ -1281,8 +1298,9 @@ fn updateLazySymbolAtom(
|
|||
}
|
||||
|
||||
pub fn getOrCreateAtomForLazySymbol(self: *Coff, sym: link.File.LazySymbol) !Atom.Index {
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const gop = try self.lazy_syms.getOrPut(self.base.allocator, sym.getDecl(mod));
|
||||
const gop = try self.lazy_syms.getOrPut(gpa, sym.getDecl(mod));
|
||||
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
|
||||
if (!gop.found_existing) gop.value_ptr.* = .{};
|
||||
const metadata: struct { atom: *Atom.Index, state: *LazySymbolMetadata.State } = switch (sym.kind) {
|
||||
|
|
@ -1305,7 +1323,8 @@ pub fn getOrCreateAtomForLazySymbol(self: *Coff, sym: link.File.LazySymbol) !Ato
|
|||
}
|
||||
|
||||
pub fn getOrCreateAtomForDecl(self: *Coff, decl_index: InternPool.DeclIndex) !Atom.Index {
|
||||
const gop = try self.decls.getOrPut(self.base.allocator, decl_index);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const gop = try self.decls.getOrPut(gpa, decl_index);
|
||||
if (!gop.found_existing) {
|
||||
gop.value_ptr.* = .{
|
||||
.atom = try self.createAtom(),
|
||||
|
|
@ -1401,7 +1420,7 @@ fn updateDeclCode(self: *Coff, decl_index: InternPool.DeclIndex, code: []u8, com
|
|||
}
|
||||
|
||||
fn freeUnnamedConsts(self: *Coff, decl_index: InternPool.DeclIndex) void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const unnamed_consts = self.unnamed_const_atoms.getPtr(decl_index) orelse return;
|
||||
for (unnamed_consts.items) |atom_index| {
|
||||
self.freeAtom(atom_index);
|
||||
|
|
@ -1412,6 +1431,7 @@ fn freeUnnamedConsts(self: *Coff, decl_index: InternPool.DeclIndex) void {
|
|||
pub fn freeDecl(self: *Coff, decl_index: InternPool.DeclIndex) void {
|
||||
if (self.llvm_object) |llvm_object| return llvm_object.freeDecl(decl_index);
|
||||
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const decl = mod.declPtr(decl_index);
|
||||
|
||||
|
|
@ -1421,7 +1441,7 @@ pub fn freeDecl(self: *Coff, decl_index: InternPool.DeclIndex) void {
|
|||
var kv = const_kv;
|
||||
self.freeAtom(kv.value.atom);
|
||||
self.freeUnnamedConsts(decl_index);
|
||||
kv.value.exports.deinit(self.base.allocator);
|
||||
kv.value.exports.deinit(gpa);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1476,7 +1496,7 @@ pub fn updateExports(
|
|||
|
||||
if (self.base.options.emit == null) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const metadata = switch (exported) {
|
||||
.decl_index => |decl_index| blk: {
|
||||
|
|
@ -1574,7 +1594,7 @@ pub fn deleteDeclExport(
|
|||
const name = mod.intern_pool.stringToSlice(name_ip);
|
||||
const sym_index = metadata.getExportPtr(self, name) orelse return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index.*, .file = null };
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
log.debug("deleting export '{s}'", .{name});
|
||||
|
|
@ -1602,7 +1622,7 @@ pub fn deleteDeclExport(
|
|||
}
|
||||
|
||||
fn resolveGlobalSymbol(self: *Coff, current: SymbolWithLoc) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const sym = self.getSymbol(current);
|
||||
const sym_name = self.getSymbolName(current);
|
||||
|
||||
|
|
@ -1653,7 +1673,7 @@ pub fn flushModule(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Nod
|
|||
sub_prog_node.activate();
|
||||
defer sub_prog_node.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const module = self.base.options.module orelse return error.LinkingWithoutZigSourceUnimplemented;
|
||||
|
||||
|
|
@ -1794,7 +1814,7 @@ pub fn lowerAnonDecl(
|
|||
explicit_alignment: InternPool.Alignment,
|
||||
src_loc: Module.SrcLoc,
|
||||
) !codegen.Result {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const ty = Type.fromInterned(mod.intern_pool.typeOf(decl_val));
|
||||
const decl_alignment = switch (explicit_alignment) {
|
||||
|
|
@ -1868,7 +1888,7 @@ pub fn getGlobalSymbol(self: *Coff, name: []const u8, lib_name_name: ?[]const u8
|
|||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
|
||||
gop.value_ptr.* = sym_loc;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
try self.setSymbolName(sym, name);
|
||||
sym.storage_class = .EXTERNAL;
|
||||
|
|
@ -1895,7 +1915,7 @@ pub fn updateDeclLineNumber(self: *Coff, module: *Module, decl_index: InternPool
|
|||
/// TODO: note that .ABSOLUTE is used as padding within each block; we could use this fact to do
|
||||
/// incremental updates and writes into the table instead of doing it all at once
|
||||
fn writeBaseRelocations(self: *Coff) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var page_table = std.AutoHashMap(u32, std.ArrayList(coff.BaseRelocation)).init(gpa);
|
||||
defer {
|
||||
|
|
@ -2006,7 +2026,7 @@ fn writeImportTables(self: *Coff) !void {
|
|||
if (self.idata_section_index == null) return;
|
||||
if (!self.imports_count_dirty) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const ext = ".dll";
|
||||
const header = &self.sections.items(.header)[self.idata_section_index.?];
|
||||
|
|
@ -2154,7 +2174,8 @@ fn writeStrtab(self: *Coff) !void {
|
|||
|
||||
log.debug("writing strtab from 0x{x} to 0x{x}", .{ self.strtab_offset.?, self.strtab_offset.? + needed_size });
|
||||
|
||||
var buffer = std.ArrayList(u8).init(self.base.allocator);
|
||||
const gpa = self.base.comp.gpa;
|
||||
var buffer = std.ArrayList(u8).init(gpa);
|
||||
defer buffer.deinit();
|
||||
try buffer.ensureTotalCapacityPrecise(needed_size);
|
||||
buffer.appendSliceAssumeCapacity(self.strtab.buffer.items);
|
||||
|
|
@ -2176,7 +2197,7 @@ fn writeDataDirectoriesHeaders(self: *Coff) !void {
|
|||
}
|
||||
|
||||
fn writeHeader(self: *Coff) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var buffer = std.ArrayList(u8).init(gpa);
|
||||
defer buffer.deinit();
|
||||
const writer = buffer.writer();
|
||||
|
|
@ -2499,7 +2520,7 @@ pub fn getOrPutGlobalPtr(self: *Coff, name: []const u8) !GetOrPutGlobalPtrResult
|
|||
if (self.getGlobalPtr(name)) |ptr| {
|
||||
return GetOrPutGlobalPtrResult{ .found_existing = true, .value_ptr = ptr };
|
||||
}
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const global_index = try self.allocateGlobal();
|
||||
const global_name = try gpa.dupe(u8, name);
|
||||
_ = try self.resolver.put(gpa, global_name, global_index);
|
||||
|
|
@ -2530,7 +2551,8 @@ fn setSectionName(self: *Coff, header: *coff.SectionHeader, name: []const u8) !v
|
|||
@memset(header.name[name.len..], 0);
|
||||
return;
|
||||
}
|
||||
const offset = try self.strtab.insert(self.base.allocator, name);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const offset = try self.strtab.insert(gpa, name);
|
||||
const name_offset = fmt.bufPrint(&header.name, "/{d}", .{offset}) catch unreachable;
|
||||
@memset(header.name[name_offset.len..], 0);
|
||||
}
|
||||
|
|
@ -2549,7 +2571,8 @@ fn setSymbolName(self: *Coff, symbol: *coff.Symbol, name: []const u8) !void {
|
|||
@memset(symbol.name[name.len..], 0);
|
||||
return;
|
||||
}
|
||||
const offset = try self.strtab.insert(self.base.allocator, name);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const offset = try self.strtab.insert(gpa, name);
|
||||
@memset(symbol.name[0..4], 0);
|
||||
mem.writeInt(u32, symbol.name[4..8], offset, .little);
|
||||
}
|
||||
|
|
|
|||
186
src/link/Elf.zig
186
src/link/Elf.zig
|
|
@ -200,26 +200,34 @@ pub const min_text_capacity = padToIdeal(minimum_atom_size);
|
|||
|
||||
pub const PtrWidth = enum { p32, p64 };
|
||||
|
||||
pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Options) !*Elf {
|
||||
assert(options.target.ofmt == .elf);
|
||||
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*Elf {
|
||||
if (build_options.only_c) unreachable;
|
||||
const target = options.comp.root_mod.resolved_target.result;
|
||||
assert(target.ofmt == .elf);
|
||||
|
||||
const self = try createEmpty(allocator, options);
|
||||
const use_lld = build_options.have_llvm and options.comp.config.use_lld;
|
||||
const use_llvm = build_options.have_llvm and options.comp.config.use_llvm;
|
||||
|
||||
const self = try createEmpty(arena, options);
|
||||
errdefer self.base.destroy();
|
||||
|
||||
if (use_lld and use_llvm) {
|
||||
// LLVM emits the object file; LLD links it into the final product.
|
||||
return self;
|
||||
}
|
||||
|
||||
const is_obj = options.output_mode == .Obj;
|
||||
const is_obj_or_ar = is_obj or (options.output_mode == .Lib and options.link_mode == .Static);
|
||||
|
||||
if (options.use_llvm) {
|
||||
const use_lld = build_options.have_llvm and self.base.options.use_lld;
|
||||
if (use_lld) return self;
|
||||
|
||||
if (options.module != null) {
|
||||
self.base.intermediary_basename = try std.fmt.allocPrint(allocator, "{s}{s}", .{
|
||||
sub_path, options.target.ofmt.fileExt(options.target.cpu.arch),
|
||||
});
|
||||
}
|
||||
}
|
||||
errdefer if (self.base.intermediary_basename) |path| allocator.free(path);
|
||||
const sub_path = if (!use_lld) options.emit.sub_path else p: {
|
||||
// Open a temporary object file, not the final output file because we
|
||||
// want to link with LLD.
|
||||
const o_file_path = try std.fmt.allocPrint(arena, "{s}{s}", .{
|
||||
options.emit.sub_path, target.ofmt.fileExt(target.cpu.arch),
|
||||
});
|
||||
self.base.intermediary_basename = o_file_path;
|
||||
break :p o_file_path;
|
||||
};
|
||||
|
||||
self.base.file = try options.emit.?.directory.handle.createFile(sub_path, .{
|
||||
.truncate = false,
|
||||
|
|
@ -227,24 +235,26 @@ pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Option
|
|||
.mode = link.determineMode(options),
|
||||
});
|
||||
|
||||
const gpa = options.comp.gpa;
|
||||
|
||||
// Index 0 is always a null symbol.
|
||||
try self.symbols.append(allocator, .{});
|
||||
try self.symbols.append(gpa, .{});
|
||||
// Index 0 is always a null symbol.
|
||||
try self.symbols_extra.append(allocator, 0);
|
||||
try self.symbols_extra.append(gpa, 0);
|
||||
// Allocate atom index 0 to null atom
|
||||
try self.atoms.append(allocator, .{});
|
||||
try self.atoms.append(gpa, .{});
|
||||
// Append null file at index 0
|
||||
try self.files.append(allocator, .null);
|
||||
try self.files.append(gpa, .null);
|
||||
// Append null byte to string tables
|
||||
try self.shstrtab.append(allocator, 0);
|
||||
try self.strtab.append(allocator, 0);
|
||||
try self.shstrtab.append(gpa, 0);
|
||||
try self.strtab.append(gpa, 0);
|
||||
// There must always be a null shdr in index 0
|
||||
_ = try self.addSection(.{ .name = "" });
|
||||
// Append null symbol in output symtab
|
||||
try self.symtab.append(allocator, null_sym);
|
||||
try self.symtab.append(gpa, null_sym);
|
||||
|
||||
if (!is_obj_or_ar) {
|
||||
try self.dynstrtab.append(allocator, 0);
|
||||
try self.dynstrtab.append(gpa, 0);
|
||||
|
||||
// Initialize PT_PHDR program header
|
||||
const p_align: u16 = switch (self.ptr_width) {
|
||||
|
|
@ -283,10 +293,10 @@ pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Option
|
|||
}
|
||||
|
||||
if (options.module != null and !options.use_llvm) {
|
||||
const index = @as(File.Index, @intCast(try self.files.addOne(allocator)));
|
||||
const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
|
||||
self.files.set(index, .{ .zig_object = .{
|
||||
.index = index,
|
||||
.path = try std.fmt.allocPrint(self.base.allocator, "{s}.o", .{std.fs.path.stem(
|
||||
.path = try std.fmt.allocPrint(arena, "{s}.o", .{std.fs.path.stem(
|
||||
options.module.?.main_mod.root_src_path,
|
||||
)}),
|
||||
} });
|
||||
|
|
@ -298,16 +308,16 @@ pub fn openPath(allocator: Allocator, sub_path: []const u8, options: link.Option
|
|||
return self;
|
||||
}
|
||||
|
||||
pub fn createEmpty(gpa: Allocator, options: link.Options) !*Elf {
|
||||
const ptr_width: PtrWidth = switch (options.target.ptrBitWidth()) {
|
||||
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*Elf {
|
||||
const target = options.comp.root_mod.resolved_target.result;
|
||||
const ptr_width: PtrWidth = switch (target.ptrBitWidth()) {
|
||||
0...32 => .p32,
|
||||
33...64 => .p64,
|
||||
else => return error.UnsupportedELFArchitecture,
|
||||
};
|
||||
const self = try gpa.create(Elf);
|
||||
errdefer gpa.destroy(self);
|
||||
const self = try arena.create(Elf);
|
||||
|
||||
const page_size: u32 = switch (options.target.cpu.arch) {
|
||||
const page_size: u32 = switch (target.cpu.arch) {
|
||||
.powerpc64le => 0x10000,
|
||||
.sparc64 => 0x2000,
|
||||
else => 0x1000,
|
||||
|
|
@ -321,25 +331,25 @@ pub fn createEmpty(gpa: Allocator, options: link.Options) !*Elf {
|
|||
self.* = .{
|
||||
.base = .{
|
||||
.tag = .elf,
|
||||
.options = options,
|
||||
.allocator = gpa,
|
||||
.comp = options.comp,
|
||||
.emit = options.emit,
|
||||
.file = null,
|
||||
},
|
||||
.ptr_width = ptr_width,
|
||||
.page_size = page_size,
|
||||
.default_sym_version = default_sym_version,
|
||||
};
|
||||
if (options.use_llvm and options.module != null) {
|
||||
self.llvm_object = try LlvmObject.create(gpa, options);
|
||||
if (options.use_llvm and options.comp.config.have_zcu) {
|
||||
self.llvm_object = try LlvmObject.create(arena, options);
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Elf) void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
if (self.llvm_object) |llvm_object| llvm_object.destroy(gpa);
|
||||
if (self.llvm_object) |llvm_object| llvm_object.deinit();
|
||||
|
||||
for (self.files.items(.tags), self.files.items(.data)) |tag, *data| switch (tag) {
|
||||
.null => {},
|
||||
|
|
@ -496,10 +506,11 @@ fn findFreeSpace(self: *Elf, object_size: u64, min_alignment: u64) u64 {
|
|||
|
||||
/// TODO move to ZigObject
|
||||
pub fn initMetadata(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const ptr_size = self.ptrWidthBytes();
|
||||
const ptr_bit_width = self.base.options.target.ptrBitWidth();
|
||||
const is_linux = self.base.options.target.os.tag == .linux;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const ptr_bit_width = target.ptrBitWidth();
|
||||
const is_linux = target.os.tag == .linux;
|
||||
const zig_object = self.zigObjectPtr().?;
|
||||
|
||||
const fillSection = struct {
|
||||
|
|
@ -943,7 +954,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
|
|||
if (use_lld) return;
|
||||
}
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var sub_prog_node = prog_node.start("ELF Flush", 0);
|
||||
sub_prog_node.activate();
|
||||
defer sub_prog_node.end();
|
||||
|
|
@ -952,7 +963,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
|
|||
defer arena_allocator.deinit();
|
||||
const arena = arena_allocator.allocator();
|
||||
|
||||
const target = self.base.options.target;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
|
||||
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
|
||||
const module_obj_path: ?[]const u8 = if (self.base.intermediary_basename) |path| blk: {
|
||||
|
|
@ -1303,7 +1314,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
|
|||
}
|
||||
|
||||
pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var positionals = std.ArrayList(Compilation.LinkObject).init(gpa);
|
||||
defer positionals.deinit();
|
||||
|
|
@ -1447,7 +1458,7 @@ pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const
|
|||
}
|
||||
|
||||
pub fn flushObject(self: *Elf, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var positionals = std.ArrayList(Compilation.LinkObject).init(gpa);
|
||||
defer positionals.deinit();
|
||||
|
|
@ -1524,7 +1535,7 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
|
|||
defer arena_allocator.deinit();
|
||||
const arena = arena_allocator.allocator();
|
||||
|
||||
const target = self.base.options.target;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
|
||||
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
|
||||
const module_obj_path: ?[]const u8 = if (self.base.intermediary_basename) |path| blk: {
|
||||
|
|
@ -1574,7 +1585,7 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
|
|||
}
|
||||
} else {
|
||||
if (!self.isStatic()) {
|
||||
if (self.base.options.target.dynamic_linker.get()) |path| {
|
||||
if (target.dynamic_linker.get()) |path| {
|
||||
try argv.append("-dynamic-linker");
|
||||
try argv.append(path);
|
||||
}
|
||||
|
|
@ -1842,7 +1853,7 @@ fn parseObject(self: *Elf, path: []const u8) ParseError!void {
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const in_file = try std.fs.cwd().openFile(path, .{});
|
||||
defer in_file.close();
|
||||
const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32));
|
||||
|
|
@ -1862,7 +1873,7 @@ fn parseArchive(self: *Elf, path: []const u8, must_link: bool) ParseError!void {
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const in_file = try std.fs.cwd().openFile(path, .{});
|
||||
defer in_file.close();
|
||||
const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32));
|
||||
|
|
@ -1888,7 +1899,7 @@ fn parseSharedObject(self: *Elf, lib: SystemLib) ParseError!void {
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const in_file = try std.fs.cwd().openFile(lib.path, .{});
|
||||
defer in_file.close();
|
||||
const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32));
|
||||
|
|
@ -1910,7 +1921,7 @@ fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const in_file = try std.fs.cwd().openFile(lib.path, .{});
|
||||
defer in_file.close();
|
||||
const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32));
|
||||
|
|
@ -1996,7 +2007,7 @@ fn accessLibPath(
|
|||
link_mode: ?std.builtin.LinkMode,
|
||||
) !bool {
|
||||
const sep = fs.path.sep_str;
|
||||
const target = self.base.options.target;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
test_path.clearRetainingCapacity();
|
||||
const prefix = if (link_mode != null) "lib" else "";
|
||||
const suffix = if (link_mode) |mode| switch (mode) {
|
||||
|
|
@ -2190,7 +2201,7 @@ fn claimUnresolvedObject(self: *Elf) void {
|
|||
/// This is also the point where we will report undefined symbols for any
|
||||
/// alloc sections.
|
||||
fn scanRelocs(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var undefs = std.AutoHashMap(Symbol.Index, std.ArrayList(Atom.Index)).init(gpa);
|
||||
defer {
|
||||
|
|
@ -2293,7 +2304,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
|
|||
const is_exe_or_dyn_lib = is_dyn_lib or self.base.options.output_mode == .Exe;
|
||||
const have_dynamic_linker = self.base.options.link_libc and
|
||||
self.base.options.link_mode == .Dynamic and is_exe_or_dyn_lib;
|
||||
const target = self.base.options.target;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const gc_sections = self.base.options.gc_sections orelse !is_obj;
|
||||
const stack_size = self.base.options.stack_size_override orelse 16777216;
|
||||
const allow_shlib_undefined = self.base.options.allow_shlib_undefined orelse !self.base.options.is_native_os;
|
||||
|
|
@ -2374,7 +2385,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
|
|||
man.hash.addBytes(libc_installation.crt_dir.?);
|
||||
}
|
||||
if (have_dynamic_linker) {
|
||||
man.hash.addOptionalBytes(self.base.options.target.dynamic_linker.get());
|
||||
man.hash.addOptionalBytes(target.dynamic_linker.get());
|
||||
}
|
||||
}
|
||||
man.hash.addOptionalBytes(self.base.options.soname);
|
||||
|
|
@ -2687,7 +2698,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
|
|||
}
|
||||
|
||||
if (have_dynamic_linker) {
|
||||
if (self.base.options.target.dynamic_linker.get()) |dynamic_linker| {
|
||||
if (target.dynamic_linker.get()) |dynamic_linker| {
|
||||
try argv.append("-dynamic-linker");
|
||||
try argv.append(dynamic_linker);
|
||||
}
|
||||
|
|
@ -2937,7 +2948,8 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
|
|||
}
|
||||
|
||||
fn writeDwarfAddrAssumeCapacity(self: *Elf, buf: *std.ArrayList(u8), addr: u64) void {
|
||||
const target_endian = self.base.options.target.cpu.arch.endian();
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const target_endian = target.cpu.arch.endian();
|
||||
switch (self.ptr_width) {
|
||||
.p32 => mem.writeInt(u32, buf.addManyAsArrayAssumeCapacity(4), @as(u32, @intCast(addr)), target_endian),
|
||||
.p64 => mem.writeInt(u64, buf.addManyAsArrayAssumeCapacity(8), addr, target_endian),
|
||||
|
|
@ -2945,8 +2957,9 @@ fn writeDwarfAddrAssumeCapacity(self: *Elf, buf: *std.ArrayList(u8), addr: u64)
|
|||
}
|
||||
|
||||
fn writeShdrTable(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const target_endian = self.base.options.target.cpu.arch.endian();
|
||||
const gpa = self.base.comp.gpa;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const target_endian = target.cpu.arch.endian();
|
||||
const foreign_endian = target_endian != builtin.cpu.arch.endian();
|
||||
const shsize: u64 = switch (self.ptr_width) {
|
||||
.p32 => @sizeOf(elf.Elf32_Shdr),
|
||||
|
|
@ -3001,8 +3014,9 @@ fn writeShdrTable(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn writePhdrTable(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const target_endian = self.base.options.target.cpu.arch.endian();
|
||||
const gpa = self.base.comp.gpa;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const target_endian = target.cpu.arch.endian();
|
||||
const foreign_endian = target_endian != builtin.cpu.arch.endian();
|
||||
const phdr_table = &self.phdrs.items[self.phdr_table_index.?];
|
||||
|
||||
|
|
@ -3054,7 +3068,8 @@ fn writeElfHeader(self: *Elf) !void {
|
|||
};
|
||||
index += 1;
|
||||
|
||||
const endian = self.base.options.target.cpu.arch.endian();
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const endian = target.cpu.arch.endian();
|
||||
hdr_buf[index] = switch (endian) {
|
||||
.little => elf.ELFDATA2LSB,
|
||||
.big => elf.ELFDATA2MSB,
|
||||
|
|
@ -3083,7 +3098,7 @@ fn writeElfHeader(self: *Elf) !void {
|
|||
mem.writeInt(u16, hdr_buf[index..][0..2], @intFromEnum(elf_type), endian);
|
||||
index += 2;
|
||||
|
||||
const machine = self.base.options.target.cpu.arch.toElfMachine();
|
||||
const machine = target.cpu.arch.toElfMachine();
|
||||
mem.writeInt(u16, hdr_buf[index..][0..2], @intFromEnum(machine), endian);
|
||||
index += 2;
|
||||
|
||||
|
|
@ -3248,7 +3263,7 @@ fn addLinkerDefinedSymbols(self: *Elf) !void {
|
|||
|
||||
for (self.shdrs.items) |shdr| {
|
||||
if (self.getStartStopBasename(shdr)) |name| {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.start_stop_indexes.ensureUnusedCapacity(gpa, 2);
|
||||
|
||||
const start = try std.fmt.allocPrintZ(gpa, "__start_{s}", .{name});
|
||||
|
|
@ -3394,6 +3409,7 @@ fn initOutputSections(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn initSyntheticSections(self: *Elf) !void {
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const ptr_size = self.ptrWidthBytes();
|
||||
|
||||
const needs_eh_frame = for (self.objects.items) |index| {
|
||||
|
|
@ -3503,7 +3519,7 @@ fn initSyntheticSections(self: *Elf) !void {
|
|||
// a segfault in the dynamic linker trying to load a binary that is static
|
||||
// and doesn't contain .dynamic section.
|
||||
if (self.isStatic() and !self.base.options.pie) break :blk false;
|
||||
break :blk self.base.options.target.dynamic_linker.get() != null;
|
||||
break :blk target.dynamic_linker.get() != null;
|
||||
};
|
||||
if (needs_interp) {
|
||||
self.interp_section_index = try self.addSection(.{
|
||||
|
|
@ -3613,7 +3629,7 @@ fn initSectionsObject(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn initComdatGroups(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
for (self.objects.items) |index| {
|
||||
const object = self.file(index).?.object;
|
||||
|
|
@ -3732,7 +3748,7 @@ fn initSpecialPhdrs(self: *Elf) !void {
|
|||
/// Ties are broken by the file prority which corresponds to the inclusion of input sections in this output section
|
||||
/// we are about to sort.
|
||||
fn sortInitFini(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const Entry = struct {
|
||||
priority: i32,
|
||||
|
|
@ -3872,7 +3888,7 @@ fn sortPhdrs(self: *Elf) error{OutOfMemory}!void {
|
|||
}
|
||||
};
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var entries = try std.ArrayList(Entry).initCapacity(gpa, self.phdrs.items.len);
|
||||
defer entries.deinit();
|
||||
for (0..self.phdrs.items.len) |phndx| {
|
||||
|
|
@ -3977,7 +3993,7 @@ fn sortShdrs(self: *Elf) !void {
|
|||
}
|
||||
};
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var entries = try std.ArrayList(Entry).initCapacity(gpa, self.shdrs.items.len);
|
||||
defer entries.deinit();
|
||||
for (0..self.shdrs.items.len) |shndx| {
|
||||
|
|
@ -4004,7 +4020,7 @@ fn sortShdrs(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn resetShdrIndexes(self: *Elf, backlinks: []const u16) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
for (&[_]*?u16{
|
||||
&self.eh_frame_section_index,
|
||||
|
|
@ -4187,6 +4203,7 @@ fn resetShdrIndexes(self: *Elf, backlinks: []const u16) !void {
|
|||
}
|
||||
|
||||
fn updateSectionSizes(self: *Elf) !void {
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
for (self.output_sections.keys(), self.output_sections.values()) |shndx, atom_list| {
|
||||
const shdr = &self.shdrs.items[shndx];
|
||||
for (atom_list.items) |atom_index| {
|
||||
|
|
@ -4244,7 +4261,7 @@ fn updateSectionSizes(self: *Elf) !void {
|
|||
}
|
||||
|
||||
if (self.interp_section_index) |index| {
|
||||
self.shdrs.items[index].sh_size = self.base.options.target.dynamic_linker.get().?.len + 1;
|
||||
self.shdrs.items[index].sh_size = target.dynamic_linker.get().?.len + 1;
|
||||
}
|
||||
|
||||
if (self.hash_section_index) |index| {
|
||||
|
|
@ -4453,7 +4470,7 @@ fn allocateAllocSections(self: *Elf) error{OutOfMemory}!void {
|
|||
// as we are more interested in quick turnaround and compatibility
|
||||
// with `findFreeSpace` mechanics than anything else.
|
||||
const Cover = std.ArrayList(u16);
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var covers: [max_number_of_object_segments]Cover = undefined;
|
||||
for (&covers) |*cover| {
|
||||
cover.* = Cover.init(gpa);
|
||||
|
|
@ -4691,7 +4708,7 @@ fn allocateAtoms(self: *Elf) void {
|
|||
}
|
||||
|
||||
fn writeAtoms(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var undefs = std.AutoHashMap(Symbol.Index, std.ArrayList(Atom.Index)).init(gpa);
|
||||
defer {
|
||||
|
|
@ -4779,7 +4796,7 @@ fn writeAtoms(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn writeAtomsObject(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
// TODO iterate over `output_sections` directly
|
||||
for (self.shdrs.items, 0..) |shdr, shndx| {
|
||||
|
|
@ -4852,7 +4869,7 @@ fn updateSymtabSize(self: *Elf) !void {
|
|||
var nglobals: u32 = 0;
|
||||
var strsize: u32 = 0;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var files = std.ArrayList(File.Index).init(gpa);
|
||||
defer files.deinit();
|
||||
try files.ensureTotalCapacityPrecise(self.objects.items.len + self.shared_objects.items.len + 2);
|
||||
|
|
@ -4935,11 +4952,12 @@ fn updateSymtabSize(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn writeSyntheticSections(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
if (self.interp_section_index) |shndx| {
|
||||
var buffer: [256]u8 = undefined;
|
||||
const interp = self.base.options.target.dynamic_linker.get().?;
|
||||
const interp = target.dynamic_linker.get().?;
|
||||
@memcpy(buffer[0..interp.len], interp);
|
||||
buffer[interp.len] = 0;
|
||||
const contents = buffer[0 .. interp.len + 1];
|
||||
|
|
@ -5065,7 +5083,7 @@ fn writeSyntheticSections(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn writeSyntheticSectionsObject(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
for (self.output_rela_sections.values()) |sec| {
|
||||
if (sec.atom_list.items.len == 0) continue;
|
||||
|
|
@ -5135,7 +5153,7 @@ fn writeSyntheticSectionsObject(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn writeComdatGroups(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
for (self.comdat_group_sections.items) |cgs| {
|
||||
const shdr = self.shdrs.items[cgs.shndx];
|
||||
const sh_size = math.cast(usize, shdr.sh_size) orelse return error.Overflow;
|
||||
|
|
@ -5160,7 +5178,8 @@ fn writeShStrtab(self: *Elf) !void {
|
|||
}
|
||||
|
||||
fn writeSymtab(self: *Elf) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const symtab_shdr = self.shdrs.items[self.symtab_section_index.?];
|
||||
const strtab_shdr = self.shdrs.items[self.strtab_section_index.?];
|
||||
const sym_size: u64 = switch (self.ptr_width) {
|
||||
|
|
@ -5220,7 +5239,7 @@ fn writeSymtab(self: *Elf) !void {
|
|||
self.plt_got.writeSymtab(self);
|
||||
}
|
||||
|
||||
const foreign_endian = self.base.options.target.cpu.arch.endian() != builtin.cpu.arch.endian();
|
||||
const foreign_endian = target.cpu.arch.endian() != builtin.cpu.arch.endian();
|
||||
switch (self.ptr_width) {
|
||||
.p32 => {
|
||||
const buf = try gpa.alloc(elf.Elf32_Sym, self.symtab.items.len);
|
||||
|
|
@ -5299,7 +5318,8 @@ fn ptrWidthBytes(self: Elf) u8 {
|
|||
/// Does not necessarily match `ptrWidthBytes` for example can be 2 bytes
|
||||
/// in a 32-bit ELF file.
|
||||
pub fn archPtrWidthBytes(self: Elf) u8 {
|
||||
return @as(u8, @intCast(@divExact(self.base.options.target.ptrBitWidth(), 8)));
|
||||
const target = self.base.comp.root_mod.resolved_target.result;
|
||||
return @intCast(@divExact(target.ptrBitWidth(), 8));
|
||||
}
|
||||
|
||||
fn phdrTo32(phdr: elf.Elf64_Phdr) elf.Elf32_Phdr {
|
||||
|
|
@ -5694,7 +5714,7 @@ pub const AddSectionOpts = struct {
|
|||
};
|
||||
|
||||
pub fn addSection(self: *Elf, opts: AddSectionOpts) !u16 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const index = @as(u16, @intCast(self.shdrs.items.len));
|
||||
const shdr = try self.shdrs.addOne(gpa);
|
||||
shdr.* = .{
|
||||
|
|
@ -5887,7 +5907,7 @@ const GetOrPutGlobalResult = struct {
|
|||
};
|
||||
|
||||
pub fn getOrPutGlobal(self: *Elf, name: []const u8) !GetOrPutGlobalResult {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const name_off = try self.strings.insert(gpa, name);
|
||||
const gop = try self.resolver.getOrPut(gpa, name_off);
|
||||
if (!gop.found_existing) {
|
||||
|
|
@ -5923,7 +5943,7 @@ const GetOrCreateComdatGroupOwnerResult = struct {
|
|||
};
|
||||
|
||||
pub fn getOrCreateComdatGroupOwner(self: *Elf, name: [:0]const u8) !GetOrCreateComdatGroupOwnerResult {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const off = try self.strings.insert(gpa, name);
|
||||
const gop = try self.comdat_groups_table.getOrPut(gpa, off);
|
||||
if (!gop.found_existing) {
|
||||
|
|
@ -6039,7 +6059,7 @@ pub fn insertDynString(self: *Elf, name: []const u8) error{OutOfMemory}!u32 {
|
|||
}
|
||||
|
||||
fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const max_notes = 4;
|
||||
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, undefs.count());
|
||||
|
|
|
|||
|
|
@ -143,14 +143,23 @@ tlv_table: TlvSymbolTable = .{},
|
|||
/// Hot-code swapping state.
|
||||
hot_state: if (is_hot_update_compatible) HotUpdateState else struct {} = .{},
|
||||
|
||||
pub fn openPath(allocator: Allocator, options: link.Options) !*MachO {
|
||||
assert(options.target.ofmt == .macho);
|
||||
darwin_sdk_layout: ?SdkLayout,
|
||||
|
||||
if (options.emit == null) {
|
||||
return createEmpty(allocator, options);
|
||||
}
|
||||
/// The filesystem layout of darwin SDK elements.
|
||||
pub const SdkLayout = enum {
|
||||
/// macOS SDK layout: TOP { /usr/include, /usr/lib, /System/Library/Frameworks }.
|
||||
sdk,
|
||||
/// Shipped libc layout: TOP { /lib/libc/include, /lib/libc/darwin, <NONE> }.
|
||||
vendored,
|
||||
};
|
||||
|
||||
const emit = options.emit.?;
|
||||
pub fn open(arena: Allocator, options: link.File.OpenOptions) !*MachO {
|
||||
if (build_options.only_c) unreachable;
|
||||
const target = options.comp.root_mod.resolved_target.result;
|
||||
assert(target.ofmt == .macho);
|
||||
|
||||
const gpa = options.comp.gpa;
|
||||
const emit = options.emit;
|
||||
const mode: Mode = mode: {
|
||||
if (options.use_llvm or options.module == null or options.cache_mode == .whole)
|
||||
break :mode .zld;
|
||||
|
|
@ -160,17 +169,16 @@ pub fn openPath(allocator: Allocator, options: link.Options) !*MachO {
|
|||
if (options.module == null) {
|
||||
// No point in opening a file, we would not write anything to it.
|
||||
// Initialize with empty.
|
||||
return createEmpty(allocator, options);
|
||||
return createEmpty(arena, options);
|
||||
}
|
||||
// Open a temporary object file, not the final output file because we
|
||||
// want to link with LLD.
|
||||
break :blk try std.fmt.allocPrint(allocator, "{s}{s}", .{
|
||||
emit.sub_path, options.target.ofmt.fileExt(options.target.cpu.arch),
|
||||
break :blk try std.fmt.allocPrint(arena, "{s}{s}", .{
|
||||
emit.sub_path, target.ofmt.fileExt(target.cpu.arch),
|
||||
});
|
||||
} else emit.sub_path;
|
||||
errdefer if (mode == .zld) allocator.free(sub_path);
|
||||
|
||||
const self = try createEmpty(allocator, options);
|
||||
const self = try createEmpty(arena, options);
|
||||
errdefer self.base.destroy();
|
||||
|
||||
if (mode == .zld) {
|
||||
|
|
@ -186,7 +194,6 @@ pub fn openPath(allocator: Allocator, options: link.Options) !*MachO {
|
|||
.read = true,
|
||||
.mode = link.determineMode(options),
|
||||
});
|
||||
errdefer file.close();
|
||||
self.base.file = file;
|
||||
|
||||
if (!options.strip and options.module != null) {
|
||||
|
|
@ -194,11 +201,10 @@ pub fn openPath(allocator: Allocator, options: link.Options) !*MachO {
|
|||
log.debug("creating {s}.dSYM bundle", .{sub_path});
|
||||
|
||||
const d_sym_path = try std.fmt.allocPrint(
|
||||
allocator,
|
||||
arena,
|
||||
"{s}.dSYM" ++ fs.path.sep_str ++ "Contents" ++ fs.path.sep_str ++ "Resources" ++ fs.path.sep_str ++ "DWARF",
|
||||
.{sub_path},
|
||||
);
|
||||
defer allocator.free(d_sym_path);
|
||||
|
||||
var d_sym_bundle = try emit.directory.handle.makeOpenPath(d_sym_path, .{});
|
||||
defer d_sym_bundle.close();
|
||||
|
|
@ -209,21 +215,21 @@ pub fn openPath(allocator: Allocator, options: link.Options) !*MachO {
|
|||
});
|
||||
|
||||
self.d_sym = .{
|
||||
.allocator = allocator,
|
||||
.dwarf = link.File.Dwarf.init(allocator, &self.base, .dwarf32),
|
||||
.allocator = gpa,
|
||||
.dwarf = link.File.Dwarf.init(gpa, &self.base, .dwarf32),
|
||||
.file = d_sym_file,
|
||||
};
|
||||
}
|
||||
|
||||
// Index 0 is always a null symbol.
|
||||
try self.locals.append(allocator, .{
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = 0,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
try self.strtab.buffer.append(allocator, 0);
|
||||
try self.strtab.buffer.append(gpa, 0);
|
||||
|
||||
try self.populateMissingMetadata();
|
||||
|
||||
|
|
@ -234,15 +240,14 @@ pub fn openPath(allocator: Allocator, options: link.Options) !*MachO {
|
|||
return self;
|
||||
}
|
||||
|
||||
pub fn createEmpty(gpa: Allocator, options: link.Options) !*MachO {
|
||||
const self = try gpa.create(MachO);
|
||||
errdefer gpa.destroy(self);
|
||||
pub fn createEmpty(arena: Allocator, options: link.File.OpenOptions) !*MachO {
|
||||
const self = try arena.create(MachO);
|
||||
|
||||
self.* = .{
|
||||
.base = .{
|
||||
.tag = .macho,
|
||||
.options = options,
|
||||
.allocator = gpa,
|
||||
.comp = options.comp,
|
||||
.emit = options.emit,
|
||||
.file = null,
|
||||
},
|
||||
.mode = if (options.use_llvm or options.module == null or options.cache_mode == .whole)
|
||||
|
|
@ -252,7 +257,7 @@ pub fn createEmpty(gpa: Allocator, options: link.Options) !*MachO {
|
|||
};
|
||||
|
||||
if (options.use_llvm and options.module != null) {
|
||||
self.llvm_object = try LlvmObject.create(gpa, options);
|
||||
self.llvm_object = try LlvmObject.create(arena, options);
|
||||
}
|
||||
|
||||
log.debug("selected linker mode '{s}'", .{@tagName(self.mode)});
|
||||
|
|
@ -261,20 +266,15 @@ pub fn createEmpty(gpa: Allocator, options: link.Options) !*MachO {
|
|||
}
|
||||
|
||||
pub fn flush(self: *MachO, comp: *Compilation, prog_node: *std.Progress.Node) link.File.FlushError!void {
|
||||
if (self.base.options.emit == null) {
|
||||
if (self.llvm_object) |llvm_object| {
|
||||
try llvm_object.flushModule(comp, prog_node);
|
||||
}
|
||||
return;
|
||||
}
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
if (self.base.options.output_mode == .Lib and self.base.options.link_mode == .Static) {
|
||||
if (build_options.have_llvm) {
|
||||
return self.base.linkAsArchive(comp, prog_node);
|
||||
} else {
|
||||
try self.misc_errors.ensureUnusedCapacity(self.base.allocator, 1);
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
|
||||
self.misc_errors.appendAssumeCapacity(.{
|
||||
.msg = try self.base.allocator.dupe(u8, "TODO: non-LLVM archiver for MachO object files"),
|
||||
.msg = try gpa.dupe(u8, "TODO: non-LLVM archiver for MachO object files"),
|
||||
});
|
||||
return error.FlushFailure;
|
||||
}
|
||||
|
|
@ -294,7 +294,8 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
|
|||
return try llvm_object.flushModule(comp, prog_node);
|
||||
}
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
|
||||
const gpa = self.base.comp.gpa;
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = arena_allocator.allocator();
|
||||
|
||||
|
|
@ -391,7 +392,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
|
|||
|
||||
if (cache_miss) {
|
||||
for (self.dylibs.items) |*dylib| {
|
||||
dylib.deinit(self.base.allocator);
|
||||
dylib.deinit(gpa);
|
||||
}
|
||||
self.dylibs.clearRetainingCapacity();
|
||||
self.dylibs_map.clearRetainingCapacity();
|
||||
|
|
@ -403,7 +404,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
|
|||
const in_file = try std.fs.cwd().openFile(path, .{});
|
||||
defer in_file.close();
|
||||
|
||||
var parse_ctx = ParseErrorCtx.init(self.base.allocator);
|
||||
var parse_ctx = ParseErrorCtx.init(gpa);
|
||||
defer parse_ctx.deinit();
|
||||
|
||||
self.parseLibrary(
|
||||
|
|
@ -470,7 +471,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
|
|||
const section = self.sections.get(sym.n_sect - 1).header;
|
||||
const file_offset = section.offset + sym.n_value - section.addr;
|
||||
|
||||
var code = std.ArrayList(u8).init(self.base.allocator);
|
||||
var code = std.ArrayList(u8).init(gpa);
|
||||
defer code.deinit();
|
||||
try code.resize(math.cast(usize, atom.size) orelse return error.Overflow);
|
||||
|
||||
|
|
@ -518,12 +519,12 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
|
|||
var codesig = CodeSignature.init(getPageSize(self.base.options.target.cpu.arch));
|
||||
codesig.code_directory.ident = self.base.options.emit.?.sub_path;
|
||||
if (self.base.options.entitlements) |path| {
|
||||
try codesig.addEntitlements(self.base.allocator, path);
|
||||
try codesig.addEntitlements(gpa, path);
|
||||
}
|
||||
try self.writeCodeSignaturePadding(&codesig);
|
||||
break :blk codesig;
|
||||
} else null;
|
||||
defer if (codesig) |*csig| csig.deinit(self.base.allocator);
|
||||
defer if (codesig) |*csig| csig.deinit(gpa);
|
||||
|
||||
// Write load commands
|
||||
var lc_buffer = std.ArrayList(u8).init(arena);
|
||||
|
|
@ -555,12 +556,12 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
|
|||
});
|
||||
},
|
||||
.Lib => if (self.base.options.link_mode == .Dynamic) {
|
||||
try load_commands.writeDylibIdLC(self.base.allocator, &self.base.options, lc_writer);
|
||||
try load_commands.writeDylibIdLC(gpa, &self.base.options, lc_writer);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
try load_commands.writeRpathLCs(self.base.allocator, &self.base.options, lc_writer);
|
||||
try load_commands.writeRpathLCs(gpa, &self.base.options, lc_writer);
|
||||
try lc_writer.writeStruct(macho.source_version_command{
|
||||
.version = 0,
|
||||
});
|
||||
|
|
@ -644,7 +645,8 @@ pub fn resolveLibSystem(
|
|||
search_dirs: []const []const u8,
|
||||
out_libs: anytype,
|
||||
) !void {
|
||||
var tmp_arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
|
||||
const gpa = self.base.comp.gpa;
|
||||
var tmp_arena_allocator = std.heap.ArenaAllocator.init(gpa);
|
||||
defer tmp_arena_allocator.deinit();
|
||||
const tmp_arena = tmp_arena_allocator.allocator();
|
||||
|
||||
|
|
@ -775,7 +777,7 @@ fn parseObject(
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mtime: u64 = mtime: {
|
||||
const stat = file.stat() catch break :mtime 0;
|
||||
break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
|
||||
|
|
@ -868,7 +870,7 @@ pub fn parseFatLibrary(
|
|||
cpu_arch: std.Target.Cpu.Arch,
|
||||
ctx: *ParseErrorCtx,
|
||||
) ParseError!u64 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const fat_archs = try fat.parseArchs(gpa, file);
|
||||
defer gpa.free(fat_archs);
|
||||
|
|
@ -892,7 +894,7 @@ fn parseArchive(
|
|||
must_link: bool,
|
||||
ctx: *ParseErrorCtx,
|
||||
) ParseError!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
// We take ownership of the file so that we can store it for the duration of symbol resolution.
|
||||
// TODO we shouldn't need to do that and could pre-parse the archive like we do for zld/ELF?
|
||||
|
|
@ -973,7 +975,7 @@ fn parseDylib(
|
|||
dylib_options: DylibOpts,
|
||||
ctx: *ParseErrorCtx,
|
||||
) ParseError!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const file_stat = try file.stat();
|
||||
const file_size = math.cast(usize, file_stat.size - offset) orelse return error.Overflow;
|
||||
|
||||
|
|
@ -1019,7 +1021,7 @@ fn parseLibStub(
|
|||
dylib_options: DylibOpts,
|
||||
ctx: *ParseErrorCtx,
|
||||
) ParseError!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var lib_stub = try LibStub.loadFromFile(gpa, file);
|
||||
defer lib_stub.deinit();
|
||||
|
||||
|
|
@ -1072,7 +1074,7 @@ fn addDylib(self: *MachO, dylib: Dylib, dylib_options: DylibOpts, ctx: *ParseErr
|
|||
}
|
||||
}
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const gop = try self.dylibs_map.getOrPut(gpa, dylib.id.?.name);
|
||||
if (gop.found_existing) return error.DylibAlreadyExists;
|
||||
|
||||
|
|
@ -1098,7 +1100,7 @@ pub fn parseDependentLibs(self: *MachO, dependent_libs: anytype) !void {
|
|||
// 2) afterwards, we parse dependents of the included dylibs
|
||||
// TODO this should not be performed if the user specifies `-flat_namespace` flag.
|
||||
// See ld64 manpages.
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
while (dependent_libs.readItem()) |dep_id| {
|
||||
defer dep_id.id.deinit(gpa);
|
||||
|
|
@ -1162,7 +1164,8 @@ pub fn writeAtom(self: *MachO, atom_index: Atom.Index, code: []u8) !void {
|
|||
log.debug("writing atom for symbol {s} at file offset 0x{x}", .{ atom.getName(self), file_offset });
|
||||
|
||||
// Gather relocs which can be resolved.
|
||||
var relocs = std.ArrayList(*Relocation).init(self.base.allocator);
|
||||
const gpa = self.base.comp.gpa;
|
||||
var relocs = std.ArrayList(*Relocation).init(gpa);
|
||||
defer relocs.deinit();
|
||||
|
||||
if (self.relocs.getPtr(atom_index)) |rels| {
|
||||
|
|
@ -1237,7 +1240,7 @@ fn writeOffsetTableEntry(self: *MachO, index: usize) !void {
|
|||
fn writeStubHelperPreamble(self: *MachO) !void {
|
||||
if (self.stub_helper_preamble_allocated) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const cpu_arch = self.base.options.target.cpu.arch;
|
||||
const size = stubs.stubHelperPreambleSize(cpu_arch);
|
||||
|
||||
|
|
@ -1290,7 +1293,7 @@ fn writeStubTableEntry(self: *MachO, index: usize) !void {
|
|||
self.stub_table_count_dirty = false;
|
||||
}
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const stubs_header = self.sections.items(.header)[stubs_sect_id];
|
||||
const stub_helper_header = self.sections.items(.header)[stub_helper_sect_id];
|
||||
|
|
@ -1469,7 +1472,7 @@ const CreateAtomOpts = struct {
|
|||
};
|
||||
|
||||
pub fn createAtom(self: *MachO, sym_index: u32, opts: CreateAtomOpts) !Atom.Index {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const index = @as(Atom.Index, @intCast(self.atoms.items.len));
|
||||
const atom = try self.atoms.addOne(gpa);
|
||||
atom.* = .{};
|
||||
|
|
@ -1481,7 +1484,7 @@ pub fn createAtom(self: *MachO, sym_index: u32, opts: CreateAtomOpts) !Atom.Inde
|
|||
}
|
||||
|
||||
pub fn createTentativeDefAtoms(self: *MachO) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
for (self.globals.items) |global| {
|
||||
const sym = self.getSymbolPtr(global);
|
||||
|
|
@ -1536,7 +1539,8 @@ pub fn createDyldPrivateAtom(self: *MachO) !void {
|
|||
.size = @sizeOf(u64),
|
||||
.alignment = .@"8",
|
||||
});
|
||||
try self.atom_by_index_table.putNoClobber(self.base.allocator, sym_index, atom_index);
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.atom_by_index_table.putNoClobber(gpa, sym_index, atom_index);
|
||||
|
||||
if (self.data_section_index == null) {
|
||||
self.data_section_index = try self.initSection("__DATA", "__data", .{});
|
||||
|
|
@ -1560,7 +1564,7 @@ pub fn createDyldPrivateAtom(self: *MachO) !void {
|
|||
}
|
||||
|
||||
fn createThreadLocalDescriptorAtom(self: *MachO, sym_name: []const u8, target: SymbolWithLoc) !Atom.Index {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const size = 3 * @sizeOf(u64);
|
||||
const required_alignment: Alignment = .@"1";
|
||||
const sym_index = try self.allocateSymbol();
|
||||
|
|
@ -1595,7 +1599,7 @@ fn createThreadLocalDescriptorAtom(self: *MachO, sym_name: []const u8, target: S
|
|||
pub fn createMhExecuteHeaderSymbol(self: *MachO) !void {
|
||||
if (self.base.options.output_mode != .Exe) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index };
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
|
|
@ -1622,7 +1626,7 @@ pub fn createDsoHandleSymbol(self: *MachO) !void {
|
|||
const global = self.getGlobalPtr("___dso_handle") orelse return;
|
||||
if (!self.getSymbol(global.*).undf()) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index };
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
|
|
@ -1686,7 +1690,7 @@ pub fn resolveSymbols(self: *MachO) !void {
|
|||
}
|
||||
|
||||
fn resolveGlobalSymbol(self: *MachO, current: SymbolWithLoc) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const sym = self.getSymbol(current);
|
||||
const sym_name = self.getSymbolName(current);
|
||||
|
||||
|
|
@ -1800,7 +1804,7 @@ fn resolveSymbolsInObject(self: *MachO, object_id: u32) !void {
|
|||
fn resolveSymbolsInArchives(self: *MachO) !void {
|
||||
if (self.archives.items.len == 0) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var next_sym: usize = 0;
|
||||
loop: while (next_sym < self.unresolved.count()) {
|
||||
const global = self.globals.items[self.unresolved.keys()[next_sym]];
|
||||
|
|
@ -1829,7 +1833,7 @@ fn resolveSymbolsInArchives(self: *MachO) !void {
|
|||
fn resolveSymbolsInDylibs(self: *MachO) !void {
|
||||
if (self.dylibs.items.len == 0) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var next_sym: usize = 0;
|
||||
loop: while (next_sym < self.unresolved.count()) {
|
||||
const global_index = self.unresolved.keys()[next_sym];
|
||||
|
|
@ -1899,6 +1903,7 @@ fn resolveSymbolsAtLoading(self: *MachO) !void {
|
|||
}
|
||||
|
||||
fn resolveBoundarySymbols(self: *MachO) !void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
var next_sym: usize = 0;
|
||||
while (next_sym < self.unresolved.count()) {
|
||||
const global_index = self.unresolved.keys()[next_sym];
|
||||
|
|
@ -1909,7 +1914,7 @@ fn resolveBoundarySymbols(self: *MachO) !void {
|
|||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index };
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
sym.* = .{
|
||||
.n_strx = try self.strtab.insert(self.base.allocator, self.getSymbolName(global.*)),
|
||||
.n_strx = try self.strtab.insert(gpa, self.getSymbolName(global.*)),
|
||||
.n_type = macho.N_SECT | macho.N_EXT,
|
||||
.n_sect = 0,
|
||||
.n_desc = N_BOUNDARY,
|
||||
|
|
@ -1929,9 +1934,9 @@ fn resolveBoundarySymbols(self: *MachO) !void {
|
|||
}
|
||||
|
||||
pub fn deinit(self: *MachO) void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
if (self.llvm_object) |llvm_object| llvm_object.destroy(gpa);
|
||||
if (self.llvm_object) |llvm_object| llvm_object.deinit();
|
||||
|
||||
if (self.d_sym) |*d_sym| {
|
||||
d_sym.deinit();
|
||||
|
|
@ -2032,7 +2037,7 @@ pub fn deinit(self: *MachO) void {
|
|||
}
|
||||
|
||||
fn freeAtom(self: *MachO, atom_index: Atom.Index) void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
log.debug("freeAtom {d}", .{atom_index});
|
||||
|
||||
// Remove any relocs and base relocs associated with this Atom
|
||||
|
|
@ -2124,7 +2129,8 @@ fn growAtom(self: *MachO, atom_index: Atom.Index, new_atom_size: u64, alignment:
|
|||
}
|
||||
|
||||
pub fn allocateSymbol(self: *MachO) !u32 {
|
||||
try self.locals.ensureUnusedCapacity(self.base.allocator, 1);
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.locals.ensureUnusedCapacity(gpa, 1);
|
||||
|
||||
const index = blk: {
|
||||
if (self.locals_free_list.popOrNull()) |index| {
|
||||
|
|
@ -2150,7 +2156,8 @@ pub fn allocateSymbol(self: *MachO) !u32 {
|
|||
}
|
||||
|
||||
fn allocateGlobal(self: *MachO) !u32 {
|
||||
try self.globals.ensureUnusedCapacity(self.base.allocator, 1);
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.globals.ensureUnusedCapacity(gpa, 1);
|
||||
|
||||
const index = blk: {
|
||||
if (self.globals_free_list.popOrNull()) |index| {
|
||||
|
|
@ -2171,7 +2178,8 @@ fn allocateGlobal(self: *MachO) !u32 {
|
|||
|
||||
pub fn addGotEntry(self: *MachO, target: SymbolWithLoc) !void {
|
||||
if (self.got_table.lookup.contains(target)) return;
|
||||
const got_index = try self.got_table.allocateEntry(self.base.allocator, target);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const got_index = try self.got_table.allocateEntry(gpa, target);
|
||||
if (self.got_section_index == null) {
|
||||
self.got_section_index = try self.initSection("__DATA_CONST", "__got", .{
|
||||
.flags = macho.S_NON_LAZY_SYMBOL_POINTERS,
|
||||
|
|
@ -2186,7 +2194,8 @@ pub fn addGotEntry(self: *MachO, target: SymbolWithLoc) !void {
|
|||
|
||||
pub fn addStubEntry(self: *MachO, target: SymbolWithLoc) !void {
|
||||
if (self.stub_table.lookup.contains(target)) return;
|
||||
const stub_index = try self.stub_table.allocateEntry(self.base.allocator, target);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const stub_index = try self.stub_table.allocateEntry(gpa, target);
|
||||
if (self.stubs_section_index == null) {
|
||||
self.stubs_section_index = try self.initSection("__TEXT", "__stubs", .{
|
||||
.flags = macho.S_SYMBOL_STUBS |
|
||||
|
|
@ -2212,7 +2221,8 @@ pub fn addStubEntry(self: *MachO, target: SymbolWithLoc) !void {
|
|||
|
||||
pub fn addTlvPtrEntry(self: *MachO, target: SymbolWithLoc) !void {
|
||||
if (self.tlv_ptr_table.lookup.contains(target)) return;
|
||||
_ = try self.tlv_ptr_table.allocateEntry(self.base.allocator, target);
|
||||
const gpa = self.base.comp.gpa;
|
||||
_ = try self.tlv_ptr_table.allocateEntry(gpa, target);
|
||||
if (self.tlv_ptr_section_index == null) {
|
||||
self.tlv_ptr_section_index = try self.initSection("__DATA", "__thread_ptrs", .{
|
||||
.flags = macho.S_THREAD_LOCAL_VARIABLE_POINTERS,
|
||||
|
|
@ -2236,7 +2246,8 @@ pub fn updateFunc(self: *MachO, mod: *Module, func_index: InternPool.Index, air:
|
|||
self.freeUnnamedConsts(decl_index);
|
||||
Atom.freeRelocations(self, atom_index);
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
|
||||
const gpa = self.base.comp.gpa;
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
||||
var decl_state = if (self.d_sym) |*d_sym|
|
||||
|
|
@ -2279,7 +2290,7 @@ pub fn updateFunc(self: *MachO, mod: *Module, func_index: InternPool.Index, air:
|
|||
}
|
||||
|
||||
pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl_index: InternPool.DeclIndex) !u32 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const gop = try self.unnamed_const_atoms.getOrPut(gpa, decl_index);
|
||||
if (!gop.found_existing) {
|
||||
|
|
@ -2318,7 +2329,7 @@ fn lowerConst(
|
|||
sect_id: u8,
|
||||
src_loc: Module.SrcLoc,
|
||||
) !LowerConstResult {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
|
@ -2366,6 +2377,7 @@ pub fn updateDecl(self: *MachO, mod: *Module, decl_index: InternPool.DeclIndex)
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.comp.gpa;
|
||||
const decl = mod.declPtr(decl_index);
|
||||
|
||||
if (decl.val.getExternFunc(mod)) |_| {
|
||||
|
|
@ -2375,8 +2387,8 @@ pub fn updateDecl(self: *MachO, mod: *Module, decl_index: InternPool.DeclIndex)
|
|||
if (decl.isExtern(mod)) {
|
||||
// TODO make this part of getGlobalSymbol
|
||||
const name = mod.intern_pool.stringToSlice(decl.name);
|
||||
const sym_name = try std.fmt.allocPrint(self.base.allocator, "_{s}", .{name});
|
||||
defer self.base.allocator.free(sym_name);
|
||||
const sym_name = try std.fmt.allocPrint(gpa, "_{s}", .{name});
|
||||
defer gpa.free(sym_name);
|
||||
_ = try self.addUndefined(sym_name, .{ .add_got = true });
|
||||
return;
|
||||
}
|
||||
|
|
@ -2391,7 +2403,7 @@ pub fn updateDecl(self: *MachO, mod: *Module, decl_index: InternPool.DeclIndex)
|
|||
const sym_index = self.getAtom(atom_index).getSymbolIndex().?;
|
||||
Atom.freeRelocations(self, atom_index);
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
||||
var decl_state: ?Dwarf.DeclState = if (self.d_sym) |*d_sym|
|
||||
|
|
@ -2449,7 +2461,7 @@ fn updateLazySymbolAtom(
|
|||
atom_index: Atom.Index,
|
||||
section_index: u8,
|
||||
) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
|
||||
var required_alignment: Alignment = .none;
|
||||
|
|
@ -2515,7 +2527,8 @@ fn updateLazySymbolAtom(
|
|||
|
||||
pub fn getOrCreateAtomForLazySymbol(self: *MachO, sym: File.LazySymbol) !Atom.Index {
|
||||
const mod = self.base.options.module.?;
|
||||
const gop = try self.lazy_syms.getOrPut(self.base.allocator, sym.getDecl(mod));
|
||||
const gpa = self.base.comp.gpa;
|
||||
const gop = try self.lazy_syms.getOrPut(gpa, sym.getDecl(mod));
|
||||
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
|
||||
if (!gop.found_existing) gop.value_ptr.* = .{};
|
||||
const metadata: struct { atom: *Atom.Index, state: *LazySymbolMetadata.State } = switch (sym.kind) {
|
||||
|
|
@ -2529,7 +2542,7 @@ pub fn getOrCreateAtomForLazySymbol(self: *MachO, sym: File.LazySymbol) !Atom.In
|
|||
.unused => {
|
||||
const sym_index = try self.allocateSymbol();
|
||||
metadata.atom.* = try self.createAtom(sym_index, .{});
|
||||
try self.atom_by_index_table.putNoClobber(self.base.allocator, sym_index, metadata.atom.*);
|
||||
try self.atom_by_index_table.putNoClobber(gpa, sym_index, metadata.atom.*);
|
||||
},
|
||||
.pending_flush => return metadata.atom.*,
|
||||
.flushed => {},
|
||||
|
|
@ -2556,7 +2569,7 @@ fn updateThreadlocalVariable(self: *MachO, module: *Module, decl_index: InternPo
|
|||
const init_sym_index = init_atom.getSymbolIndex().?;
|
||||
Atom.freeRelocations(self, init_atom_index);
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var code_buffer = std.ArrayList(u8).init(gpa);
|
||||
defer code_buffer.deinit();
|
||||
|
|
@ -2640,11 +2653,12 @@ fn updateThreadlocalVariable(self: *MachO, module: *Module, decl_index: InternPo
|
|||
}
|
||||
|
||||
pub fn getOrCreateAtomForDecl(self: *MachO, decl_index: InternPool.DeclIndex) !Atom.Index {
|
||||
const gop = try self.decls.getOrPut(self.base.allocator, decl_index);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const gop = try self.decls.getOrPut(gpa, decl_index);
|
||||
if (!gop.found_existing) {
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom_index = try self.createAtom(sym_index, .{});
|
||||
try self.atom_by_index_table.putNoClobber(self.base.allocator, sym_index, atom_index);
|
||||
try self.atom_by_index_table.putNoClobber(gpa, sym_index, atom_index);
|
||||
gop.value_ptr.* = .{
|
||||
.atom = atom_index,
|
||||
.section = self.getDeclOutputSection(decl_index),
|
||||
|
|
@ -2694,7 +2708,7 @@ fn getDeclOutputSection(self: *MachO, decl_index: InternPool.DeclIndex) u8 {
|
|||
}
|
||||
|
||||
fn updateDeclCode(self: *MachO, decl_index: InternPool.DeclIndex, code: []u8) !u64 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const decl = mod.declPtr(decl_index);
|
||||
|
||||
|
|
@ -2787,7 +2801,7 @@ pub fn updateExports(
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const metadata = switch (exported) {
|
||||
.decl_index => |decl_index| blk: {
|
||||
|
|
@ -2912,7 +2926,7 @@ pub fn deleteDeclExport(
|
|||
if (self.llvm_object) |_| return;
|
||||
const metadata = self.decls.getPtr(decl_index) orelse return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const exp_name = try std.fmt.allocPrint(gpa, "_{s}", .{mod.intern_pool.stringToSlice(name)});
|
||||
defer gpa.free(exp_name);
|
||||
|
|
@ -2941,7 +2955,7 @@ pub fn deleteDeclExport(
|
|||
}
|
||||
|
||||
fn freeUnnamedConsts(self: *MachO, decl_index: InternPool.DeclIndex) void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const unnamed_consts = self.unnamed_const_atoms.getPtr(decl_index) orelse return;
|
||||
for (unnamed_consts.items) |atom| {
|
||||
self.freeAtom(atom);
|
||||
|
|
@ -2951,6 +2965,7 @@ fn freeUnnamedConsts(self: *MachO, decl_index: InternPool.DeclIndex) void {
|
|||
|
||||
pub fn freeDecl(self: *MachO, decl_index: InternPool.DeclIndex) void {
|
||||
if (self.llvm_object) |llvm_object| return llvm_object.freeDecl(decl_index);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const decl = mod.declPtr(decl_index);
|
||||
|
||||
|
|
@ -2960,7 +2975,7 @@ pub fn freeDecl(self: *MachO, decl_index: InternPool.DeclIndex) void {
|
|||
var kv = const_kv;
|
||||
self.freeAtom(kv.value.atom);
|
||||
self.freeUnnamedConsts(decl_index);
|
||||
kv.value.exports.deinit(self.base.allocator);
|
||||
kv.value.exports.deinit(gpa);
|
||||
}
|
||||
|
||||
if (self.d_sym) |*d_sym| {
|
||||
|
|
@ -2993,7 +3008,7 @@ pub fn lowerAnonDecl(
|
|||
explicit_alignment: InternPool.Alignment,
|
||||
src_loc: Module.SrcLoc,
|
||||
) !codegen.Result {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.options.module.?;
|
||||
const ty = Type.fromInterned(mod.intern_pool.typeOf(decl_val));
|
||||
const decl_alignment = switch (explicit_alignment) {
|
||||
|
|
@ -3060,7 +3075,7 @@ pub fn getAnonDeclVAddr(self: *MachO, decl_val: InternPool.Index, reloc_info: li
|
|||
fn populateMissingMetadata(self: *MachO) !void {
|
||||
assert(self.mode == .incremental);
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const cpu_arch = self.base.options.target.cpu.arch;
|
||||
const pagezero_vmsize = self.calcPagezeroSize();
|
||||
|
||||
|
|
@ -3228,7 +3243,8 @@ const InitSectionOpts = struct {
|
|||
pub fn initSection(self: *MachO, segname: []const u8, sectname: []const u8, opts: InitSectionOpts) !u8 {
|
||||
log.debug("creating section '{s},{s}'", .{ segname, sectname });
|
||||
const index = @as(u8, @intCast(self.sections.slice().len));
|
||||
try self.sections.append(self.base.allocator, .{
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.sections.append(gpa, .{
|
||||
.segment_index = undefined, // Segments will be created automatically later down the pipeline
|
||||
.header = .{
|
||||
.sectname = makeStaticString(sectname),
|
||||
|
|
@ -3248,7 +3264,7 @@ fn allocateSection(self: *MachO, segname: []const u8, sectname: []const u8, opts
|
|||
flags: u32 = macho.S_REGULAR,
|
||||
reserved2: u32 = 0,
|
||||
}) !u8 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const page_size = getPageSize(self.base.options.target.cpu.arch);
|
||||
// In incremental context, we create one section per segment pairing. This way,
|
||||
// we can move the segment in raw file as we please.
|
||||
|
|
@ -3521,7 +3537,7 @@ fn allocateAtom(self: *MachO, atom_index: Atom.Index, new_atom_size: u64, alignm
|
|||
|
||||
pub fn getGlobalSymbol(self: *MachO, name: []const u8, lib_name: ?[]const u8) !u32 {
|
||||
_ = lib_name;
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const sym_name = try std.fmt.allocPrint(gpa, "_{s}", .{name});
|
||||
defer gpa.free(sym_name);
|
||||
return self.addUndefined(sym_name, .{ .add_stub = true });
|
||||
|
|
@ -3582,7 +3598,7 @@ pub fn writeLinkeditSegmentData(self: *MachO) !void {
|
|||
}
|
||||
|
||||
fn collectRebaseDataFromTableSection(self: *MachO, sect_id: u8, rebase: *Rebase, table: anytype) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const header = self.sections.items(.header)[sect_id];
|
||||
const segment_index = self.sections.items(.segment_index)[sect_id];
|
||||
const segment = self.segments.items[segment_index];
|
||||
|
|
@ -3605,7 +3621,7 @@ fn collectRebaseDataFromTableSection(self: *MachO, sect_id: u8, rebase: *Rebase,
|
|||
}
|
||||
|
||||
fn collectRebaseData(self: *MachO, rebase: *Rebase) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const slice = self.sections.slice();
|
||||
|
||||
for (self.rebases.keys(), 0..) |atom_index, i| {
|
||||
|
|
@ -3715,7 +3731,7 @@ fn collectRebaseData(self: *MachO, rebase: *Rebase) !void {
|
|||
}
|
||||
|
||||
fn collectBindDataFromTableSection(self: *MachO, sect_id: u8, bind: anytype, table: anytype) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const header = self.sections.items(.header)[sect_id];
|
||||
const segment_index = self.sections.items(.segment_index)[sect_id];
|
||||
const segment = self.segments.items[segment_index];
|
||||
|
|
@ -3746,7 +3762,7 @@ fn collectBindDataFromTableSection(self: *MachO, sect_id: u8, bind: anytype, tab
|
|||
}
|
||||
|
||||
fn collectBindData(self: *MachO, bind: anytype, raw_bindings: anytype) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const slice = self.sections.slice();
|
||||
|
||||
for (raw_bindings.keys(), 0..) |atom_index, i| {
|
||||
|
|
@ -3885,12 +3901,13 @@ fn collectBindData(self: *MachO, bind: anytype, raw_bindings: anytype) !void {
|
|||
|
||||
fn collectLazyBindData(self: *MachO, bind: anytype) !void {
|
||||
const sect_id = self.la_symbol_ptr_section_index orelse return;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.collectBindDataFromTableSection(sect_id, bind, self.stub_table);
|
||||
try bind.finalize(self.base.allocator, self);
|
||||
try bind.finalize(gpa, self);
|
||||
}
|
||||
|
||||
fn collectExportData(self: *MachO, trie: *Trie) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
// TODO handle macho.EXPORT_SYMBOL_FLAGS_REEXPORT and macho.EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER.
|
||||
log.debug("generating export trie", .{});
|
||||
|
|
@ -3922,7 +3939,7 @@ fn writeDyldInfoData(self: *MachO) !void {
|
|||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var rebase = Rebase{};
|
||||
defer rebase.deinit(gpa);
|
||||
|
|
@ -4046,7 +4063,7 @@ fn addSymbolToFunctionStarts(self: *MachO, sym_loc: SymbolWithLoc, addresses: *s
|
|||
}
|
||||
|
||||
fn writeFunctionStarts(self: *MachO) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const seg = self.segments.items[self.header_segment_cmd_index.?];
|
||||
|
||||
// We need to sort by address first
|
||||
|
|
@ -4133,7 +4150,7 @@ fn filterDataInCode(
|
|||
}
|
||||
|
||||
pub fn writeDataInCode(self: *MachO) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var out_dice = std.ArrayList(macho.data_in_code_entry).init(gpa);
|
||||
defer out_dice.deinit();
|
||||
|
||||
|
|
@ -4211,13 +4228,14 @@ fn addLocalToSymtab(self: *MachO, sym_loc: SymbolWithLoc, locals: *std.ArrayList
|
|||
if (sym.n_desc == N_BOUNDARY) return; // boundary symbol, skip
|
||||
if (sym.ext()) return; // an export lands in its own symtab section, skip
|
||||
if (self.symbolIsTemp(sym_loc)) return; // local temp symbol, skip
|
||||
const gpa = self.base.comp.gpa;
|
||||
var out_sym = sym;
|
||||
out_sym.n_strx = try self.strtab.insert(self.base.allocator, self.getSymbolName(sym_loc));
|
||||
out_sym.n_strx = try self.strtab.insert(gpa, self.getSymbolName(sym_loc));
|
||||
try locals.append(out_sym);
|
||||
}
|
||||
|
||||
fn writeSymtab(self: *MachO) !SymtabCtx {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
var locals = std.ArrayList(macho.nlist_64).init(gpa);
|
||||
defer locals.deinit();
|
||||
|
|
@ -4322,7 +4340,7 @@ fn generateSymbolStabs(
|
|||
) !void {
|
||||
log.debug("generating stabs for '{s}'", .{object.name});
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
var debug_info = object.parseDwarfInfo();
|
||||
|
||||
var lookup = DwarfInfo.AbbrevLookupTable.init(gpa);
|
||||
|
|
@ -4450,7 +4468,7 @@ fn generateSymbolStabsForSymbol(
|
|||
lookup: ?DwarfInfo.SubprogramLookupByName,
|
||||
buf: *[4]macho.nlist_64,
|
||||
) ![]const macho.nlist_64 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const object = self.objects.items[sym_loc.getFile().?];
|
||||
const sym = self.getSymbol(sym_loc);
|
||||
const sym_name = self.getSymbolName(sym_loc);
|
||||
|
|
@ -4536,7 +4554,7 @@ fn generateSymbolStabsForSymbol(
|
|||
}
|
||||
|
||||
pub fn writeStrtab(self: *MachO) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const seg = self.getLinkeditSegmentPtr();
|
||||
const offset = seg.fileoff + seg.filesize;
|
||||
assert(mem.isAlignedGeneric(u64, offset, @alignOf(u64)));
|
||||
|
|
@ -4565,7 +4583,7 @@ const SymtabCtx = struct {
|
|||
};
|
||||
|
||||
pub fn writeDysymtab(self: *MachO, ctx: SymtabCtx) !void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const nstubs = @as(u32, @intCast(self.stub_table.lookup.count()));
|
||||
const ngot_entries = @as(u32, @intCast(self.got_table.lookup.count()));
|
||||
const nindirectsyms = nstubs * 2 + ngot_entries;
|
||||
|
|
@ -4671,7 +4689,8 @@ pub fn writeCodeSignature(self: *MachO, comp: *const Compilation, code_sig: *Cod
|
|||
const seg = self.segments.items[seg_id];
|
||||
const offset = self.codesig_cmd.dataoff;
|
||||
|
||||
var buffer = std.ArrayList(u8).init(self.base.allocator);
|
||||
const gpa = self.base.comp.gpa;
|
||||
var buffer = std.ArrayList(u8).init(gpa);
|
||||
defer buffer.deinit();
|
||||
try buffer.ensureTotalCapacityPrecise(code_sig.size());
|
||||
try code_sig.writeAdhocSignature(comp, .{
|
||||
|
|
@ -4817,7 +4836,7 @@ pub fn ptraceDetach(self: *MachO, pid: std.os.pid_t) !void {
|
|||
}
|
||||
|
||||
pub fn addUndefined(self: *MachO, name: []const u8, flags: RelocFlags) !u32 {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const gop = try self.getOrPutGlobalPtr(name);
|
||||
const global_index = self.getGlobalIndex(name).?;
|
||||
|
|
@ -4842,7 +4861,8 @@ pub fn addUndefined(self: *MachO, name: []const u8, flags: RelocFlags) !u32 {
|
|||
}
|
||||
|
||||
fn updateRelocActions(self: *MachO, global_index: u32, flags: RelocFlags) !void {
|
||||
const act_gop = try self.actions.getOrPut(self.base.allocator, global_index);
|
||||
const gpa = self.base.comp.gpa;
|
||||
const act_gop = try self.actions.getOrPut(gpa, global_index);
|
||||
if (!act_gop.found_existing) {
|
||||
act_gop.value_ptr.* = .{};
|
||||
}
|
||||
|
|
@ -5022,7 +5042,7 @@ pub fn getOrPutGlobalPtr(self: *MachO, name: []const u8) !GetOrPutGlobalPtrResul
|
|||
if (self.getGlobalPtr(name)) |ptr| {
|
||||
return GetOrPutGlobalPtrResult{ .found_existing = true, .value_ptr = ptr };
|
||||
}
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const global_index = try self.allocateGlobal();
|
||||
const global_name = try gpa.dupe(u8, name);
|
||||
_ = try self.resolver.put(gpa, global_name, global_index);
|
||||
|
|
@ -5171,6 +5191,7 @@ pub fn handleAndReportParseError(
|
|||
err: ParseError,
|
||||
ctx: *const ParseErrorCtx,
|
||||
) error{OutOfMemory}!void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
const cpu_arch = self.base.options.target.cpu.arch;
|
||||
switch (err) {
|
||||
error.DylibAlreadyExists => {},
|
||||
|
|
@ -5188,7 +5209,7 @@ pub fn handleAndReportParseError(
|
|||
},
|
||||
error.UnknownFileType => try self.reportParseError(path, "unknown file type", .{}),
|
||||
error.InvalidTarget, error.InvalidTargetFatLibrary => {
|
||||
var targets_string = std.ArrayList(u8).init(self.base.allocator);
|
||||
var targets_string = std.ArrayList(u8).init(gpa);
|
||||
defer targets_string.deinit();
|
||||
|
||||
if (ctx.detected_targets.items.len > 1) {
|
||||
|
|
@ -5226,7 +5247,7 @@ fn reportMissingLibraryError(
|
|||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) error{OutOfMemory}!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
|
||||
const notes = try gpa.alloc(File.ErrorMsg, checked_paths.len);
|
||||
errdefer gpa.free(notes);
|
||||
|
|
@ -5246,7 +5267,7 @@ fn reportDependencyError(
|
|||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) error{OutOfMemory}!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
|
||||
var notes = try std.ArrayList(File.ErrorMsg).initCapacity(gpa, 2);
|
||||
defer notes.deinit();
|
||||
|
|
@ -5266,7 +5287,7 @@ pub fn reportParseError(
|
|||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) error{OutOfMemory}!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
|
||||
var notes = try gpa.alloc(File.ErrorMsg, 1);
|
||||
errdefer gpa.free(notes);
|
||||
|
|
@ -5283,7 +5304,7 @@ pub fn reportUnresolvedBoundarySymbol(
|
|||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) error{OutOfMemory}!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
|
||||
var notes = try gpa.alloc(File.ErrorMsg, 1);
|
||||
errdefer gpa.free(notes);
|
||||
|
|
@ -5295,7 +5316,7 @@ pub fn reportUnresolvedBoundarySymbol(
|
|||
}
|
||||
|
||||
pub fn reportUndefined(self: *MachO) error{OutOfMemory}!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const count = self.unresolved.count();
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, count);
|
||||
|
||||
|
|
@ -5327,7 +5348,7 @@ fn reportSymbolCollision(
|
|||
first: SymbolWithLoc,
|
||||
other: SymbolWithLoc,
|
||||
) error{OutOfMemory}!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
|
||||
|
||||
var notes = try std.ArrayList(File.ErrorMsg).initCapacity(gpa, 2);
|
||||
|
|
@ -5355,7 +5376,7 @@ fn reportSymbolCollision(
|
|||
}
|
||||
|
||||
fn reportUnhandledSymbolType(self: *MachO, sym_with_loc: SymbolWithLoc) error{OutOfMemory}!void {
|
||||
const gpa = self.base.allocator;
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.misc_errors.ensureUnusedCapacity(gpa, 1);
|
||||
|
||||
const notes = try gpa.alloc(File.ErrorMsg, 1);
|
||||
|
|
|
|||
2073
src/main.zig
2073
src/main.zig
File diff suppressed because it is too large
Load diff
|
|
@ -3,6 +3,8 @@ const Type = @import("type.zig").Type;
|
|||
const AddressSpace = std.builtin.AddressSpace;
|
||||
const Alignment = @import("InternPool.zig").Alignment;
|
||||
|
||||
pub const default_stack_protector_buffer_size = 4;
|
||||
|
||||
pub const ArchOsAbi = struct {
|
||||
arch: std.Target.Cpu.Arch,
|
||||
os: std.Target.Os.Tag,
|
||||
|
|
@ -204,11 +206,18 @@ pub fn supports_fpic(target: std.Target) bool {
|
|||
return target.os.tag != .windows and target.os.tag != .uefi;
|
||||
}
|
||||
|
||||
pub fn isSingleThreaded(target: std.Target) bool {
|
||||
pub fn alwaysSingleThreaded(target: std.Target) bool {
|
||||
_ = target;
|
||||
return false;
|
||||
}
|
||||
|
||||
pub fn defaultSingleThreaded(target: std.Target) bool {
|
||||
return switch (target.cpu.arch) {
|
||||
.wasm32, .wasm64 => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
/// Valgrind supports more, but Zig does not support them yet.
|
||||
pub fn hasValgrindSupport(target: std.Target) bool {
|
||||
switch (target.cpu.arch) {
|
||||
|
|
@ -375,12 +384,17 @@ pub fn classifyCompilerRtLibName(target: std.Target, name: []const u8) CompilerR
|
|||
}
|
||||
|
||||
pub fn hasDebugInfo(target: std.Target) bool {
|
||||
if (target.cpu.arch.isNvptx()) {
|
||||
// TODO: not sure how to test "ptx >= 7.5" with featureset
|
||||
return std.Target.nvptx.featureSetHas(target.cpu.features, .ptx75);
|
||||
}
|
||||
|
||||
return true;
|
||||
return switch (target.cpu.arch) {
|
||||
.nvptx, .nvptx64 => std.Target.nvptx.featureSetHas(target.cpu.features, .ptx75) or
|
||||
std.Target.nvptx.featureSetHas(target.cpu.features, .ptx76) or
|
||||
std.Target.nvptx.featureSetHas(target.cpu.features, .ptx77) or
|
||||
std.Target.nvptx.featureSetHas(target.cpu.features, .ptx78) or
|
||||
std.Target.nvptx.featureSetHas(target.cpu.features, .ptx80) or
|
||||
std.Target.nvptx.featureSetHas(target.cpu.features, .ptx81),
|
||||
.wasm32, .wasm64 => false,
|
||||
.bpfel, .bpfeb => false,
|
||||
else => true,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn defaultCompilerRtOptimizeMode(target: std.Target) std.builtin.OptimizeMode {
|
||||
|
|
@ -619,3 +633,36 @@ pub fn fnCallConvAllowsZigTypes(target: std.Target, cc: std.builtin.CallingConve
|
|||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn zigBackend(target: std.Target, use_llvm: bool) std.builtin.CompilerBackend {
|
||||
if (use_llvm) return .stage2_llvm;
|
||||
if (target.ofmt == .c) return .stage2_c;
|
||||
return switch (target.cpu.arch) {
|
||||
.wasm32, .wasm64 => std.builtin.CompilerBackend.stage2_wasm,
|
||||
.arm, .armeb, .thumb, .thumbeb => .stage2_arm,
|
||||
.x86_64 => .stage2_x86_64,
|
||||
.x86 => .stage2_x86,
|
||||
.aarch64, .aarch64_be, .aarch64_32 => .stage2_aarch64,
|
||||
.riscv64 => .stage2_riscv64,
|
||||
.sparc64 => .stage2_sparc64,
|
||||
.spirv64 => .stage2_spirv64,
|
||||
else => .other,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn defaultEntrySymbolName(
|
||||
target: std.Target,
|
||||
/// May be `undefined` when `target` is not WASI.
|
||||
wasi_exec_model: std.builtin.WasiExecModel,
|
||||
) ?[]const u8 {
|
||||
return switch (target.ofmt) {
|
||||
.coff => "wWinMainCRTStartup",
|
||||
.macho => "_main",
|
||||
.elf, .plan9 => "_start",
|
||||
.wasm => switch (wasi_exec_model) {
|
||||
.reactor => "_initialize",
|
||||
.command => "_start",
|
||||
},
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue