Mirror of https://codeberg.org/ziglang/zig.git, synced 2025-12-06 13:54:21 +00:00

Merge pull request #21882 from alexrp/compiler-fixes

compiler: Fix some real and theoretical miscompilations with `allowzero` and `volatile`

Commit 5b606d435d: 12 changed files with 512 additions and 88 deletions
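At the language level, the volatile half of the fix boils down to cases like the following (adapted from the tests added at the end of this diff): every one of these memory operations must survive codegen, even though the loaded value is unused and the first store is immediately overwritten.

fn example(ptr: *volatile i16) void {
    _ = ptr.*; // an unused volatile load must still be emitted
    ptr.* = 123; // a "dead" volatile store must not be eliminated
    ptr.* = 321;
}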
@@ -550,6 +550,11 @@ pub fn build(b: *std.Build) !void {
         .skip_non_native = skip_non_native,
         .skip_libc = skip_libc,
     })) |test_debugger_step| test_step.dependOn(test_debugger_step);
+    if (tests.addLlvmIrTests(b, .{
+        .enable_llvm = enable_llvm,
+        .test_filters = test_filters,
+        .test_target_filters = test_target_filters,
+    })) |test_llvm_ir_step| test_step.dependOn(test_llvm_ir_step);

     try addWasiUpdateStep(b, version);

@@ -33,6 +33,7 @@ omit_frame_pointer: ?bool,
 error_tracing: ?bool,
 link_libc: ?bool,
 link_libcpp: ?bool,
+no_builtin: ?bool,

 /// Symbols to be exported when compiling to WebAssembly.
 export_symbol_names: []const []const u8 = &.{},

@@ -268,6 +269,7 @@ pub const CreateOptions = struct {
     /// more difficult to obtain stack traces. Has target-dependent effects.
     omit_frame_pointer: ?bool = null,
     error_tracing: ?bool = null,
+    no_builtin: ?bool = null,
 };

 pub const Import = struct {

@@ -314,6 +316,7 @@ pub fn init(
         .omit_frame_pointer = options.omit_frame_pointer,
         .error_tracing = options.error_tracing,
         .export_symbol_names = &.{},
+        .no_builtin = options.no_builtin,
     };

     m.import_table.ensureUnusedCapacity(allocator, options.imports.len) catch @panic("OOM");

@@ -564,6 +567,7 @@ pub fn appendZigProcessFlags(
     try addFlag(zig_args, m.valgrind, "-fvalgrind", "-fno-valgrind");
     try addFlag(zig_args, m.pic, "-fPIC", "-fno-PIC");
     try addFlag(zig_args, m.red_zone, "-mred-zone", "-mno-red-zone");
+    try addFlag(zig_args, m.no_builtin, "-fno-builtin", "-fbuiltin");

     if (m.sanitize_c) |sc| switch (sc) {
         .off => try zig_args.append("-fno-sanitize-c"),

@@ -229,8 +229,6 @@ is_linking_libc: bool = false,
 /// Computed during make().
 is_linking_libcpp: bool = false,

-no_builtin: bool = false,
-
 /// Populated during the make phase when there is a long-lived compiler process.
 /// Managed by the build runner, not user build script.
 zig_process: ?*Step.ZigProcess,

@@ -1646,10 +1644,6 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
         }
     }

-    if (compile.no_builtin) {
-        try zig_args.append("-fno-builtin");
-    }
-
     if (b.sysroot) |sysroot| {
         try zig_args.appendSlice(&[_][]const u8{ "--sysroot", sysroot });
     }
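For build scripts, the deleted step-level field is superseded by the module-level option; a minimal sketch of the new spelling (the `exe`, `target`, and `optimize` names are illustrative):

const exe = b.addExecutable(.{
    .name = "app",
    .root_source_file = b.path("src/main.zig"),
    .target = target,
    .optimize = optimize,
});
// Previously: exe.no_builtin = true; the flag now lives on the module:
exe.root_module.no_builtin = true;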
src/Air.zig (22 lines changed)

@@ -1673,6 +1673,7 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index, ip: *const InternPool) bool {
     const data = air.instructions.items(.data)[@intFromEnum(inst)];
     return switch (air.instructions.items(.tag)[@intFromEnum(inst)]) {
         .arg,
+        .assembly,
         .block,
         .loop,
         .repeat,

@@ -1816,12 +1817,8 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index, ip: *const InternPool) bool {
         .cmp_vector_optimized,
         .is_null,
         .is_non_null,
-        .is_null_ptr,
-        .is_non_null_ptr,
         .is_err,
         .is_non_err,
-        .is_err_ptr,
-        .is_non_err_ptr,
         .bool_and,
         .bool_or,
         .fptrunc,

@@ -1834,7 +1831,6 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index, ip: *const InternPool) bool {
         .unwrap_errunion_payload,
         .unwrap_errunion_err,
         .unwrap_errunion_payload_ptr,
-        .unwrap_errunion_err_ptr,
         .wrap_errunion_payload,
         .wrap_errunion_err,
         .struct_field_ptr,

@@ -1879,17 +1875,13 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index, ip: *const InternPool) bool {
         .work_group_id,
         => false,

-        .assembly => {
-            const extra = air.extraData(Air.Asm, data.ty_pl.payload);
-            const is_volatile = @as(u1, @truncate(extra.data.flags >> 31)) != 0;
-            return is_volatile or if (extra.data.outputs_len == 1)
-                @as(Air.Inst.Ref, @enumFromInt(air.extra[extra.end])) != .none
-            else
-                extra.data.outputs_len > 1;
-        },
-        .load => air.typeOf(data.ty_op.operand, ip).isVolatilePtrIp(ip),
+        .is_non_null_ptr, .is_null_ptr, .is_non_err_ptr, .is_err_ptr => air.typeOf(data.un_op, ip).isVolatilePtrIp(ip),
+        .load, .unwrap_errunion_err_ptr => air.typeOf(data.ty_op.operand, ip).isVolatilePtrIp(ip),
         .slice_elem_val, .ptr_elem_val => air.typeOf(data.bin_op.lhs, ip).isVolatilePtrIp(ip),
-        .atomic_load => air.typeOf(data.atomic_load.ptr, ip).isVolatilePtrIp(ip),
+        .atomic_load => switch (data.atomic_load.order) {
+            .unordered, .monotonic => air.typeOf(data.atomic_load.ptr, ip).isVolatilePtrIp(ip),
+            else => true, // Stronger memory orderings have inter-thread side effects.
+        },
     };
 }
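The `.atomic_load` case is the subtle one: a load with acquire or stronger ordering orders surrounding memory operations, so it must be lowered even when its result is discarded, while a discarded unordered or monotonic load of a non-volatile pointer has no observable effect. The `test/llvm_ir.zig` cases added later in this diff pin down exactly this distinction:

fn example(ptr: *i16) void {
    _ = @atomicLoad(i16, ptr, .acquire); // must lower: ordering side effect
    _ = @atomicLoad(i16, ptr, .monotonic); // non-volatile and unused: may be dropped
}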
@@ -1769,8 +1769,15 @@ fn finishAirBookkeeping(func: *Func) void {
 fn finishAirResult(func: *Func, inst: Air.Inst.Index, result: MCValue) void {
     if (func.liveness.isUnused(inst)) switch (result) {
         .none, .dead, .unreach => {},
-        else => unreachable, // Why didn't the result die?
+        // Why didn't the result die?
+        .register => |r| if (r != .zero) unreachable,
+        else => unreachable,
     } else {
+        switch (result) {
+            .register => |r| if (r == .zero) unreachable, // Why did we discard a used result?
+            else => {},
+        }
         tracking_log.debug("%{d} => {} (birth)", .{ inst, result });
         func.inst_tracking.putAssumeCapacityNoClobber(inst, InstTracking.init(result));
         // In some cases, an operand may be reused as the result.

@@ -7729,9 +7736,12 @@ fn airAtomicLoad(func: *Func, inst: Air.Inst.Index) !void {
     const ptr_mcv = try func.resolveInst(atomic_load.ptr);

     const bit_size = elem_ty.bitSize(zcu);
-    if (bit_size > 64) return func.fail("TODO: airAtomicStore > 64 bits", .{});
+    if (bit_size > 64) return func.fail("TODO: airAtomicLoad > 64 bits", .{});

-    const result_mcv = try func.allocRegOrMem(elem_ty, inst, true);
+    const result_mcv: MCValue = if (func.liveness.isUnused(inst))
+        .{ .register = .zero }
+    else
+        try func.allocRegOrMem(elem_ty, inst, true);
     assert(result_mcv == .register); // should be less than 8 bytes

     if (order == .seq_cst) {

@@ -7747,11 +7757,10 @@ fn airAtomicLoad(func: *Func, inst: Air.Inst.Index) !void {
     try func.load(result_mcv, ptr_mcv, ptr_ty);

     switch (order) {
-        // Don't guarnetee other memory operations to be ordered after the load.
-        .unordered => {},
-        .monotonic => {},
-        // Make sure all previous reads happen before any reading or writing accurs.
-        .seq_cst, .acquire => {
+        // Don't guarantee other memory operations to be ordered after the load.
+        .unordered, .monotonic => {},
+        // Make sure all previous reads happen before any reading or writing occurs.
+        .acquire, .seq_cst => {
             _ = try func.addInst(.{
                 .tag = .fence,
                 .data = .{ .fence = .{

@@ -7793,6 +7802,17 @@ fn airAtomicStore(func: *Func, inst: Air.Inst.Index, order: std.builtin.AtomicOrder) !void {
     }

     try func.store(ptr_mcv, val_mcv, ptr_ty);
+
+    if (order == .seq_cst) {
+        _ = try func.addInst(.{
+            .tag = .fence,
+            .data = .{ .fence = .{
+                .pred = .rw,
+                .succ = .rw,
+            } },
+        });
+    }

     return func.finishAir(inst, .unreach, .{ bin_op.lhs, bin_op.rhs, .none });
 }
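On riscv64 a sequentially consistent atomic store is thus now followed by a full `fence rw, rw`, mirroring the fence-based lowering used for acquire and seq_cst loads above; at the language level:

fn publish(flag: *u32) void {
    @atomicStore(u32, flag, 1, .seq_cst); // now emits the store plus a trailing fence rw, rw
}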
@@ -106219,23 +106219,29 @@ fn airAtomicRmw(self: *CodeGen, inst: Air.Inst.Index) !void {

 fn airAtomicLoad(self: *CodeGen, inst: Air.Inst.Index) !void {
     const atomic_load = self.air.instructions.items(.data)[@intFromEnum(inst)].atomic_load;
+    const result: MCValue = result: {
+        const ptr_ty = self.typeOf(atomic_load.ptr);
+        const ptr_mcv = try self.resolveInst(atomic_load.ptr);
+        const ptr_lock = switch (ptr_mcv) {
+            .register => |reg| self.register_manager.lockRegAssumeUnused(reg),
+            else => null,
+        };
+        defer if (ptr_lock) |lock| self.register_manager.unlockReg(lock);

-    const ptr_ty = self.typeOf(atomic_load.ptr);
-    const ptr_mcv = try self.resolveInst(atomic_load.ptr);
-    const ptr_lock = switch (ptr_mcv) {
-        .register => |reg| self.register_manager.lockRegAssumeUnused(reg),
-        else => null,
-    };
-    defer if (ptr_lock) |lock| self.register_manager.unlockReg(lock);
+        const unused = self.liveness.isUnused(inst);

-    const dst_mcv =
-        if (self.reuseOperand(inst, atomic_load.ptr, 0, ptr_mcv))
-        ptr_mcv
-    else
-        try self.allocRegOrMem(inst, true);
+        const dst_mcv: MCValue = if (unused)
+            .{ .register = try self.register_manager.allocReg(null, self.regSetForType(ptr_ty.childType(self.pt.zcu))) }
+        else if (self.reuseOperand(inst, atomic_load.ptr, 0, ptr_mcv))
+            ptr_mcv
+        else
+            try self.allocRegOrMem(inst, true);

-    try self.load(dst_mcv, ptr_ty, ptr_mcv);
-    return self.finishAir(inst, dst_mcv, .{ atomic_load.ptr, .none, .none });
+        try self.load(dst_mcv, ptr_ty, ptr_mcv);
+
+        break :result if (unused) .unreach else dst_mcv;
+    };
+    return self.finishAir(inst, result, .{ atomic_load.ptr, .none, .none });
 }

 fn airAtomicStore(self: *CodeGen, inst: Air.Inst.Index, order: std.builtin.AtomicOrder) !void {
@@ -1341,7 +1341,10 @@ pub const Object = struct {
                     try attributes.addParamAttr(llvm_arg_i, .@"noalias", &o.builder);
                 }
             }
-            if (param_ty.zigTypeTag(zcu) != .optional) {
+            if (param_ty.zigTypeTag(zcu) != .optional and
+                !ptr_info.flags.is_allowzero and
+                ptr_info.flags.address_space == .generic)
+            {
                 try attributes.addParamAttr(llvm_arg_i, .nonnull, &o.builder);
             }
             if (ptr_info.flags.is_const) {

@@ -1419,8 +1422,6 @@ pub const Object = struct {
             }
         }

-        function_index.setAttributes(try attributes.finish(&o.builder), &o.builder);
-
         const file, const subprogram = if (!wip.strip) debug_info: {
             const file = try o.getDebugFile(file_scope);

@@ -1517,6 +1518,17 @@ pub const Object = struct {
             else => |e| return e,
         };

+        // If we saw any loads or stores involving `allowzero` pointers, we need to mark the whole
+        // function as considering null pointers valid so that LLVM's optimizers don't remove these
+        // operations on the assumption that they're undefined behavior.
+        if (fg.allowzero_access) {
+            try attributes.addFnAttr(.null_pointer_is_valid, &o.builder);
+        } else {
+            _ = try attributes.removeFnAttr(.null_pointer_is_valid);
+        }
+
+        function_index.setAttributes(try attributes.finish(&o.builder), &o.builder);
+
         if (fg.fuzz) |*f| {
             {
                 const array_llvm_ty = try o.builder.arrayType(f.pcs.items.len, .i8);
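The net effect on a function containing `allowzero` accesses is a deliberately weaker attribute set: the parameter loses `nonnull` and the function gains `null_pointer_is_valid`, so LLVM may no longer treat a null dereference as unreachable. A minimal trigger, mirroring the new tests at the end of this diff:

export fn entry(ptr: *allowzero i16) i16 {
    // This access marks the whole function `null_pointer_is_valid`.
    return ptr.*;
}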
@@ -4349,7 +4361,10 @@ pub const Object = struct {
                     try attributes.addParamAttr(llvm_arg_i, .@"noalias", &o.builder);
                 }
             }
-            if (!param_ty.isPtrLikeOptional(zcu) and !ptr_info.flags.is_allowzero) {
+            if (!param_ty.isPtrLikeOptional(zcu) and
+                !ptr_info.flags.is_allowzero and
+                ptr_info.flags.address_space == .generic)
+            {
                 try attributes.addParamAttr(llvm_arg_i, .nonnull, &o.builder);
             }
             switch (fn_info.cc) {

@@ -4667,6 +4682,15 @@ pub const FuncGen = struct {

     disable_intrinsics: bool,

+    /// Have we seen loads or stores involving `allowzero` pointers?
+    allowzero_access: bool = false,
+
+    pub fn maybeMarkAllowZeroAccess(self: *FuncGen, info: InternPool.Key.PtrType) void {
+        // LLVM already considers null pointers to be valid in non-generic address spaces, so avoid
+        // pessimizing optimization for functions with accesses to such pointers.
+        if (info.flags.address_space == .generic and info.flags.is_allowzero) self.allowzero_access = true;
+    }
+
     const Fuzz = struct {
         counters_variable: Builder.Variable.Index,
         pcs: std.ArrayListUnmanaged(Builder.Constant),
@@ -5392,7 +5416,10 @@ pub const FuncGen = struct {
                     try attributes.addParamAttr(llvm_arg_i, .@"noalias", &o.builder);
                 }
             }
-            if (param_ty.zigTypeTag(zcu) != .optional) {
+            if (param_ty.zigTypeTag(zcu) != .optional and
+                !ptr_info.flags.is_allowzero and
+                ptr_info.flags.address_space == .generic)
+            {
                 try attributes.addParamAttr(llvm_arg_i, .nonnull, &o.builder);
             }
             if (ptr_info.flags.is_const) {

@@ -5519,7 +5546,7 @@ pub const FuncGen = struct {
             ptr_ty.ptrAlignment(zcu).toLlvm(),
             try o.builder.intValue(.i8, 0xaa),
             len,
-            if (ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal,
+            .normal,
             self.disable_intrinsics,
         );
         const owner_mod = self.ng.ownerModule();

@@ -5754,8 +5781,8 @@ pub const FuncGen = struct {
         // of optionals that are not pointers.
         const is_by_ref = isByRef(scalar_ty, zcu);
         const opt_llvm_ty = try o.lowerType(scalar_ty);
-        const lhs_non_null = try self.optCmpNull(.ne, opt_llvm_ty, lhs, is_by_ref);
-        const rhs_non_null = try self.optCmpNull(.ne, opt_llvm_ty, rhs, is_by_ref);
+        const lhs_non_null = try self.optCmpNull(.ne, opt_llvm_ty, lhs, is_by_ref, .normal);
+        const rhs_non_null = try self.optCmpNull(.ne, opt_llvm_ty, rhs, is_by_ref, .normal);
         const llvm_i2 = try o.builder.intType(2);
         const lhs_non_null_i2 = try self.wip.cast(.zext, lhs_non_null, llvm_i2, "");
         const rhs_non_null_i2 = try self.wip.cast(.zext, rhs_non_null, llvm_i2, "");
@@ -6206,6 +6233,9 @@ pub const FuncGen = struct {
         const body: []const Air.Inst.Index = @ptrCast(self.air.extra[extra.end..][0..extra.data.body_len]);
         const err_union_ty = self.typeOf(extra.data.ptr).childType(zcu);
         const is_unused = self.liveness.isUnused(inst);
+
+        self.maybeMarkAllowZeroAccess(self.typeOf(extra.data.ptr).ptrInfo(zcu));
+
         return lowerTry(self, err_union_ptr, body, err_union_ty, true, true, is_unused, err_cold);
     }

@@ -6229,10 +6259,13 @@ pub const FuncGen = struct {
         if (!err_union_ty.errorUnionSet(zcu).errorSetIsEmpty(zcu)) {
             const loaded = loaded: {
+                const access_kind: Builder.MemoryAccessKind =
+                    if (err_union_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
+
                 if (!payload_has_bits) {
                     // TODO add alignment to this load
                     break :loaded if (operand_is_ptr)
-                        try fg.wip.load(.normal, error_type, err_union, .default, "")
+                        try fg.wip.load(access_kind, error_type, err_union, .default, "")
                     else
                         err_union;
                 }

@@ -6242,7 +6275,7 @@ pub const FuncGen = struct {
                     try fg.wip.gepStruct(err_union_llvm_ty, err_union, err_field_index, "");
                 // TODO add alignment to this load
                 break :loaded try fg.wip.load(
-                    .normal,
+                    if (operand_is_ptr) access_kind else .normal,
                     error_type,
                     err_field_ptr,
                     .default,

@@ -6751,10 +6784,14 @@ pub const FuncGen = struct {
             if (self.canElideLoad(body_tail))
                 return ptr;

+            self.maybeMarkAllowZeroAccess(slice_ty.ptrInfo(zcu));
+
             const elem_alignment = elem_ty.abiAlignment(zcu).toLlvm();
-            return self.loadByRef(ptr, elem_ty, elem_alignment, .normal);
+            return self.loadByRef(ptr, elem_ty, elem_alignment, if (slice_ty.isVolatilePtr(zcu)) .@"volatile" else .normal);
         }

+        self.maybeMarkAllowZeroAccess(slice_ty.ptrInfo(zcu));
+
         return self.load(ptr, slice_ty);
     }

@@ -6824,10 +6861,15 @@ pub const FuncGen = struct {
             &.{rhs}, "");
         if (isByRef(elem_ty, zcu)) {
             if (self.canElideLoad(body_tail)) return ptr;

+            self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu));
+
             const elem_alignment = elem_ty.abiAlignment(zcu).toLlvm();
-            return self.loadByRef(ptr, elem_ty, elem_alignment, .normal);
+            return self.loadByRef(ptr, elem_ty, elem_alignment, if (ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal);
         }

+        self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu));
+
         return self.load(ptr, ptr_ty);
     }
@@ -7235,6 +7277,8 @@ pub const FuncGen = struct {
                     }),
                 }

+                self.maybeMarkAllowZeroAccess(output_ty.ptrInfo(zcu));
+
                 // Pass any non-return outputs indirectly, if the constraint accepts a memory location
                 is_indirect.* = constraintAllowsMemory(constraint);
                 if (is_indirect.*) {

@@ -7341,10 +7385,11 @@ pub const FuncGen = struct {

                 // In the case of indirect inputs, LLVM requires the callsite to have
                 // an elementtype(<ty>) attribute.
-                llvm_param_attrs[llvm_param_i] = if (constraint[0] == '*')
-                    try o.lowerPtrElemTy(if (is_by_ref) arg_ty else arg_ty.childType(zcu))
-                else
-                    .none;
+                llvm_param_attrs[llvm_param_i] = if (constraint[0] == '*') blk: {
+                    if (!is_by_ref) self.maybeMarkAllowZeroAccess(arg_ty.ptrInfo(zcu));
+
+                    break :blk try o.lowerPtrElemTy(if (is_by_ref) arg_ty else arg_ty.childType(zcu));
+                } else .none;

                 llvm_param_i += 1;
                 total_i += 1;

@@ -7367,7 +7412,13 @@ pub const FuncGen = struct {
                     llvm_param_types[llvm_param_i] = llvm_rw_val.typeOfWip(&self.wip);
                 } else {
                     const alignment = rw_ty.abiAlignment(zcu).toLlvm();
-                    const loaded = try self.wip.load(.normal, llvm_elem_ty, llvm_rw_val, alignment, "");
+                    const loaded = try self.wip.load(
+                        if (rw_ty.isVolatilePtr(zcu)) .@"volatile" else .normal,
+                        llvm_elem_ty,
+                        llvm_rw_val,
+                        alignment,
+                        "",
+                    );
                     llvm_param_values[llvm_param_i] = loaded;
                     llvm_param_types[llvm_param_i] = llvm_elem_ty;
                 }
@@ -7530,9 +7581,13 @@ pub const FuncGen = struct {
             if (output != .none) {
                 const output_ptr = try self.resolveInst(output);
                 const output_ptr_ty = self.typeOf(output);

                 const alignment = output_ptr_ty.ptrAlignment(zcu).toLlvm();
-                _ = try self.wip.store(.normal, output_value, output_ptr, alignment);
+                _ = try self.wip.store(
+                    if (output_ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal,
+                    output_value,
+                    output_ptr,
+                    alignment,
+                );
             } else {
                 ret_val = output_value;
             }

@@ -7557,9 +7612,15 @@ pub const FuncGen = struct {
         const optional_ty = if (operand_is_ptr) operand_ty.childType(zcu) else operand_ty;
         const optional_llvm_ty = try o.lowerType(optional_ty);
         const payload_ty = optional_ty.optionalChild(zcu);

+        const access_kind: Builder.MemoryAccessKind =
+            if (operand_is_ptr and operand_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
+
+        if (operand_is_ptr) self.maybeMarkAllowZeroAccess(operand_ty.ptrInfo(zcu));
+
         if (optional_ty.optionalReprIsPayload(zcu)) {
             const loaded = if (operand_is_ptr)
-                try self.wip.load(.normal, optional_llvm_ty, operand, .default, "")
+                try self.wip.load(access_kind, optional_llvm_ty, operand, .default, "")
             else
                 operand;
             if (payload_ty.isSlice(zcu)) {

@@ -7577,14 +7638,14 @@ pub const FuncGen = struct {

         if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
             const loaded = if (operand_is_ptr)
-                try self.wip.load(.normal, optional_llvm_ty, operand, .default, "")
+                try self.wip.load(access_kind, optional_llvm_ty, operand, .default, "")
             else
                 operand;
             return self.wip.icmp(cond, loaded, try o.builder.intValue(.i8, 0), "");
         }

         const is_by_ref = operand_is_ptr or isByRef(optional_ty, zcu);
-        return self.optCmpNull(cond, optional_llvm_ty, operand, is_by_ref);
+        return self.optCmpNull(cond, optional_llvm_ty, operand, is_by_ref, access_kind);
     }

     fn airIsErr(

@@ -7604,6 +7665,9 @@ pub const FuncGen = struct {
         const error_type = try o.errorIntType();
         const zero = try o.builder.intValue(error_type, 0);

+        const access_kind: Builder.MemoryAccessKind =
+            if (operand_is_ptr and operand_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
+
         if (err_union_ty.errorUnionSet(zcu).errorSetIsEmpty(zcu)) {
             const val: Builder.Constant = switch (cond) {
                 .eq => .true, // 0 == 0

@@ -7613,9 +7677,11 @@ pub const FuncGen = struct {
             return val.toValue();
         }

+        if (operand_is_ptr) self.maybeMarkAllowZeroAccess(operand_ty.ptrInfo(zcu));
+
         if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
             const loaded = if (operand_is_ptr)
-                try self.wip.load(.normal, try o.lowerType(err_union_ty), operand, .default, "")
+                try self.wip.load(access_kind, try o.lowerType(err_union_ty), operand, .default, "")
             else
                 operand;
             return self.wip.icmp(cond, loaded, zero, "");

@@ -7627,7 +7693,7 @@ pub const FuncGen = struct {
             const err_union_llvm_ty = try o.lowerType(err_union_ty);
             const err_field_ptr =
                 try self.wip.gepStruct(err_union_llvm_ty, operand, err_field_index, "");
-            break :loaded try self.wip.load(.normal, error_type, err_field_ptr, .default, "");
+            break :loaded try self.wip.load(access_kind, error_type, err_field_ptr, .default, "");
         } else try self.wip.extractValue(operand, &.{err_field_index}, "");
         return self.wip.icmp(cond, loaded, zero, "");
     }

@@ -7660,12 +7726,19 @@ pub const FuncGen = struct {
         const zcu = pt.zcu;
         const ty_op = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
         const operand = try self.resolveInst(ty_op.operand);
-        const optional_ty = self.typeOf(ty_op.operand).childType(zcu);
+        const optional_ptr_ty = self.typeOf(ty_op.operand);
+        const optional_ty = optional_ptr_ty.childType(zcu);
         const payload_ty = optional_ty.optionalChild(zcu);
         const non_null_bit = try o.builder.intValue(.i8, 1);

+        const access_kind: Builder.MemoryAccessKind =
+            if (optional_ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
+
         if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
+            self.maybeMarkAllowZeroAccess(optional_ptr_ty.ptrInfo(zcu));
+
             // We have a pointer to a i8. We need to set it to 1 and then return the same pointer.
-            _ = try self.wip.store(.normal, non_null_bit, operand, .default);
+            _ = try self.wip.store(access_kind, non_null_bit, operand, .default);
             return operand;
         }
         if (optional_ty.optionalReprIsPayload(zcu)) {

@@ -7677,8 +7750,11 @@ pub const FuncGen = struct {
         // First set the non-null bit.
         const optional_llvm_ty = try o.lowerType(optional_ty);
         const non_null_ptr = try self.wip.gepStruct(optional_llvm_ty, operand, 1, "");
+
+        self.maybeMarkAllowZeroAccess(optional_ptr_ty.ptrInfo(zcu));
+
         // TODO set alignment on this store
-        _ = try self.wip.store(.normal, non_null_bit, non_null_ptr, .default);
+        _ = try self.wip.store(access_kind, non_null_bit, non_null_ptr, .default);

         // Then return the payload pointer (only if it's used).
         if (self.liveness.isUnused(inst)) return .none;

@@ -7764,18 +7840,26 @@ pub const FuncGen = struct {
             }
         }

+        const access_kind: Builder.MemoryAccessKind =
+            if (operand_is_ptr and operand_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
+
         const payload_ty = err_union_ty.errorUnionPayload(zcu);
         if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
             if (!operand_is_ptr) return operand;
-            return self.wip.load(.normal, error_type, operand, .default, "");
+
+            self.maybeMarkAllowZeroAccess(operand_ty.ptrInfo(zcu));
+
+            return self.wip.load(access_kind, error_type, operand, .default, "");
         }

         const offset = try errUnionErrorOffset(payload_ty, pt);

         if (operand_is_ptr or isByRef(err_union_ty, zcu)) {
+            if (operand_is_ptr) self.maybeMarkAllowZeroAccess(operand_ty.ptrInfo(zcu));
+
             const err_union_llvm_ty = try o.lowerType(err_union_ty);
             const err_field_ptr = try self.wip.gepStruct(err_union_llvm_ty, operand, offset, "");
-            return self.wip.load(.normal, error_type, err_field_ptr, .default, "");
+            return self.wip.load(access_kind, error_type, err_field_ptr, .default, "");
         }

         return self.wip.extractValue(operand, &.{offset}, "");
@@ -7787,22 +7871,31 @@ pub const FuncGen = struct {
         const zcu = pt.zcu;
         const ty_op = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
         const operand = try self.resolveInst(ty_op.operand);
-        const err_union_ty = self.typeOf(ty_op.operand).childType(zcu);
+        const err_union_ptr_ty = self.typeOf(ty_op.operand);
+        const err_union_ty = err_union_ptr_ty.childType(zcu);

         const payload_ty = err_union_ty.errorUnionPayload(zcu);
         const non_error_val = try o.builder.intValue(try o.errorIntType(), 0);

+        const access_kind: Builder.MemoryAccessKind =
+            if (err_union_ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
+
         if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
-            _ = try self.wip.store(.normal, non_error_val, operand, .default);
+            self.maybeMarkAllowZeroAccess(err_union_ptr_ty.ptrInfo(zcu));
+
+            _ = try self.wip.store(access_kind, non_error_val, operand, .default);
             return operand;
         }
         const err_union_llvm_ty = try o.lowerType(err_union_ty);
         {
+            self.maybeMarkAllowZeroAccess(err_union_ptr_ty.ptrInfo(zcu));
+
             const err_int_ty = try pt.errorIntType();
             const error_alignment = err_int_ty.abiAlignment(zcu).toLlvm();
             const error_offset = try errUnionErrorOffset(payload_ty, pt);
             // First set the non-error value.
             const non_null_ptr = try self.wip.gepStruct(err_union_llvm_ty, operand, error_offset, "");
-            _ = try self.wip.store(.normal, non_error_val, non_null_ptr, error_alignment);
+            _ = try self.wip.store(access_kind, non_error_val, non_null_ptr, error_alignment);
         }
         // Then return the payload pointer (only if it is used).
         if (self.liveness.isUnused(inst)) return .none;
@@ -8017,6 +8110,10 @@ pub const FuncGen = struct {
         const index = try self.resolveInst(extra.lhs);
         const operand = try self.resolveInst(extra.rhs);

+        self.maybeMarkAllowZeroAccess(vector_ptr_ty.ptrInfo(zcu));
+
+        // TODO: Emitting a load here is a violation of volatile semantics. Not fixable in general.
+        // https://github.com/ziglang/zig/issues/18652#issuecomment-2452844908
         const access_kind: Builder.MemoryAccessKind =
             if (vector_ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
         const elem_llvm_ty = try o.lowerType(vector_ptr_ty.childType(zcu));

@@ -9482,6 +9579,8 @@ pub const FuncGen = struct {
             return .none;
         }

+        self.maybeMarkAllowZeroAccess(ptr_info);
+
         const len = try o.builder.intValue(try o.lowerType(Type.usize), operand_ty.abiSize(zcu));
         _ = try self.wip.callMemSet(
             dest_ptr,

@@ -9497,6 +9596,8 @@ pub const FuncGen = struct {
             return .none;
         }

+        self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu));
+
         const src_operand = try self.resolveInst(bin_op.rhs);
         try self.store(dest_ptr, ptr_ty, src_operand, .none);
         return .none;

@@ -9539,6 +9640,9 @@ pub const FuncGen = struct {
             if (!canElideLoad(fg, body_tail)) break :elide;
             return ptr;
         }

+        fg.maybeMarkAllowZeroAccess(ptr_info);
+
         return fg.load(ptr, ptr_ty);
     }

@@ -9598,6 +9702,8 @@ pub const FuncGen = struct {
             new_value = try self.wip.conv(signedness, new_value, llvm_abi_ty, "");
         }

+        self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu));
+
         const result = try self.wip.cmpxchg(
             kind,
             if (ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal,

@@ -9649,6 +9755,8 @@ pub const FuncGen = struct {
             if (ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
         const ptr_alignment = ptr_ty.ptrAlignment(zcu).toLlvm();

+        self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu));
+
         if (llvm_abi_ty != .none) {
             // operand needs widening and truncating or bitcasting.
             return self.wip.cast(if (is_float) .bitcast else .trunc, try self.wip.atomicrmw(

@@ -9712,6 +9820,8 @@ pub const FuncGen = struct {
             if (info.flags.is_volatile) .@"volatile" else .normal;
         const elem_llvm_ty = try o.lowerType(elem_ty);

+        self.maybeMarkAllowZeroAccess(info);
+
         if (llvm_abi_ty != .none) {
             // operand needs widening and truncating
             const loaded = try self.wip.loadAtomic(

@@ -9761,6 +9871,9 @@ pub const FuncGen = struct {
                 "",
             );
         }

+        self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu));
+
         try self.store(ptr, ptr_ty, element, ordering);
         return .none;
     }

@@ -9778,6 +9891,8 @@ pub const FuncGen = struct {
         const access_kind: Builder.MemoryAccessKind =
             if (ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;

+        self.maybeMarkAllowZeroAccess(ptr_ty.ptrInfo(zcu));
+
         if (try self.air.value(bin_op.rhs, pt)) |elem_val| {
             if (elem_val.isUndefDeep(zcu)) {
                 // Even if safety is disabled, we still emit a memset to undefined since it conveys

@@ -9916,6 +10031,9 @@ pub const FuncGen = struct {
         const access_kind: Builder.MemoryAccessKind = if (src_ptr_ty.isVolatilePtr(zcu) or
             dest_ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;

+        self.maybeMarkAllowZeroAccess(dest_ptr_ty.ptrInfo(zcu));
+        self.maybeMarkAllowZeroAccess(src_ptr_ty.ptrInfo(zcu));
+
         _ = try self.wip.callMemCpy(
             dest_ptr,
             dest_ptr_ty.ptrAlignment(zcu).toLlvm(),

@@ -9959,20 +10077,27 @@ pub const FuncGen = struct {
         const pt = o.pt;
         const zcu = pt.zcu;
         const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
-        const un_ty = self.typeOf(bin_op.lhs).childType(zcu);
+        const un_ptr_ty = self.typeOf(bin_op.lhs);
+        const un_ty = un_ptr_ty.childType(zcu);
         const layout = un_ty.unionGetLayout(zcu);
         if (layout.tag_size == 0) return .none;

+        const access_kind: Builder.MemoryAccessKind =
+            if (un_ptr_ty.isVolatilePtr(zcu)) .@"volatile" else .normal;
+
+        self.maybeMarkAllowZeroAccess(un_ptr_ty.ptrInfo(zcu));
+
         const union_ptr = try self.resolveInst(bin_op.lhs);
         const new_tag = try self.resolveInst(bin_op.rhs);
         if (layout.payload_size == 0) {
             // TODO alignment on this store
-            _ = try self.wip.store(.normal, new_tag, union_ptr, .default);
+            _ = try self.wip.store(access_kind, new_tag, union_ptr, .default);
             return .none;
         }
         const tag_index = @intFromBool(layout.tag_align.compare(.lt, layout.payload_align));
         const tag_field_ptr = try self.wip.gepStruct(try o.lowerType(un_ty), union_ptr, tag_index, "");
         // TODO alignment on this store
-        _ = try self.wip.store(.normal, new_tag, tag_field_ptr, .default);
+        _ = try self.wip.store(access_kind, new_tag, tag_field_ptr, .default);
         return .none;
     }

@@ -10869,12 +10994,13 @@ pub const FuncGen = struct {
         opt_llvm_ty: Builder.Type,
         opt_handle: Builder.Value,
         is_by_ref: bool,
+        access_kind: Builder.MemoryAccessKind,
     ) Allocator.Error!Builder.Value {
         const o = self.ng.object;
         const field = b: {
             if (is_by_ref) {
                 const field_ptr = try self.wip.gepStruct(opt_llvm_ty, opt_handle, 1, "");
-                break :b try self.wip.load(.normal, .i8, field_ptr, .default, "");
+                break :b try self.wip.load(access_kind, .i8, field_ptr, .default, "");
             }
             break :b try self.wip.extractValue(opt_handle, &.{1}, "");
         };

@@ -11183,7 +11309,7 @@ pub const FuncGen = struct {
             const vec_elem_ty = try o.lowerType(elem_ty);
             const vec_ty = try o.builder.vectorType(.normal, info.packed_offset.host_size, vec_elem_ty);

-            const loaded_vector = try self.wip.load(access_kind, vec_ty, ptr, ptr_alignment, "");
+            const loaded_vector = try self.wip.load(.normal, vec_ty, ptr, ptr_alignment, "");

             const modified_vector = try self.wip.insertElement(loaded_vector, elem, index_u32, "");

@@ -11196,7 +11322,7 @@ pub const FuncGen = struct {
             const containing_int_ty = try o.builder.intType(@intCast(info.packed_offset.host_size * 8));
             assert(ordering == .none);
             const containing_int =
-                try self.wip.load(access_kind, containing_int_ty, ptr, ptr_alignment, "");
+                try self.wip.load(.normal, containing_int_ty, ptr, ptr_alignment, "");
             const elem_bits = ptr_ty.childType(zcu).bitSize(zcu);
             const shift_amt = try o.builder.intConst(containing_int_ty, info.packed_offset.bit_offset);
             // Convert to equally-sized integer type in order to perform the bit
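The TODO above marks the one acknowledged gap: storing a single element of a packed or vector value behind a volatile pointer lowers as a read-modify-write, so a load is emitted that the source program never wrote. A sketch of a case that still hits this path (the struct layout is illustrative):

fn setFlag(reg: *volatile packed struct(u8) { ready: bool, _pad: u7 }) void {
    reg.ready = true; // lowers to load + bit-insert + store of the containing byte
}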
@@ -471,8 +471,6 @@ const usage_build_generic =
     \\  -fno-dll-export-fns       Force-disable marking exported functions as DLL exports
     \\  -freference-trace[=num]   Show num lines of reference trace per compile error
     \\  -fno-reference-trace      Disable reference trace
-    \\  -fbuiltin                 Enable implicit builtin knowledge of functions
-    \\  -fno-builtin              Disable implicit builtin knowledge of functions
     \\  -ffunction-sections       Places each function in a separate section
     \\  -fno-function-sections    All functions go into same section
     \\  -fdata-sections           Places each data in a separate section

@@ -534,6 +532,8 @@ const usage_build_generic =
     \\  -fno-sanitize-thread      Disable Thread Sanitizer
     \\  -ffuzz                    Enable fuzz testing instrumentation
     \\  -fno-fuzz                 Disable fuzz testing instrumentation
+    \\  -fbuiltin                 Enable implicit builtin knowledge of functions
+    \\  -fno-builtin              Disable implicit builtin knowledge of functions
     \\  -funwind-tables           Always produce unwind table entries for all functions
     \\  -fasync-unwind-tables     Always produce asynchronous unwind table entries for all functions
     \\  -fno-unwind-tables        Never produce unwind table entries
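The flags themselves are unchanged; they only move within the help text so that they sit alongside other per-module options such as -ffuzz, matching the std.Build.Module change above that makes builtin knowledge a per-module setting. Typical usage is unaffected, e.g. `zig build-exe main.zig -fno-builtin`.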
test/llvm_ir.zig (new file, 122 lines)

@@ -0,0 +1,122 @@
pub fn addCases(cases: *tests.LlvmIrContext) void {
    cases.addMatches("nonnull ptr load",
        \\export fn entry(ptr: *i16) i16 {
        \\    return ptr.*;
        \\}
    , &.{
        "ptr nonnull",
        "load i16, ptr %0",
    }, .{});

    cases.addMatches("nonnull ptr store",
        \\export fn entry(ptr: *i16) void {
        \\    ptr.* = 42;
        \\}
    , &.{
        "ptr nonnull",
        "store i16 42, ptr %0",
    }, .{});

    cases.addMatches("unused acquire atomic ptr load",
        \\export fn entry(ptr: *i16) void {
        \\    _ = @atomicLoad(i16, ptr, .acquire);
        \\}
    , &.{
        "load atomic i16, ptr %0 acquire",
    }, .{});

    cases.addMatches("unused unordered atomic volatile ptr load",
        \\export fn entry(ptr: *volatile i16) void {
        \\    _ = @atomicLoad(i16, ptr, .unordered);
        \\}
    , &.{
        "load atomic volatile i16, ptr %0 unordered",
    }, .{});

    cases.addMatches("unused volatile ptr load",
        \\export fn entry(ptr: *volatile i16) void {
        \\    _ = ptr.*;
        \\}
    , &.{
        "load volatile i16, ptr %0",
    }, .{});

    cases.addMatches("dead volatile ptr store",
        \\export fn entry(ptr: *volatile i16) void {
        \\    ptr.* = 123;
        \\    ptr.* = 321;
        \\}
    , &.{
        "store volatile i16 123, ptr %0",
        "store volatile i16 321, ptr %0",
    }, .{});

    cases.addMatches("unused volatile slice load",
        \\export fn entry(ptr: *volatile i16) void {
        \\    entry2(ptr[0..1]);
        \\}
        \\fn entry2(ptr: []volatile i16) void {
        \\    _ = ptr[0];
        \\}
    , &.{
        "load volatile i16, ptr",
    }, .{});

    cases.addMatches("dead volatile slice store",
        \\export fn entry(ptr: *volatile i16) void {
        \\    entry2(ptr[0..1]);
        \\}
        \\fn entry2(ptr: []volatile i16) void {
        \\    ptr[0] = 123;
        \\    ptr[0] = 321;
        \\}
    , &.{
        "store volatile i16 123, ptr",
        "store volatile i16 321, ptr",
    }, .{});

    cases.addMatches("allowzero ptr load",
        \\export fn entry(ptr: *allowzero i16) i16 {
        \\    return ptr.*;
        \\}
    , &.{
        "null_pointer_is_valid",
        "load i16, ptr %0",
    }, .{});

    cases.addMatches("allowzero ptr store",
        \\export fn entry(ptr: *allowzero i16) void {
        \\    ptr.* = 42;
        \\}
    , &.{
        "null_pointer_is_valid",
        "store i16 42, ptr %0",
    }, .{});

    cases.addMatches("allowzero slice load",
        \\export fn entry(ptr: *allowzero i16) i16 {
        \\    return entry2(ptr[0..1]);
        \\}
        \\fn entry2(ptr: []allowzero i16) i16 {
        \\    return ptr[0];
        \\}
    , &.{
        "null_pointer_is_valid",
        "load i16, ptr",
    }, .{});

    cases.addMatches("allowzero slice store",
        \\export fn entry(ptr: *allowzero i16) void {
        \\    entry2(ptr[0..1]);
        \\}
        \\fn entry2(ptr: []allowzero i16) void {
        \\    ptr[0] = 42;
        \\}
    , &.{
        "null_pointer_is_valid",
        "store i16 42, ptr",
    }, .{});
}

const std = @import("std");
const tests = @import("tests.zig");
@@ -2442,8 +2442,10 @@ fn addTest(
    db_argv2: []const []const u8,
    expected_output: []const []const u8,
) void {
-    for (db.options.test_filters) |test_filter| {
-        if (std.mem.indexOf(u8, name, test_filter)) |_| return;
+    if (db.options.test_filters.len > 0) {
+        for (db.options.test_filters) |test_filter| {
+            if (std.mem.indexOf(u8, name, test_filter) != null) break;
+        } else return;
     }
     if (db.options.test_target_filters.len > 0) {
         const triple_txt = target.resolved.result.zigTriple(db.b.allocator) catch @panic("OOM");
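The old loop returned as soon as any filter matched the test name, inverting the intended behavior: filters are supposed to select which tests run. The rewrite relies on Zig's for-else, where the else branch runs only if the loop completes without a break, i.e. when no filter matched; the same shape appears in the new test/src/LlvmIr.zig below. As a standalone sketch (hypothetical helper, assuming `const std = @import("std");`):

fn matchesAnyFilter(name: []const u8, filters: []const []const u8) bool {
    for (filters) |filter| {
        if (std.mem.indexOf(u8, name, filter) != null) break; // found a match
    } else return false; // loop finished without break: no filter matched
    return true;
}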
132
test/src/LlvmIr.zig
Normal file
132
test/src/LlvmIr.zig
Normal file
|
|
@ -0,0 +1,132 @@
b: *std.Build,
options: Options,
root_step: *std.Build.Step,

pub const Options = struct {
    enable_llvm: bool,
    test_filters: []const []const u8,
    test_target_filters: []const []const u8,
};

const TestCase = struct {
    name: []const u8,
    source: []const u8,
    check: union(enum) {
        matches: []const []const u8,
        exact: []const u8,
    },
    params: Params,

    pub const Params = struct {
        code_model: std.builtin.CodeModel = .default,
        dll_export_fns: ?bool = null,
        dwarf_format: ?std.dwarf.Format = null,
        error_tracing: ?bool = null,
        no_builtin: ?bool = null,
        omit_frame_pointer: ?bool = null,
        // For most cases, we want to test the LLVM IR that we output; we don't want to be in the
        // business of testing LLVM's optimization passes. `Debug` gets us the closest to that as it
        // disables the vast majority of passes in LLVM.
        optimize: std.builtin.OptimizeMode = .Debug,
        pic: ?bool = null,
        pie: ?bool = null,
        red_zone: ?bool = null,
        sanitize_thread: ?bool = null,
        single_threaded: ?bool = null,
        stack_check: ?bool = null,
        stack_protector: ?bool = null,
        strip: ?bool = null,
        target: std.Target.Query = .{},
        unwind_tables: ?std.builtin.UnwindTables = null,
        valgrind: ?bool = null,
    };
};

pub fn addMatches(
    self: *LlvmIr,
    name: []const u8,
    source: []const u8,
    matches: []const []const u8,
    params: TestCase.Params,
) void {
    self.addCase(.{
        .name = name,
        .source = source,
        .check = .{ .matches = matches },
        .params = params,
    });
}

pub fn addExact(
    self: *LlvmIr,
    name: []const u8,
    source: []const u8,
    expected: []const u8,
    params: TestCase.Params,
) void {
    self.addCase(.{
        .name = name,
        .source = source,
        .check = .{ .exact = expected },
        .params = params,
    });
}

pub fn addCase(self: *LlvmIr, case: TestCase) void {
    const target = self.b.resolveTargetQuery(case.params.target);
    if (self.options.test_target_filters.len > 0) {
        const triple_txt = target.result.zigTriple(self.b.allocator) catch @panic("OOM");
        for (self.options.test_target_filters) |filter| {
            if (std.mem.indexOf(u8, triple_txt, filter) != null) break;
        } else return;
    }

    const name = std.fmt.allocPrint(self.b.allocator, "check llvm-ir {s}", .{case.name}) catch @panic("OOM");
    if (self.options.test_filters.len > 0) {
        for (self.options.test_filters) |filter| {
            if (std.mem.indexOf(u8, name, filter) != null) break;
        } else return;
    }

    const obj = self.b.addObject(.{
        .name = "test",
        .root_source_file = self.b.addWriteFiles().add("test.zig", case.source),
        .use_llvm = true,

        .code_model = case.params.code_model,
        .error_tracing = case.params.error_tracing,
        .omit_frame_pointer = case.params.omit_frame_pointer,
        .optimize = case.params.optimize,
        .pic = case.params.pic,
        .sanitize_thread = case.params.sanitize_thread,
        .single_threaded = case.params.single_threaded,
        .strip = case.params.strip,
        .target = target,
        .unwind_tables = case.params.unwind_tables,
    });

    obj.dll_export_fns = case.params.dll_export_fns;
    obj.pie = case.params.pie;

    obj.root_module.dwarf_format = case.params.dwarf_format;
    obj.root_module.no_builtin = case.params.no_builtin;
    obj.root_module.red_zone = case.params.red_zone;
    obj.root_module.stack_check = case.params.stack_check;
    obj.root_module.stack_protector = case.params.stack_protector;
    obj.root_module.valgrind = case.params.valgrind;

    // This is not very sophisticated at the moment. Eventually, we should move towards something
    // like LLVM's `FileCheck` utility (https://llvm.org/docs/CommandGuide/FileCheck.html), though
    // likely a more simplified version as we probably don't want a full-blown regex engine in the
    // standard library...
    const check = self.b.addCheckFile(obj.getEmittedLlvmIr(), switch (case.check) {
        .matches => |m| .{ .expected_matches = m },
        .exact => |e| .{ .expected_exact = e },
    });
    check.setName(name);

    self.root_step.dependOn(&check.step);
}

const LlvmIr = @This();
const std = @import("std");
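`addCheckFile` and `getEmittedLlvmIr` are existing `std.Build` APIs: the former creates a step that verifies a generated file's contents, with `expected_matches` requiring each substring to occur in the file and `expected_exact` requiring a byte-for-byte match. A minimal standalone sketch of the same mechanism in a hypothetical `build.zig`:

const std = @import("std");

pub fn build(b: *std.Build) void {
    // Generate a small file, then assert that two substrings appear in it.
    const wf = b.addWriteFiles();
    const file = wf.add("out.txt", "alpha\nbeta\n");
    const check = b.addCheckFile(file, .{
        .expected_matches = &.{ "alpha", "beta" },
    });
    const check_step = b.step("check", "Verify the generated file");
    check_step.dependOn(&check.step);
}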
@@ -11,6 +11,7 @@ const stack_traces = @import("stack_traces.zig");
 const assemble_and_link = @import("assemble_and_link.zig");
 const translate_c = @import("translate_c.zig");
 const run_translated_c = @import("run_translated_c.zig");
+const llvm_ir = @import("llvm_ir.zig");
 
 // Implementations
 pub const TranslateCContext = @import("src/TranslateC.zig");
@@ -18,6 +19,7 @@ pub const RunTranslatedCContext = @import("src/RunTranslatedC.zig");
 pub const CompareOutputContext = @import("src/CompareOutput.zig");
 pub const StackTracesContext = @import("src/StackTrace.zig");
 pub const DebuggerContext = @import("src/Debugger.zig");
+pub const LlvmIrContext = @import("src/LlvmIr.zig");
 
 const TestTarget = struct {
     linkage: ?std.builtin.LinkMode = null,
@@ -1825,7 +1827,7 @@ pub fn addModuleTests(b: *std.Build, options: ModuleTestOptions) *Step {
         .zig_lib_dir = b.path("lib"),
     });
     these_tests.linkage = test_target.linkage;
-    if (options.no_builtin) these_tests.no_builtin = true;
+    if (options.no_builtin) these_tests.root_module.no_builtin = true;
     if (options.build_options) |build_options| {
         these_tests.root_module.addOptions("build_options", build_options);
     }
@@ -2125,3 +2127,22 @@ pub fn addIncrementalTests(b: *std.Build, test_step: *Step) !void {
         test_step.dependOn(&run.step);
     }
 }
+
+pub fn addLlvmIrTests(b: *std.Build, options: LlvmIrContext.Options) ?*Step {
+    const step = b.step("test-llvm-ir", "Run the LLVM IR tests");
+
+    if (!options.enable_llvm) {
+        step.dependOn(&b.addFail("test-llvm-ir requires -Denable-llvm").step);
+        return null;
+    }
+
+    var context: LlvmIrContext = .{
+        .b = b,
+        .options = options,
+        .root_step = step,
+    };
+
+    llvm_ir.addCases(&context);
+
+    return step;
+}
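Worth noting: the `test-llvm-ir` step is created even when LLVM is disabled, so invoking it still resolves but fails with a clear message instead of silently doing nothing. A sketch of that guarded-step pattern in isolation (hypothetical `build.zig`; `b.addFail` and `b.option` are existing `std.Build` APIs):

const std = @import("std");

pub fn build(b: *std.Build) void {
    const enable_llvm = b.option(bool, "enable-llvm", "Enable LLVM-backed steps") orelse false;
    const step = b.step("needs-llvm", "A step that requires LLVM");
    if (!enable_llvm) {
        // Running `zig build needs-llvm` without the option fails loudly
        // rather than reporting success without testing anything.
        step.dependOn(&b.addFail("needs-llvm requires -Denable-llvm").step);
    }
}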