wip: progress towards compiling tests

mlugg authored 2023-05-07 15:23:12 +01:00, committed by Andrew Kelley
parent 9d9e1a2991
commit c1ca16d779
5 changed files with 213 additions and 134 deletions

View file

@@ -934,10 +934,13 @@ pub const Decl = struct {
     pub fn isExtern(decl: Decl) bool {
         assert(decl.has_tv);
-        return switch (decl.val.tag()) {
+        return switch (decl.val.ip_index) {
+            .none => switch (decl.val.tag()) {
             .extern_fn => true,
             .variable => decl.val.castTag(.variable).?.data.init.ip_index == .unreachable_value,
             else => false,
+            },
+            else => false,
         };
     }
@@ -6833,6 +6836,10 @@ pub fn intType(mod: *Module, signedness: std.builtin.Signedness, bits: u16) Allo
 }
 pub fn arrayType(mod: *Module, info: InternPool.Key.ArrayType) Allocator.Error!Type {
+    if (std.debug.runtime_safety and info.sentinel != .none) {
+        const sent_ty = mod.intern_pool.indexToKey(info.sentinel).typeOf();
+        assert(sent_ty == info.child);
+    }
     const i = try intern(mod, .{ .array_type = info });
     return i.toType();
 }
@@ -6848,6 +6855,10 @@ pub fn optionalType(mod: *Module, child_type: InternPool.Index) Allocator.Error!
 }
 pub fn ptrType(mod: *Module, info: InternPool.Key.PtrType) Allocator.Error!Type {
+    if (std.debug.runtime_safety and info.sentinel != .none) {
+        const sent_ty = mod.intern_pool.indexToKey(info.sentinel).typeOf();
+        assert(sent_ty == info.elem_type);
+    }
     const i = try intern(mod, .{ .ptr_type = info });
     return i.toType();
 }

View file

@@ -5146,7 +5146,7 @@ fn addStrLit(sema: *Sema, block: *Block, zir_bytes: []const u8) CompileError!Air
         defer anon_decl.deinit();
         const decl_index = try anon_decl.finish(
-            try Type.array(anon_decl.arena(), gop.key_ptr.len, Value.zero, Type.u8, mod),
+            try Type.array(anon_decl.arena(), gop.key_ptr.len, try mod.intValue(Type.u8, 0), Type.u8, mod),
             try Value.Tag.str_lit.create(anon_decl.arena(), gop.key_ptr.*),
             0, // default alignment
         );
@@ -15567,7 +15567,7 @@ fn zirSizeOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
         => {},
     }
     const val = try ty.lazyAbiSize(mod, sema.arena);
-    if (val.tag() == .lazy_size) {
+    if (val.ip_index == .none and val.tag() == .lazy_size) {
         try sema.queueFullTypeResolution(ty);
     }
     return sema.addConstant(Type.comptime_int, val);
@@ -16006,8 +16006,8 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
                 sema.arena,
                 @enumToInt(info.signedness),
             );
-            // bits: comptime_int,
-            field_values[1] = try mod.intValue(Type.comptime_int, info.bits);
+            // bits: u16,
+            field_values[1] = try mod.intValue(Type.u16, info.bits);
             return sema.addConstant(
                 type_info_ty,
@@ -16019,8 +16019,8 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
         },
         .Float => {
             const field_values = try sema.arena.alloc(Value, 1);
-            // bits: comptime_int,
-            field_values[0] = try mod.intValue(Type.comptime_int, ty.bitSize(mod));
+            // bits: u16,
+            field_values[0] = try mod.intValue(Type.u16, ty.bitSize(mod));
             return sema.addConstant(
                 type_info_ty,
@@ -25957,7 +25957,21 @@ fn coerceExtra(
                     if (!opts.report_err) return error.NotCoercible;
                     return sema.fail(block, inst_src, "type '{}' cannot represent integer value '{}'", .{ dest_ty.fmt(sema.mod), val.fmtValue(inst_ty, sema.mod) });
                 }
-                return try sema.addConstant(dest_ty, val);
+                const key = mod.intern_pool.indexToKey(val.ip_index);
+                // If the int is represented as a bigint, copy it so we can safely pass it to `mod.intern`
+                const int_storage: InternPool.Key.Int.Storage = switch (key.int.storage) {
+                    .u64 => |x| .{ .u64 = x },
+                    .i64 => |x| .{ .i64 = x },
+                    .big_int => |big_int| .{ .big_int = .{
+                        .limbs = try sema.arena.dupe(std.math.big.Limb, big_int.limbs),
+                        .positive = big_int.positive,
+                    } },
+                };
+                const new_val = try mod.intern(.{ .int = .{
+                    .ty = dest_ty.ip_index,
+                    .storage = int_storage,
+                } });
+                return try sema.addConstant(dest_ty, new_val.toValue());
             }
             if (dest_ty.zigTypeTag(mod) == .ComptimeInt) {
                 if (!opts.report_err) return error.NotCoercible;
@@ -31061,7 +31075,8 @@ pub fn resolveFnTypes(sema: *Sema, fn_info: Type.Payload.Function.Data) CompileE
 /// Make it so that calling hash() and eql() on `val` will not assert due
 /// to a type not having its layout resolved.
 fn resolveLazyValue(sema: *Sema, val: Value) CompileError!void {
-    switch (val.tag()) {
+    switch (val.ip_index) {
+        .none => switch (val.tag()) {
         .lazy_align => {
             const ty = val.castTag(.lazy_align).?.data;
             return sema.resolveTypeLayout(ty);
@@ -31096,6 +31111,8 @@ fn resolveLazyValue(sema: *Sema, val: Value) CompileError!void {
             return sema.resolveLazyValue(slice.len);
         },
         else => return,
+        },
+        else => return,
     }
 }
@@ -31200,7 +31217,7 @@ fn resolveStructLayout(sema: *Sema, ty: Type) CompileError!void {
         };
         for (struct_obj.fields.values(), 0..) |field, i| {
-            optimized_order[i] = if (!(try sema.typeHasRuntimeBits(field.ty)))
+            optimized_order[i] = if (try sema.typeHasRuntimeBits(field.ty))
                @intCast(u32, i)
            else
                Module.Struct.omitted_field;

View file

@@ -2481,7 +2481,7 @@ pub const DeclGen = struct {
         log.debug("gen: {s} type: {}, value: {}", .{
             decl.name, decl.ty.fmtDebug(), decl.val.fmtDebug(),
         });
-        assert(decl.val.tag() != .function);
+        assert(decl.val.ip_index != .none or decl.val.tag() != .function);
         if (decl.val.castTag(.extern_fn)) |extern_fn| {
             _ = try dg.resolveLlvmFunction(extern_fn.data.owner_decl);
         } else {

View file

@@ -2471,7 +2471,7 @@ pub const Type = struct {
     pub fn lazyAbiSize(ty: Type, mod: *Module, arena: Allocator) !Value {
         switch (try ty.abiSizeAdvanced(mod, .{ .lazy = arena })) {
             .val => |val| return val,
-            .scalar => |x| return mod.intValue(ty, x),
+            .scalar => |x| return mod.intValue(Type.comptime_int, x),
         }
     }
@@ -2504,8 +2504,20 @@ pub const Type = struct {
                 if (int_type.bits == 0) return AbiSizeAdvanced{ .scalar = 0 };
                 return AbiSizeAdvanced{ .scalar = intAbiSize(int_type.bits, target) };
             },
-            .ptr_type => @panic("TODO"),
-            .array_type => @panic("TODO"),
+            .ptr_type => |ptr_type| switch (ptr_type.size) {
+                .Slice => return .{ .scalar = @divExact(target.ptrBitWidth(), 8) * 2 },
+                else => return .{ .scalar = @divExact(target.ptrBitWidth(), 8) },
+            },
+            .array_type => |array_type| {
+                const len = array_type.len + @boolToInt(array_type.sentinel != .none);
+                switch (try array_type.child.toType().abiSizeAdvanced(mod, strat)) {
+                    .scalar => |elem_size| return .{ .scalar = len * elem_size },
+                    .val => switch (strat) {
+                        .sema, .eager => unreachable,
+                        .lazy => |arena| return .{ .val = try Value.Tag.lazy_size.create(arena, ty) },
+                    },
+                }
+            },
             .vector_type => |vector_type| {
                 const opt_sema = switch (strat) {
                     .sema => |sema| sema,
@@ -2528,7 +2540,7 @@ pub const Type = struct {
                 return AbiSizeAdvanced{ .scalar = result };
             },
-            .opt_type => @panic("TODO"),
+            .opt_type => return ty.abiSizeAdvancedOptional(mod, strat),
             .error_union_type => @panic("TODO"),
             .simple_type => |t| switch (t) {
                 .bool,
@@ -2698,39 +2710,7 @@ pub const Type = struct {
             .error_set_single,
             => return AbiSizeAdvanced{ .scalar = 2 },
-            .optional => {
-                const child_type = ty.optionalChild(mod);
-                if (child_type.isNoReturn()) {
-                    return AbiSizeAdvanced{ .scalar = 0 };
-                }
-                if (!(child_type.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
-                    error.NeedLazy => return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(strat.lazy, ty) },
-                    else => |e| return e,
-                })) return AbiSizeAdvanced{ .scalar = 1 };
-                if (ty.optionalReprIsPayload(mod)) {
-                    return abiSizeAdvanced(child_type, mod, strat);
-                }
-                const payload_size = switch (try child_type.abiSizeAdvanced(mod, strat)) {
-                    .scalar => |elem_size| elem_size,
-                    .val => switch (strat) {
-                        .sema => unreachable,
-                        .eager => unreachable,
-                        .lazy => |arena| return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(arena, ty) },
-                    },
-                };
-                // Optional types are represented as a struct with the child type as the first
-                // field and a boolean as the second. Since the child type's abi alignment is
-                // guaranteed to be >= that of bool's (1 byte) the added size is exactly equal
-                // to the child type's ABI alignment.
-                return AbiSizeAdvanced{
-                    .scalar = child_type.abiAlignment(mod) + payload_size,
-                };
-            },
+            .optional => return ty.abiSizeAdvancedOptional(mod, strat),
             .error_union => {
                 // This code needs to be kept in sync with the equivalent switch prong
@@ -2791,6 +2771,44 @@ pub const Type = struct {
         return AbiSizeAdvanced{ .scalar = union_obj.abiSize(mod, have_tag) };
     }
+    fn abiSizeAdvancedOptional(
+        ty: Type,
+        mod: *const Module,
+        strat: AbiAlignmentAdvancedStrat,
+    ) Module.CompileError!AbiSizeAdvanced {
+        const child_ty = ty.optionalChild(mod);
+        if (child_ty.isNoReturn()) {
+            return AbiSizeAdvanced{ .scalar = 0 };
+        }
+        if (!(child_ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
+            error.NeedLazy => return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(strat.lazy, ty) },
+            else => |e| return e,
+        })) return AbiSizeAdvanced{ .scalar = 1 };
+        if (ty.optionalReprIsPayload(mod)) {
+            return abiSizeAdvanced(child_ty, mod, strat);
+        }
+        const payload_size = switch (try child_ty.abiSizeAdvanced(mod, strat)) {
+            .scalar => |elem_size| elem_size,
+            .val => switch (strat) {
+                .sema => unreachable,
+                .eager => unreachable,
+                .lazy => |arena| return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(arena, ty) },
+            },
+        };
+        // Optional types are represented as a struct with the child type as the first
+        // field and a boolean as the second. Since the child type's abi alignment is
+        // guaranteed to be >= that of bool's (1 byte) the added size is exactly equal
+        // to the child type's ABI alignment.
+        return AbiSizeAdvanced{
+            .scalar = child_ty.abiAlignment(mod) + payload_size,
+        };
+    }
     fn intAbiSize(bits: u16, target: Target) u64 {
         const alignment = intAbiAlignment(bits, target);
         return std.mem.alignForwardGeneric(u64, @intCast(u16, (@as(u17, bits) + 7) / 8), alignment);
@@ -2819,8 +2837,19 @@ pub const Type = struct {
         if (ty.ip_index != .none) switch (mod.intern_pool.indexToKey(ty.ip_index)) {
             .int_type => |int_type| return int_type.bits,
-            .ptr_type => @panic("TODO"),
-            .array_type => @panic("TODO"),
+            .ptr_type => |ptr_type| switch (ptr_type.size) {
+                .Slice => return target.ptrBitWidth() * 2,
+                else => return target.ptrBitWidth() * 2,
+            },
+            .array_type => |array_type| {
+                const len = array_type.len + @boolToInt(array_type.sentinel != .none);
+                if (len == 0) return 0;
+                const elem_ty = array_type.child.toType();
+                const elem_size = std.math.max(elem_ty.abiAlignment(mod), elem_ty.abiSize(mod));
+                if (elem_size == 0) return 0;
+                const elem_bit_size = try bitSizeAdvanced(elem_ty, mod, opt_sema);
+                return (len - 1) * 8 * elem_size + elem_bit_size;
+            },
             .vector_type => |vector_type| {
                 const child_ty = vector_type.child.toType();
                 const elem_bit_size = try bitSizeAdvanced(child_ty, mod, opt_sema);
@@ -3208,6 +3237,20 @@ pub const Type = struct {
     /// See also `isPtrLikeOptional`.
     pub fn optionalReprIsPayload(ty: Type, mod: *const Module) bool {
+        if (ty.ip_index != .none) return switch (mod.intern_pool.indexToKey(ty.ip_index)) {
+            .opt_type => |child| switch (child.toType().zigTypeTag(mod)) {
+                .Pointer => {
+                    const info = child.toType().ptrInfo(mod);
+                    switch (info.size) {
+                        .C => return false,
+                        else => return !info.@"allowzero",
+                    }
+                },
+                .ErrorSet => true,
+                else => false,
+            },
+            else => false,
+        };
         switch (ty.tag()) {
             .optional => {
                 const child_ty = ty.castTag(.optional).?.data;

View file

@@ -1832,7 +1832,8 @@ pub const Value = struct {
             }
         }
-        switch (lhs.tag()) {
+        switch (lhs.ip_index) {
+            .none => switch (lhs.tag()) {
             .repeated => return lhs.castTag(.repeated).?.data.compareAllWithZeroAdvancedExtra(op, mod, opt_sema),
             .aggregate => {
                 for (lhs.castTag(.aggregate).?.data) |elem_val| {
@@ -1840,7 +1841,6 @@ pub const Value = struct {
                 }
                 return true;
             },
-            .empty_array => return true,
             .str_lit => {
                 const str_lit = lhs.castTag(.str_lit).?.data;
                 const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
@@ -1862,6 +1862,8 @@ pub const Value = struct {
             .float_80 => if (std.math.isNan(lhs.castTag(.float_80).?.data)) return op == .neq,
             .float_128 => if (std.math.isNan(lhs.castTag(.float_128).?.data)) return op == .neq,
             else => {},
+            },
+            else => {},
         }
         return (try orderAgainstZeroAdvanced(lhs, mod, opt_sema)).compare(op);
     }
@@ -2404,7 +2406,8 @@ pub const Value = struct {
     };
     pub fn isComptimeMutablePtr(val: Value) bool {
-        return switch (val.tag()) {
+        return switch (val.ip_index) {
+            .none => switch (val.tag()) {
             .decl_ref_mut, .comptime_field_ptr => true,
             .elem_ptr => isComptimeMutablePtr(val.castTag(.elem_ptr).?.data.array_ptr),
             .field_ptr => isComptimeMutablePtr(val.castTag(.field_ptr).?.data.container_ptr),
@@ -2412,13 +2415,16 @@ pub const Value = struct {
             .opt_payload_ptr => isComptimeMutablePtr(val.castTag(.opt_payload_ptr).?.data.container_ptr),
             .slice => isComptimeMutablePtr(val.castTag(.slice).?.data.ptr),
             else => false,
+            },
+            else => false,
         };
     }
     pub fn canMutateComptimeVarState(val: Value) bool {
        if (val.isComptimeMutablePtr()) return true;
-        switch (val.tag()) {
+        return switch (val.ip_index) {
+            .none => switch (val.tag()) {
             .repeated => return val.castTag(.repeated).?.data.canMutateComptimeVarState(),
             .eu_payload => return val.castTag(.eu_payload).?.data.canMutateComptimeVarState(),
             .eu_payload_ptr => return val.castTag(.eu_payload_ptr).?.data.container_ptr.canMutateComptimeVarState(),
@@ -2434,7 +2440,9 @@ pub const Value = struct {
             .@"union" => return val.cast(Payload.Union).?.data.val.canMutateComptimeVarState(),
             .slice => return val.castTag(.slice).?.data.ptr.canMutateComptimeVarState(),
             else => return false,
-        }
+            },
+            else => return false,
+        };
     }
     /// Gets the decl referenced by this pointer. If the pointer does not point