Mirror of https://codeberg.org/ziglang/zig.git
Merge pull request #11128 from topolarity/comptime-memory-reinterp
stage2: Track parent type for `.elem_ptr`, `.field_ptr`, and `.*_payload_ptr`
Commit 9eceba2485
9 changed files with 672 additions and 328 deletions
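In rough terms, the change makes every derived comptime pointer value (`.elem_ptr`, `.field_ptr`, `.opt_payload_ptr`, `.eu_payload_ptr`) also record the type of the container it was derived from, so that Sema can load and mutate comptime memory even through pointers that reinterpret it (e.g. via `@ptrCast`), as long as the types involved have a well-defined layout. The sketch below is adapted from the behavior tests added at the end of this diff; it uses the two-argument `@ptrCast` of this era of Zig and is illustrative, not part of the commit.

```zig
const std = @import("std");
const expect = std.testing.expect;

test "reinterpret comptime memory through a casted pointer" {
    const S = extern struct {
        a: u32 align(2),
        c: u8,
    };
    // `bytes` is declared as a [6]u8, but the field pointer below is derived
    // from S. The comptime pointer now remembers its parent type (S), so the
    // load can be resolved: `c` sits at byte offset 4, and bytes[4] == 5.
    comptime var bytes align(2) = [_]u8{ 1, 2, 3, 4, 5, 6 };
    comptime var ptr = @ptrCast(*const S, &bytes);
    var val = &ptr.c;
    try expect(val.* == 5);
}
```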
@@ -852,7 +852,7 @@ pub const ErrorSet = struct {
    }
};

pub const RequiresComptime = enum { no, yes, unknown, wip };
pub const PropertyBoolean = enum { no, yes, unknown, wip };

/// Represents the data that a struct declaration provides.
pub const Struct = struct {

@@ -884,7 +884,7 @@ pub const Struct = struct {
    /// If false, resolving the fields is necessary to determine whether the type has only
    /// one possible value.
    known_non_opv: bool,
    requires_comptime: RequiresComptime = .unknown,
    requires_comptime: PropertyBoolean = .unknown,

    pub const Fields = std.StringArrayHashMapUnmanaged(Field);

@@ -1089,6 +1089,8 @@ pub const EnumFull = struct {
    namespace: Namespace,
    /// Offset from `owner_decl`, points to the enum decl AST node.
    node_offset: i32,
    /// true if zig inferred this tag type, false if user specified it
    tag_ty_inferred: bool,

    pub const NameMap = std.StringArrayHashMapUnmanaged(void);
    pub const ValueMap = std.ArrayHashMapUnmanaged(Value, void, Value.ArrayHashContext, false);

@@ -1132,7 +1134,7 @@ pub const Union = struct {
        // which `have_layout` does not ensure.
        fully_resolved,
    },
    requires_comptime: RequiresComptime = .unknown,
    requires_comptime: PropertyBoolean = .unknown,

    pub const Field = struct {
        /// undefined until `status` is `have_field_types` or `have_layout`.

440 src/Sema.zig
|
|
@ -1615,6 +1615,9 @@ fn zirCoerceResultPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE
|
|||
try pointee_ty.copy(anon_decl.arena()),
|
||||
Value.undef,
|
||||
);
|
||||
if (iac.data.alignment != 0) {
|
||||
try sema.resolveTypeLayout(block, src, pointee_ty);
|
||||
}
|
||||
const ptr_ty = try Type.ptr(sema.arena, target, .{
|
||||
.pointee_type = pointee_ty,
|
||||
.@"align" = iac.data.alignment,
|
||||
|
|
@ -1884,7 +1887,8 @@ fn zirEnumDecl(
|
|||
|
||||
enum_obj.* = .{
|
||||
.owner_decl = new_decl,
|
||||
.tag_ty = Type.initTag(.@"null"),
|
||||
.tag_ty = Type.@"null",
|
||||
.tag_ty_inferred = true,
|
||||
.fields = .{},
|
||||
.values = .{},
|
||||
.node_offset = src.node_offset,
|
||||
|
|
@ -1907,6 +1911,7 @@ fn zirEnumDecl(
|
|||
// TODO better source location
|
||||
const ty = try sema.resolveType(block, src, tag_type_ref);
|
||||
enum_obj.tag_ty = try ty.copy(new_decl_arena_allocator);
|
||||
enum_obj.tag_ty_inferred = false;
|
||||
}
|
||||
try new_decl.finalizeNewArena(&new_decl_arena);
|
||||
return sema.analyzeDeclVal(block, src, new_decl);
|
||||
|
|
@ -1956,16 +1961,16 @@ fn zirEnumDecl(
|
|||
|
||||
try wip_captures.finalize();
|
||||
|
||||
const tag_ty = blk: {
|
||||
if (tag_type_ref != .none) {
|
||||
// TODO better source location
|
||||
const ty = try sema.resolveType(block, src, tag_type_ref);
|
||||
break :blk try ty.copy(new_decl_arena_allocator);
|
||||
}
|
||||
if (tag_type_ref != .none) {
|
||||
// TODO better source location
|
||||
const ty = try sema.resolveType(block, src, tag_type_ref);
|
||||
enum_obj.tag_ty = try ty.copy(new_decl_arena_allocator);
|
||||
enum_obj.tag_ty_inferred = false;
|
||||
} else {
|
||||
const bits = std.math.log2_int_ceil(usize, fields_len);
|
||||
break :blk try Type.Tag.int_unsigned.create(new_decl_arena_allocator, bits);
|
||||
};
|
||||
enum_obj.tag_ty = tag_ty;
|
||||
enum_obj.tag_ty = try Type.Tag.int_unsigned.create(new_decl_arena_allocator, bits);
|
||||
enum_obj.tag_ty_inferred = true;
|
||||
}
|
||||
}
|
||||
|
||||
try enum_obj.fields.ensureTotalCapacity(new_decl_arena_allocator, fields_len);
|
||||
|
|
@ -2417,13 +2422,13 @@ fn zirAllocExtended(
|
|||
try sema.validateVarType(block, ty_src, var_ty, false);
|
||||
}
|
||||
const target = sema.mod.getTarget();
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
try sema.resolveTypeLayout(block, src, var_ty);
|
||||
const ptr_type = try Type.ptr(sema.arena, target, .{
|
||||
.pointee_type = var_ty,
|
||||
.@"align" = alignment,
|
||||
.@"addrspace" = target_util.defaultAddressSpace(target, .local),
|
||||
});
|
||||
try sema.requireRuntimeBlock(block, src);
|
||||
try sema.resolveTypeLayout(block, src, var_ty);
|
||||
return block.addTy(.alloc, ptr_type);
|
||||
}
|
||||
|
||||
|
|
@ -5568,7 +5573,10 @@ fn analyzeOptionalPayloadPtr(
|
|||
}
|
||||
return sema.addConstant(
|
||||
child_pointer,
|
||||
try Value.Tag.opt_payload_ptr.create(sema.arena, ptr_val),
|
||||
try Value.Tag.opt_payload_ptr.create(sema.arena, .{
|
||||
.container_ptr = ptr_val,
|
||||
.container_ty = optional_ptr_ty.childType(),
|
||||
}),
|
||||
);
|
||||
}
|
||||
if (try sema.pointerDeref(block, src, ptr_val, optional_ptr_ty)) |val| {
|
||||
|
|
@ -5578,7 +5586,10 @@ fn analyzeOptionalPayloadPtr(
|
|||
// The same Value represents the pointer to the optional and the payload.
|
||||
return sema.addConstant(
|
||||
child_pointer,
|
||||
try Value.Tag.opt_payload_ptr.create(sema.arena, ptr_val),
|
||||
try Value.Tag.opt_payload_ptr.create(sema.arena, .{
|
||||
.container_ptr = ptr_val,
|
||||
.container_ty = optional_ptr_ty.childType(),
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -5733,7 +5744,10 @@ fn analyzeErrUnionPayloadPtr(
|
|||
}
|
||||
return sema.addConstant(
|
||||
operand_pointer_ty,
|
||||
try Value.Tag.eu_payload_ptr.create(sema.arena, ptr_val),
|
||||
try Value.Tag.eu_payload_ptr.create(sema.arena, .{
|
||||
.container_ptr = ptr_val,
|
||||
.container_ty = operand_ty.elemType(),
|
||||
}),
|
||||
);
|
||||
}
|
||||
if (try sema.pointerDeref(block, src, ptr_val, operand_ty)) |val| {
|
||||
|
|
@ -5743,7 +5757,10 @@ fn analyzeErrUnionPayloadPtr(
|
|||
|
||||
return sema.addConstant(
|
||||
operand_pointer_ty,
|
||||
try Value.Tag.eu_payload_ptr.create(sema.arena, ptr_val),
|
||||
try Value.Tag.eu_payload_ptr.create(sema.arena, .{
|
||||
.container_ptr = ptr_val,
|
||||
.container_ty = operand_ty.elemType(),
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -6652,6 +6669,7 @@ fn zirSwitchCapture(
|
|||
field_ty_ptr,
|
||||
try Value.Tag.field_ptr.create(sema.arena, .{
|
||||
.container_ptr = op_ptr_val,
|
||||
.container_ty = operand_ty,
|
||||
.field_index = field_index,
|
||||
}),
|
||||
);
|
||||
|
|
@ -9638,7 +9656,7 @@ fn analyzePtrArithmetic(
|
|||
if (air_tag == .ptr_sub) {
|
||||
return sema.fail(block, op_src, "TODO implement Sema comptime pointer subtraction", .{});
|
||||
}
|
||||
const new_ptr_val = try ptr_val.elemPtr(sema.arena, offset_int);
|
||||
const new_ptr_val = try ptr_val.elemPtr(ptr_ty, sema.arena, offset_int);
|
||||
return sema.addConstant(new_ptr_ty, new_ptr_val);
|
||||
} else break :rs offset_src;
|
||||
} else break :rs ptr_src;
|
||||
|
|
@ -12592,6 +12610,7 @@ fn zirReify(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.I
|
|||
enum_obj.* = .{
|
||||
.owner_decl = new_decl,
|
||||
.tag_ty = Type.initTag(.@"null"),
|
||||
.tag_ty_inferred = true,
|
||||
.fields = .{},
|
||||
.values = .{},
|
||||
.node_offset = src.node_offset,
|
||||
|
|
@ -15903,6 +15922,7 @@ fn finishFieldCallBind(
|
|||
ptr_field_ty,
|
||||
try Value.Tag.field_ptr.create(arena, .{
|
||||
.container_ptr = struct_ptr_val,
|
||||
.container_ty = ptr_ty.childType(),
|
||||
.field_index = field_index,
|
||||
}),
|
||||
);
|
||||
|
|
@ -16065,6 +16085,7 @@ fn structFieldPtrByIndex(
|
|||
ptr_field_ty,
|
||||
try Value.Tag.field_ptr.create(sema.arena, .{
|
||||
.container_ptr = struct_ptr_val,
|
||||
.container_ty = struct_ptr_ty.childType(),
|
||||
.field_index = field_index,
|
||||
}),
|
||||
);
|
||||
|
|
@ -16241,6 +16262,7 @@ fn unionFieldPtr(
|
|||
ptr_field_ty,
|
||||
try Value.Tag.field_ptr.create(arena, .{
|
||||
.container_ptr = union_ptr_val,
|
||||
.container_ty = union_ty,
|
||||
.field_index = field_index,
|
||||
}),
|
||||
);
|
||||
|
|
@ -16333,7 +16355,7 @@ fn elemPtr(
|
|||
const runtime_src = if (maybe_slice_val) |slice_val| rs: {
|
||||
const index_val = maybe_index_val orelse break :rs elem_index_src;
|
||||
const index = @intCast(usize, index_val.toUnsignedInt());
|
||||
const elem_ptr = try slice_val.elemPtr(sema.arena, index);
|
||||
const elem_ptr = try slice_val.elemPtr(array_ty, sema.arena, index);
|
||||
return sema.addConstant(result_ty, elem_ptr);
|
||||
} else array_ptr_src;
|
||||
|
||||
|
|
@ -16348,7 +16370,7 @@ fn elemPtr(
|
|||
const ptr_val = maybe_ptr_val orelse break :rs array_ptr_src;
|
||||
const index_val = maybe_index_val orelse break :rs elem_index_src;
|
||||
const index = @intCast(usize, index_val.toUnsignedInt());
|
||||
const elem_ptr = try ptr_val.elemPtr(sema.arena, index);
|
||||
const elem_ptr = try ptr_val.elemPtr(array_ty, sema.arena, index);
|
||||
return sema.addConstant(result_ty, elem_ptr);
|
||||
};
|
||||
|
||||
|
|
@ -16473,6 +16495,7 @@ fn tupleFieldPtr(
|
|||
ptr_field_ty,
|
||||
try Value.Tag.field_ptr.create(sema.arena, .{
|
||||
.container_ptr = tuple_ptr_val,
|
||||
.container_ty = tuple_ty,
|
||||
.field_index = field_index,
|
||||
}),
|
||||
);
|
||||
|
|
@ -16563,7 +16586,7 @@ fn elemPtrArray(
|
|||
const index_u64 = index_val.toUnsignedInt();
|
||||
// @intCast here because it would have been impossible to construct a value that
|
||||
// required a larger index.
|
||||
const elem_ptr = try array_ptr_val.elemPtr(sema.arena, @intCast(usize, index_u64));
|
||||
const elem_ptr = try array_ptr_val.elemPtr(array_ptr_ty, sema.arena, @intCast(usize, index_u64));
|
||||
return sema.addConstant(result_ty, elem_ptr);
|
||||
}
|
||||
}
@@ -17569,6 +17592,14 @@ fn beginComptimePtrMutation(
    src: LazySrcLoc,
    ptr_val: Value,
) CompileError!ComptimePtrMutationKit {

    // TODO: Update this to behave like `beginComptimePtrLoad` and properly check/use
    // `container_ty` and `array_ty`, instead of trusting that the parent decl type
    // matches the type used to derive the elem_ptr/field_ptr/etc.
    //
    // This is needed because the types will not match if the pointer we're mutating
    // through is reinterpreting comptime memory.

    switch (ptr_val.tag()) {
        .decl_ref_mut => {
            const decl_ref_mut = ptr_val.castTag(.decl_ref_mut).?.data;
@ -17757,8 +17788,8 @@ fn beginComptimePtrMutation(
|
|||
}
|
||||
},
|
||||
.eu_payload_ptr => {
|
||||
const eu_ptr_val = ptr_val.castTag(.eu_payload_ptr).?.data;
|
||||
var parent = try beginComptimePtrMutation(sema, block, src, eu_ptr_val);
|
||||
const eu_ptr = ptr_val.castTag(.eu_payload_ptr).?.data;
|
||||
var parent = try beginComptimePtrMutation(sema, block, src, eu_ptr.container_ptr);
|
||||
const payload_ty = parent.ty.errorUnionPayload();
|
||||
switch (parent.val.tag()) {
|
||||
else => {
|
||||
|
|
@ -17790,8 +17821,8 @@ fn beginComptimePtrMutation(
|
|||
}
|
||||
},
|
||||
.opt_payload_ptr => {
|
||||
const opt_ptr_val = ptr_val.castTag(.opt_payload_ptr).?.data;
|
||||
var parent = try beginComptimePtrMutation(sema, block, src, opt_ptr_val);
|
||||
const opt_ptr = ptr_val.castTag(.opt_payload_ptr).?.data;
|
||||
var parent = try beginComptimePtrMutation(sema, block, src, opt_ptr.container_ptr);
|
||||
const payload_ty = try parent.ty.optionalChildAlloc(sema.arena);
|
||||
switch (parent.val.tag()) {
|
||||
.undef, .null_value => {
|
||||
@@ -17829,163 +17860,191 @@ fn beginComptimePtrMutation(
    }
}

const ComptimePtrLoadKit = struct {
    /// The Value of the Decl that owns this memory.
    root_val: Value,
    /// The Type of the Decl that owns this memory.
    root_ty: Type,
    /// Parent Value.
    val: Value,
    /// The Type of the parent Value.
    ty: Type,
const TypedValueAndOffset = struct {
    tv: TypedValue,
    /// The starting byte offset of `val` from `root_val`.
    /// If the type does not have a well-defined memory layout, this is null.
    byte_offset: ?usize,
    /// Whether the `root_val` could be mutated by further
    byte_offset: usize,
};

const ComptimePtrLoadKit = struct {
    /// The Value and Type corresponding to the pointee of the provided pointer.
    /// If a direct dereference is not possible, this is null.
    pointee: ?TypedValue,
    /// The largest parent Value containing `pointee` and having a well-defined memory layout.
    /// This is used for bitcasting, if direct dereferencing failed (i.e. `pointee` is null).
    parent: ?TypedValueAndOffset,
    /// Whether the `pointee` could be mutated by further
    /// semantic analysis and a copy must be performed.
    is_mutable: bool,
    /// If the root decl could not be used as `parent`, this is the type that
    /// caused that by not having a well-defined layout
    ty_without_well_defined_layout: ?Type,
};

const ComptimePtrLoadError = CompileError || error{
    RuntimeLoad,
};

/// If `maybe_array_ty` is provided, it will be used to directly dereference an
/// .elem_ptr of type T to a value of [N]T, if necessary.
fn beginComptimePtrLoad(
    sema: *Sema,
    block: *Block,
    src: LazySrcLoc,
    ptr_val: Value,
    maybe_array_ty: ?Type,
) ComptimePtrLoadError!ComptimePtrLoadKit {
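Before the body of `beginComptimePtrLoad` (which follows in this hunk), a small example may help make the kit fields concrete. The code is adapted from the "lower reinterpreted comptime field ptr" behavior test added later in this diff; the mapping onto `pointee`, `parent` and `byte_offset` is my reading of the doc comments above rather than text from the commit.

```zig
const std = @import("std");
const expect = std.testing.expect;

const S = extern struct {
    a: u32 align(2),
    c: u8,
};

test "elem_ptr derived from a reinterpreted type" {
    comptime var src_value = S{ .a = 15, .c = 5 };
    // Viewing the S value as raw bytes and indexing it yields an .elem_ptr
    // whose element type (u8) does not match the underlying decl's type (S).
    comptime var ptr2 = @ptrCast(*[@sizeOf(S)]u8, &src_value);
    var val2 = &ptr2[4];
    // Direct dereference is impossible (the decl holds an S, not an array of
    // u8), so `pointee` ends up null; but S has a well-defined extern layout,
    // so `parent` carries the S value with byte_offset 4, and pointerDeref
    // bit-casts the requested byte out of it.
    try expect(val2.* == 5);
}
```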
|
||||
const target = sema.mod.getTarget();
|
||||
switch (ptr_val.tag()) {
|
||||
.decl_ref => {
|
||||
const decl = ptr_val.castTag(.decl_ref).?.data;
|
||||
const decl_val = try decl.value();
|
||||
if (decl_val.tag() == .variable) return error.RuntimeLoad;
|
||||
return ComptimePtrLoadKit{
|
||||
.root_val = decl_val,
|
||||
.root_ty = decl.ty,
|
||||
.val = decl_val,
|
||||
.ty = decl.ty,
|
||||
.byte_offset = 0,
|
||||
.is_mutable = false,
|
||||
var deref: ComptimePtrLoadKit = switch (ptr_val.tag()) {
|
||||
.decl_ref,
|
||||
.decl_ref_mut,
|
||||
=> blk: {
|
||||
const decl = switch (ptr_val.tag()) {
|
||||
.decl_ref => ptr_val.castTag(.decl_ref).?.data,
|
||||
.decl_ref_mut => ptr_val.castTag(.decl_ref_mut).?.data.decl,
|
||||
else => unreachable,
|
||||
};
|
||||
const is_mutable = ptr_val.tag() == .decl_ref_mut;
|
||||
const decl_tv = try decl.typedValue();
|
||||
if (decl_tv.val.tag() == .variable) return error.RuntimeLoad;
|
||||
|
||||
const layout_defined = decl.ty.hasWellDefinedLayout();
|
||||
break :blk ComptimePtrLoadKit{
|
||||
.parent = if (layout_defined) .{ .tv = decl_tv, .byte_offset = 0 } else null,
|
||||
.pointee = decl_tv,
|
||||
.is_mutable = is_mutable,
|
||||
.ty_without_well_defined_layout = if (!layout_defined) decl.ty else null,
|
||||
};
|
||||
},
|
||||
.decl_ref_mut => {
|
||||
const decl = ptr_val.castTag(.decl_ref_mut).?.data.decl;
|
||||
const decl_val = try decl.value();
|
||||
if (decl_val.tag() == .variable) return error.RuntimeLoad;
|
||||
return ComptimePtrLoadKit{
|
||||
.root_val = decl_val,
|
||||
.root_ty = decl.ty,
|
||||
.val = decl_val,
|
||||
.ty = decl.ty,
|
||||
.byte_offset = 0,
|
||||
.is_mutable = true,
|
||||
};
|
||||
},
|
||||
.elem_ptr => {
|
||||
|
||||
.elem_ptr => blk: {
|
||||
const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
|
||||
const parent = try beginComptimePtrLoad(sema, block, src, elem_ptr.array_ptr);
|
||||
switch (parent.ty.zigTypeTag()) {
|
||||
.Array, .Vector => {
|
||||
const check_len = parent.ty.arrayLenIncludingSentinel();
|
||||
if (elem_ptr.index >= check_len) {
|
||||
// TODO have the parent include the decl so we can say "declared here"
|
||||
return sema.fail(block, src, "comptime load of index {d} out of bounds of array length {d}", .{
|
||||
elem_ptr.index, check_len,
|
||||
});
|
||||
const elem_ty = elem_ptr.elem_ty;
|
||||
var deref = try beginComptimePtrLoad(sema, block, src, elem_ptr.array_ptr, null);
|
||||
|
||||
if (elem_ptr.index != 0) {
|
||||
if (elem_ty.hasWellDefinedLayout()) {
|
||||
if (deref.parent) |*parent| {
|
||||
// Update the byte offset (in-place)
|
||||
const elem_size = try sema.typeAbiSize(block, src, elem_ty);
|
||||
const offset = parent.byte_offset + elem_size * elem_ptr.index;
|
||||
parent.byte_offset = try sema.usizeCast(block, src, offset);
|
||||
}
|
||||
const elem_ty = parent.ty.childType();
|
||||
const byte_offset: ?usize = bo: {
|
||||
if (try sema.typeRequiresComptime(block, src, elem_ty)) {
|
||||
break :bo null;
|
||||
} else {
|
||||
if (parent.byte_offset) |off| {
|
||||
try sema.resolveTypeLayout(block, src, elem_ty);
|
||||
const elem_size = elem_ty.abiSize(target);
|
||||
break :bo try sema.usizeCast(block, src, off + elem_size * elem_ptr.index);
|
||||
} else {
|
||||
break :bo null;
|
||||
}
|
||||
}
|
||||
};
|
||||
return ComptimePtrLoadKit{
|
||||
.root_val = parent.root_val,
|
||||
.root_ty = parent.root_ty,
|
||||
.val = try parent.val.elemValue(sema.arena, elem_ptr.index),
|
||||
.ty = elem_ty,
|
||||
.byte_offset = byte_offset,
|
||||
.is_mutable = parent.is_mutable,
|
||||
};
|
||||
},
|
||||
else => {
|
||||
if (elem_ptr.index != 0) {
|
||||
// TODO have the parent include the decl so we can say "declared here"
|
||||
return sema.fail(block, src, "out of bounds comptime load of index {d}", .{
|
||||
elem_ptr.index,
|
||||
});
|
||||
}
|
||||
return ComptimePtrLoadKit{
|
||||
.root_val = parent.root_val,
|
||||
.root_ty = parent.root_ty,
|
||||
.val = parent.val,
|
||||
.ty = parent.ty,
|
||||
.byte_offset = parent.byte_offset,
|
||||
.is_mutable = parent.is_mutable,
|
||||
};
|
||||
},
|
||||
}
|
||||
},
|
||||
.field_ptr => {
|
||||
const field_ptr = ptr_val.castTag(.field_ptr).?.data;
|
||||
const parent = try beginComptimePtrLoad(sema, block, src, field_ptr.container_ptr);
|
||||
const field_index = @intCast(u32, field_ptr.field_index);
|
||||
const byte_offset: ?usize = bo: {
|
||||
if (try sema.typeRequiresComptime(block, src, parent.ty)) {
|
||||
break :bo null;
|
||||
} else {
|
||||
if (parent.byte_offset) |off| {
|
||||
try sema.resolveTypeLayout(block, src, parent.ty);
|
||||
const field_offset = parent.ty.structFieldOffset(field_index, target);
|
||||
break :bo try sema.usizeCast(block, src, off + field_offset);
|
||||
} else {
|
||||
break :bo null;
|
||||
}
|
||||
deref.parent = null;
|
||||
deref.ty_without_well_defined_layout = elem_ty;
|
||||
}
|
||||
}
|
||||
|
||||
// If we're loading an elem_ptr that was derived from a different type
|
||||
// than the true type of the underlying decl, we cannot deref directly
|
||||
const ty_matches = if (deref.pointee != null and deref.pointee.?.ty.isArrayLike()) x: {
|
||||
const deref_elem_ty = deref.pointee.?.ty.childType();
|
||||
break :x (try sema.coerceInMemoryAllowed(block, deref_elem_ty, elem_ty, false, target, src, src)) == .ok or
|
||||
(try sema.coerceInMemoryAllowed(block, elem_ty, deref_elem_ty, false, target, src, src)) == .ok;
|
||||
} else false;
|
||||
if (!ty_matches) {
|
||||
deref.pointee = null;
|
||||
break :blk deref;
|
||||
}
|
||||
|
||||
var array_tv = deref.pointee.?;
|
||||
const check_len = array_tv.ty.arrayLenIncludingSentinel();
|
||||
if (elem_ptr.index >= check_len) {
|
||||
// TODO have the deref include the decl so we can say "declared here"
|
||||
return sema.fail(block, src, "comptime load of index {d} out of bounds of array length {d}", .{
|
||||
elem_ptr.index, check_len,
|
||||
});
|
||||
}
|
||||
|
||||
if (maybe_array_ty) |load_ty| {
|
||||
// It's possible that we're loading a [N]T, in which case we'd like to slice
|
||||
// the pointee array directly from our parent array.
|
||||
if (load_ty.isArrayLike() and load_ty.childType().eql(elem_ty)) {
|
||||
const N = try sema.usizeCast(block, src, load_ty.arrayLenIncludingSentinel());
|
||||
deref.pointee = if (elem_ptr.index + N <= check_len) TypedValue{
|
||||
.ty = try Type.array(sema.arena, N, null, elem_ty),
|
||||
.val = try array_tv.val.sliceArray(sema.arena, elem_ptr.index, elem_ptr.index + N),
|
||||
} else null;
|
||||
break :blk deref;
|
||||
}
|
||||
}
|
||||
|
||||
deref.pointee = .{
|
||||
.ty = elem_ty,
|
||||
.val = try array_tv.val.elemValue(sema.arena, elem_ptr.index),
|
||||
};
|
||||
return ComptimePtrLoadKit{
|
||||
.root_val = parent.root_val,
|
||||
.root_ty = parent.root_ty,
|
||||
.val = try parent.val.fieldValue(sema.arena, field_index),
|
||||
.ty = parent.ty.structFieldType(field_index),
|
||||
.byte_offset = byte_offset,
|
||||
.is_mutable = parent.is_mutable,
|
||||
};
|
||||
break :blk deref;
|
||||
},
|
||||
.eu_payload_ptr => {
|
||||
const err_union_ptr = ptr_val.castTag(.eu_payload_ptr).?.data;
|
||||
const parent = try beginComptimePtrLoad(sema, block, src, err_union_ptr);
|
||||
return ComptimePtrLoadKit{
|
||||
.root_val = parent.root_val,
|
||||
.root_ty = parent.root_ty,
|
||||
.val = parent.val.castTag(.eu_payload).?.data,
|
||||
.ty = parent.ty.errorUnionPayload(),
|
||||
.byte_offset = null,
|
||||
.is_mutable = parent.is_mutable,
|
||||
};
|
||||
|
||||
.field_ptr => blk: {
|
||||
const field_ptr = ptr_val.castTag(.field_ptr).?.data;
|
||||
const field_index = @intCast(u32, field_ptr.field_index);
|
||||
const field_ty = field_ptr.container_ty.structFieldType(field_index);
|
||||
var deref = try beginComptimePtrLoad(sema, block, src, field_ptr.container_ptr, field_ptr.container_ty);
|
||||
|
||||
if (field_ptr.container_ty.hasWellDefinedLayout()) {
|
||||
if (deref.parent) |*parent| {
|
||||
// Update the byte offset (in-place)
|
||||
try sema.resolveTypeLayout(block, src, field_ptr.container_ty);
|
||||
const field_offset = field_ptr.container_ty.structFieldOffset(field_index, target);
|
||||
parent.byte_offset = try sema.usizeCast(block, src, parent.byte_offset + field_offset);
|
||||
}
|
||||
} else {
|
||||
deref.parent = null;
|
||||
deref.ty_without_well_defined_layout = field_ptr.container_ty;
|
||||
}
|
||||
|
||||
if (deref.pointee) |*tv| {
|
||||
const coerce_in_mem_ok =
|
||||
(try sema.coerceInMemoryAllowed(block, field_ptr.container_ty, tv.ty, false, target, src, src)) == .ok or
|
||||
(try sema.coerceInMemoryAllowed(block, tv.ty, field_ptr.container_ty, false, target, src, src)) == .ok;
|
||||
if (coerce_in_mem_ok) {
|
||||
deref.pointee = TypedValue{
|
||||
.ty = field_ty,
|
||||
.val = try tv.val.fieldValue(sema.arena, field_index),
|
||||
};
|
||||
break :blk deref;
|
||||
}
|
||||
}
|
||||
deref.pointee = null;
|
||||
break :blk deref;
|
||||
},
|
||||
.opt_payload_ptr => {
|
||||
const opt_ptr = ptr_val.castTag(.opt_payload_ptr).?.data;
|
||||
const parent = try beginComptimePtrLoad(sema, block, src, opt_ptr);
|
||||
return ComptimePtrLoadKit{
|
||||
.root_val = parent.root_val,
|
||||
.root_ty = parent.root_ty,
|
||||
.val = parent.val.castTag(.opt_payload).?.data,
|
||||
.ty = try parent.ty.optionalChildAlloc(sema.arena),
|
||||
.byte_offset = null,
|
||||
.is_mutable = parent.is_mutable,
|
||||
|
||||
.opt_payload_ptr,
|
||||
.eu_payload_ptr,
|
||||
=> blk: {
|
||||
const payload_ptr = ptr_val.cast(Value.Payload.PayloadPtr).?.data;
|
||||
const payload_ty = switch (ptr_val.tag()) {
|
||||
.eu_payload_ptr => payload_ptr.container_ty.errorUnionPayload(),
|
||||
.opt_payload_ptr => try payload_ptr.container_ty.optionalChildAlloc(sema.arena),
|
||||
else => unreachable,
|
||||
};
|
||||
var deref = try beginComptimePtrLoad(sema, block, src, payload_ptr.container_ptr, payload_ptr.container_ty);
|
||||
|
||||
// eu_payload_ptr and opt_payload_ptr never have a well-defined layout
|
||||
if (deref.parent != null) {
|
||||
deref.parent = null;
|
||||
deref.ty_without_well_defined_layout = payload_ptr.container_ty;
|
||||
}
|
||||
|
||||
if (deref.pointee) |*tv| {
|
||||
const coerce_in_mem_ok =
|
||||
(try sema.coerceInMemoryAllowed(block, payload_ptr.container_ty, tv.ty, false, target, src, src)) == .ok or
|
||||
(try sema.coerceInMemoryAllowed(block, tv.ty, payload_ptr.container_ty, false, target, src, src)) == .ok;
|
||||
if (coerce_in_mem_ok) {
|
||||
const payload_val = switch (ptr_val.tag()) {
|
||||
.eu_payload_ptr => tv.val.castTag(.eu_payload).?.data,
|
||||
.opt_payload_ptr => tv.val.castTag(.opt_payload).?.data,
|
||||
else => unreachable,
|
||||
};
|
||||
tv.* = TypedValue{ .ty = payload_ty, .val = payload_val };
|
||||
break :blk deref;
|
||||
}
|
||||
}
|
||||
deref.pointee = null;
|
||||
break :blk deref;
|
||||
},
|
||||
|
||||
.zero,
|
||||
|
|
@ -18000,7 +18059,14 @@ fn beginComptimePtrLoad(
|
|||
=> return error.RuntimeLoad,
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
if (deref.pointee) |tv| {
|
||||
if (deref.parent == null and tv.ty.hasWellDefinedLayout()) {
|
||||
deref.parent = .{ .tv = tv, .byte_offset = 0 };
|
||||
}
|
||||
}
|
||||
return deref;
|
||||
}
|
||||
|
||||
fn bitCast(
|
||||
|
|
@ -21085,39 +21151,53 @@ pub fn analyzeAddrspace(
|
|||
/// Asserts the value is a pointer and dereferences it.
|
||||
/// Returns `null` if the pointer contents cannot be loaded at comptime.
|
||||
fn pointerDeref(sema: *Sema, block: *Block, src: LazySrcLoc, ptr_val: Value, ptr_ty: Type) CompileError!?Value {
|
||||
const target = sema.mod.getTarget();
|
||||
const load_ty = ptr_ty.childType();
|
||||
const parent = sema.beginComptimePtrLoad(block, src, ptr_val) catch |err| switch (err) {
|
||||
const target = sema.mod.getTarget();
|
||||
const deref = sema.beginComptimePtrLoad(block, src, ptr_val, load_ty) catch |err| switch (err) {
|
||||
error.RuntimeLoad => return null,
|
||||
else => |e| return e,
|
||||
};
|
||||
// We have a Value that lines up in virtual memory exactly with what we want to load.
|
||||
// If the Type is in-memory coercable to `load_ty`, it may be returned without modifications.
|
||||
const coerce_in_mem_ok =
|
||||
(try sema.coerceInMemoryAllowed(block, load_ty, parent.ty, false, target, src, src)) == .ok or
|
||||
(try sema.coerceInMemoryAllowed(block, parent.ty, load_ty, false, target, src, src)) == .ok;
|
||||
if (coerce_in_mem_ok) {
|
||||
if (parent.is_mutable) {
|
||||
// The decl whose value we are obtaining here may be overwritten with
|
||||
// a different value upon further semantic analysis, which would
|
||||
// invalidate this memory. So we must copy here.
|
||||
return try parent.val.copy(sema.arena);
|
||||
|
||||
if (deref.pointee) |tv| {
|
||||
const coerce_in_mem_ok =
|
||||
(try sema.coerceInMemoryAllowed(block, load_ty, tv.ty, false, target, src, src)) == .ok or
|
||||
(try sema.coerceInMemoryAllowed(block, tv.ty, load_ty, false, target, src, src)) == .ok;
|
||||
if (coerce_in_mem_ok) {
|
||||
// We have a Value that lines up in virtual memory exactly with what we want to load,
|
||||
// and it is in-memory coercible to load_ty. It may be returned without modifications.
|
||||
if (deref.is_mutable) {
|
||||
// The decl whose value we are obtaining here may be overwritten with
|
||||
// a different value upon further semantic analysis, which would
|
||||
// invalidate this memory. So we must copy here.
|
||||
return try tv.val.copy(sema.arena);
|
||||
}
|
||||
return tv.val;
|
||||
}
|
||||
return parent.val;
|
||||
}
|
||||
|
||||
// The type is not in-memory coercable, so it must be bitcasted according
|
||||
// to the pointer type we are performing the load through.
|
||||
// The type is not in-memory coercible or the direct dereference failed, so it must
|
||||
// be bitcast according to the pointer type we are performing the load through.
|
||||
if (!load_ty.hasWellDefinedLayout())
|
||||
return sema.fail(block, src, "comptime dereference requires {} to have a well-defined layout, but it does not.", .{load_ty});
|
||||
|
||||
// TODO emit a compile error if the types are not allowed to be bitcasted
|
||||
const load_sz = try sema.typeAbiSize(block, src, load_ty);
|
||||
|
||||
if (parent.ty.abiSize(target) >= load_ty.abiSize(target)) {
|
||||
// The Type it is stored as in the compiler has an ABI size greater or equal to
|
||||
// the ABI size of `load_ty`. We may perform the bitcast based on
|
||||
// `parent.val` alone (more efficient).
|
||||
return try sema.bitCastVal(block, src, parent.val, parent.ty, load_ty, 0);
|
||||
// Try the smaller bit-cast first, since that's more efficient than using the larger `parent`
|
||||
if (deref.pointee) |tv| if (load_sz <= try sema.typeAbiSize(block, src, tv.ty))
|
||||
return try sema.bitCastVal(block, src, tv.val, tv.ty, load_ty, 0);
|
||||
|
||||
// If that fails, try to bit-cast from the largest parent value with a well-defined layout
|
||||
if (deref.parent) |parent| if (load_sz + parent.byte_offset <= try sema.typeAbiSize(block, src, parent.tv.ty))
|
||||
return try sema.bitCastVal(block, src, parent.tv.val, parent.tv.ty, load_ty, parent.byte_offset);
|
||||
|
||||
if (deref.ty_without_well_defined_layout) |bad_ty| {
|
||||
// We got no parent for bit-casting, or the parent we got was too small. Either way, the problem
|
||||
// is that some type we encountered when de-referencing does not have a well-defined layout.
|
||||
return sema.fail(block, src, "comptime dereference requires {} to have a well-defined layout, but it does not.", .{bad_ty});
|
||||
} else {
|
||||
return try sema.bitCastVal(block, src, parent.root_val, parent.root_ty, load_ty, parent.byte_offset.?);
|
||||
// If all encountered types had well-defined layouts, the parent is the root decl and it just
|
||||
// wasn't big enough for the load.
|
||||
return sema.fail(block, src, "dereference of {} exceeds bounds of containing decl of type {}", .{ ptr_ty, deref.parent.?.tv.ty });
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -21395,6 +21475,12 @@ pub fn typeHasRuntimeBits(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type)
|
|||
return true;
|
||||
}
|
||||
|
||||
fn typeAbiSize(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !u64 {
|
||||
try sema.resolveTypeLayout(block, src, ty);
|
||||
const target = sema.mod.getTarget();
|
||||
return ty.abiSize(target);
|
||||
}
|
||||
|
||||
fn typeAbiAlignment(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !u32 {
|
||||
try sema.resolveTypeLayout(block, src, ty);
|
||||
const target = sema.mod.getTarget();
|
||||
|
|
|
|||
|
|
@ -1518,26 +1518,8 @@ pub const DeclGen = struct {
|
|||
const llvm_int = llvm_usize.constInt(tv.val.toUnsignedInt(), .False);
|
||||
return llvm_int.constIntToPtr(try dg.llvmType(tv.ty));
|
||||
},
|
||||
.field_ptr, .opt_payload_ptr, .eu_payload_ptr => {
|
||||
const parent = try dg.lowerParentPtr(tv.val, tv.ty);
|
||||
return parent.llvm_ptr.constBitCast(try dg.llvmType(tv.ty));
|
||||
},
|
||||
.elem_ptr => {
|
||||
const elem_ptr = tv.val.castTag(.elem_ptr).?.data;
|
||||
const parent = try dg.lowerParentPtr(elem_ptr.array_ptr, tv.ty);
|
||||
const llvm_usize = try dg.llvmType(Type.usize);
|
||||
if (parent.llvm_ptr.typeOf().getElementType().getTypeKind() == .Array) {
|
||||
const indices: [2]*const llvm.Value = .{
|
||||
llvm_usize.constInt(0, .False),
|
||||
llvm_usize.constInt(elem_ptr.index, .False),
|
||||
};
|
||||
return parent.llvm_ptr.constInBoundsGEP(&indices, indices.len);
|
||||
} else {
|
||||
const indices: [1]*const llvm.Value = .{
|
||||
llvm_usize.constInt(elem_ptr.index, .False),
|
||||
};
|
||||
return parent.llvm_ptr.constInBoundsGEP(&indices, indices.len);
|
||||
}
|
||||
.field_ptr, .opt_payload_ptr, .eu_payload_ptr, .elem_ptr => {
|
||||
return dg.lowerParentPtr(tv.val, tv.ty.childType());
|
||||
},
|
||||
.null_value, .zero => {
|
||||
const llvm_type = try dg.llvmType(tv.ty);
|
||||
|
|
@ -2786,7 +2768,7 @@ pub const DeclGen = struct {
|
|||
llvm_ptr: *const llvm.Value,
|
||||
};
|
||||
|
||||
fn lowerParentPtrDecl(dg: *DeclGen, ptr_val: Value, decl: *Module.Decl) Error!ParentPtr {
|
||||
fn lowerParentPtrDecl(dg: *DeclGen, ptr_val: Value, decl: *Module.Decl, ptr_child_ty: Type) Error!*const llvm.Value {
|
||||
decl.markAlive();
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
|
|
@ -2794,123 +2776,107 @@ pub const DeclGen = struct {
|
|||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const llvm_ptr = try dg.lowerDeclRefValue(.{ .ty = ptr_ty, .val = ptr_val }, decl);
|
||||
return ParentPtr{
|
||||
.llvm_ptr = llvm_ptr,
|
||||
.ty = decl.ty,
|
||||
};
|
||||
|
||||
if (ptr_child_ty.eql(decl.ty)) {
|
||||
return llvm_ptr;
|
||||
} else {
|
||||
return llvm_ptr.constBitCast((try dg.llvmType(ptr_child_ty)).pointerType(0));
|
||||
}
|
||||
}
|
||||
|
||||
fn lowerParentPtr(dg: *DeclGen, ptr_val: Value, base_ty: Type) Error!ParentPtr {
|
||||
switch (ptr_val.tag()) {
|
||||
fn lowerParentPtr(dg: *DeclGen, ptr_val: Value, ptr_child_ty: Type) Error!*const llvm.Value {
|
||||
var bitcast_needed: bool = undefined;
|
||||
const llvm_ptr = switch (ptr_val.tag()) {
|
||||
.decl_ref_mut => {
|
||||
const decl = ptr_val.castTag(.decl_ref_mut).?.data.decl;
|
||||
return dg.lowerParentPtrDecl(ptr_val, decl);
|
||||
return dg.lowerParentPtrDecl(ptr_val, decl, ptr_child_ty);
|
||||
},
|
||||
.decl_ref => {
|
||||
const decl = ptr_val.castTag(.decl_ref).?.data;
|
||||
return dg.lowerParentPtrDecl(ptr_val, decl);
|
||||
return dg.lowerParentPtrDecl(ptr_val, decl, ptr_child_ty);
|
||||
},
|
||||
.variable => {
|
||||
const decl = ptr_val.castTag(.variable).?.data.owner_decl;
|
||||
return dg.lowerParentPtrDecl(ptr_val, decl);
|
||||
return dg.lowerParentPtrDecl(ptr_val, decl, ptr_child_ty);
|
||||
},
|
||||
.int_i64 => {
|
||||
const int = ptr_val.castTag(.int_i64).?.data;
|
||||
const llvm_usize = try dg.llvmType(Type.usize);
|
||||
const llvm_int = llvm_usize.constInt(@bitCast(u64, int), .False);
|
||||
return ParentPtr{
|
||||
.llvm_ptr = llvm_int.constIntToPtr(try dg.llvmType(base_ty)),
|
||||
.ty = base_ty,
|
||||
};
|
||||
return llvm_int.constIntToPtr((try dg.llvmType(ptr_child_ty)).pointerType(0));
|
||||
},
|
||||
.int_u64 => {
|
||||
const int = ptr_val.castTag(.int_u64).?.data;
|
||||
const llvm_usize = try dg.llvmType(Type.usize);
|
||||
const llvm_int = llvm_usize.constInt(int, .False);
|
||||
return ParentPtr{
|
||||
.llvm_ptr = llvm_int.constIntToPtr(try dg.llvmType(base_ty)),
|
||||
.ty = base_ty,
|
||||
};
|
||||
return llvm_int.constIntToPtr((try dg.llvmType(ptr_child_ty)).pointerType(0));
|
||||
},
|
||||
.field_ptr => {
|
||||
.field_ptr => blk: {
|
||||
const field_ptr = ptr_val.castTag(.field_ptr).?.data;
|
||||
const parent = try dg.lowerParentPtr(field_ptr.container_ptr, base_ty);
|
||||
const parent_llvm_ptr = try dg.lowerParentPtr(field_ptr.container_ptr, field_ptr.container_ty);
|
||||
const parent_ty = field_ptr.container_ty;
|
||||
|
||||
const field_index = @intCast(u32, field_ptr.field_index);
|
||||
const llvm_u32 = dg.context.intType(32);
|
||||
const target = dg.module.getTarget();
|
||||
switch (parent.ty.zigTypeTag()) {
|
||||
switch (parent_ty.zigTypeTag()) {
|
||||
.Union => {
|
||||
const fields = parent.ty.unionFields();
|
||||
const layout = parent.ty.unionGetLayout(target);
|
||||
const field_ty = fields.values()[field_index].ty;
|
||||
bitcast_needed = true;
|
||||
|
||||
const layout = parent_ty.unionGetLayout(target);
|
||||
if (layout.payload_size == 0) {
|
||||
// In this case a pointer to the union and a pointer to any
|
||||
// (void) payload is the same.
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr,
|
||||
.ty = field_ty,
|
||||
};
|
||||
break :blk parent_llvm_ptr;
|
||||
}
|
||||
if (layout.tag_size == 0) {
|
||||
const indices: [2]*const llvm.Value = .{
|
||||
llvm_u32.constInt(0, .False),
|
||||
llvm_u32.constInt(0, .False),
|
||||
};
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr.constInBoundsGEP(&indices, indices.len),
|
||||
.ty = field_ty,
|
||||
};
|
||||
}
|
||||
const llvm_pl_index = @boolToInt(layout.tag_align >= layout.payload_align);
|
||||
const llvm_pl_index = if (layout.tag_size == 0)
|
||||
0
|
||||
else
|
||||
@boolToInt(layout.tag_align >= layout.payload_align);
|
||||
const indices: [2]*const llvm.Value = .{
|
||||
llvm_u32.constInt(0, .False),
|
||||
llvm_u32.constInt(llvm_pl_index, .False),
|
||||
};
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr.constInBoundsGEP(&indices, indices.len),
|
||||
.ty = field_ty,
|
||||
};
|
||||
break :blk parent_llvm_ptr.constInBoundsGEP(&indices, indices.len);
|
||||
},
|
||||
.Struct => {
|
||||
const field_ty = parent_ty.structFieldType(field_index);
|
||||
bitcast_needed = !field_ty.eql(ptr_child_ty);
|
||||
|
||||
var ty_buf: Type.Payload.Pointer = undefined;
|
||||
const llvm_field_index = llvmFieldIndex(parent.ty, field_index, target, &ty_buf).?;
|
||||
const llvm_field_index = llvmFieldIndex(parent_ty, field_index, target, &ty_buf).?;
|
||||
const indices: [2]*const llvm.Value = .{
|
||||
llvm_u32.constInt(0, .False),
|
||||
llvm_u32.constInt(llvm_field_index, .False),
|
||||
};
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr.constInBoundsGEP(&indices, indices.len),
|
||||
.ty = parent.ty.structFieldType(field_index),
|
||||
};
|
||||
break :blk parent_llvm_ptr.constInBoundsGEP(&indices, indices.len);
|
||||
},
|
||||
else => unreachable,
|
||||
}
|
||||
},
|
||||
.elem_ptr => {
|
||||
.elem_ptr => blk: {
|
||||
const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
|
||||
const parent = try dg.lowerParentPtr(elem_ptr.array_ptr, base_ty);
|
||||
const parent_llvm_ptr = try dg.lowerParentPtr(elem_ptr.array_ptr, elem_ptr.elem_ty);
|
||||
bitcast_needed = !elem_ptr.elem_ty.eql(ptr_child_ty);
|
||||
|
||||
const llvm_usize = try dg.llvmType(Type.usize);
|
||||
const indices: [2]*const llvm.Value = .{
|
||||
llvm_usize.constInt(0, .False),
|
||||
const indices: [1]*const llvm.Value = .{
|
||||
llvm_usize.constInt(elem_ptr.index, .False),
|
||||
};
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr.constInBoundsGEP(&indices, indices.len),
|
||||
.ty = parent.ty.childType(),
|
||||
};
|
||||
break :blk parent_llvm_ptr.constInBoundsGEP(&indices, indices.len);
|
||||
},
|
||||
.opt_payload_ptr => {
|
||||
.opt_payload_ptr => blk: {
|
||||
const opt_payload_ptr = ptr_val.castTag(.opt_payload_ptr).?.data;
|
||||
const parent = try dg.lowerParentPtr(opt_payload_ptr, base_ty);
|
||||
const parent_llvm_ptr = try dg.lowerParentPtr(opt_payload_ptr.container_ptr, opt_payload_ptr.container_ty);
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = parent.ty.optionalChild(&buf);
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime() or parent.ty.isPtrLikeOptional()) {
|
||||
|
||||
const payload_ty = opt_payload_ptr.container_ty.optionalChild(&buf);
|
||||
bitcast_needed = !payload_ty.eql(ptr_child_ty);
|
||||
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime() or payload_ty.isPtrLikeOptional()) {
|
||||
// In this case, we represent pointer to optional the same as pointer
|
||||
// to the payload.
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr,
|
||||
.ty = payload_ty,
|
||||
};
|
||||
break :blk parent_llvm_ptr;
|
||||
}
|
||||
|
||||
const llvm_u32 = dg.context.intType(32);
|
||||
|
|
@ -2918,22 +2884,19 @@ pub const DeclGen = struct {
|
|||
llvm_u32.constInt(0, .False),
|
||||
llvm_u32.constInt(0, .False),
|
||||
};
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr.constInBoundsGEP(&indices, indices.len),
|
||||
.ty = payload_ty,
|
||||
};
|
||||
break :blk parent_llvm_ptr.constInBoundsGEP(&indices, indices.len);
|
||||
},
|
||||
.eu_payload_ptr => {
|
||||
.eu_payload_ptr => blk: {
|
||||
const eu_payload_ptr = ptr_val.castTag(.eu_payload_ptr).?.data;
|
||||
const parent = try dg.lowerParentPtr(eu_payload_ptr, base_ty);
|
||||
const payload_ty = parent.ty.errorUnionPayload();
|
||||
const parent_llvm_ptr = try dg.lowerParentPtr(eu_payload_ptr.container_ptr, eu_payload_ptr.container_ty);
|
||||
|
||||
const payload_ty = eu_payload_ptr.container_ty.errorUnionPayload();
|
||||
bitcast_needed = !payload_ty.eql(ptr_child_ty);
|
||||
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
|
||||
// In this case, we represent pointer to error union the same as pointer
|
||||
// to the payload.
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr,
|
||||
.ty = payload_ty,
|
||||
};
|
||||
break :blk parent_llvm_ptr;
|
||||
}
|
||||
|
||||
const llvm_u32 = dg.context.intType(32);
|
||||
|
|
@ -2941,12 +2904,14 @@ pub const DeclGen = struct {
|
|||
llvm_u32.constInt(0, .False),
|
||||
llvm_u32.constInt(1, .False),
|
||||
};
|
||||
return ParentPtr{
|
||||
.llvm_ptr = parent.llvm_ptr.constInBoundsGEP(&indices, indices.len),
|
||||
.ty = payload_ty,
|
||||
};
|
||||
break :blk parent_llvm_ptr.constInBoundsGEP(&indices, indices.len);
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
if (bitcast_needed) {
|
||||
return llvm_ptr.constBitCast((try dg.llvmType(ptr_child_ty)).pointerType(0));
|
||||
} else {
|
||||
return llvm_ptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
144 src/type.zig
@@ -2173,6 +2173,128 @@ pub const Type = extern union {
        };
    }

    /// true if and only if the type has a well-defined memory layout
    /// readFrom/writeToMemory are supported only for types with a well-
    /// defined memory layout
    pub fn hasWellDefinedLayout(ty: Type) bool {
        return switch (ty.tag()) {
            .u1,
            .u8,
            .i8,
            .u16,
            .i16,
            .u32,
            .i32,
            .u64,
            .i64,
            .u128,
            .i128,
            .usize,
            .isize,
            .c_short,
            .c_ushort,
            .c_int,
            .c_uint,
            .c_long,
            .c_ulong,
            .c_longlong,
            .c_ulonglong,
            .c_longdouble,
            .f16,
            .f32,
            .f64,
            .f80,
            .f128,
            .bool,
            .void,
            .manyptr_u8,
            .manyptr_const_u8,
            .manyptr_const_u8_sentinel_0,
            .array_u8,
            .array_u8_sentinel_0,
            .int_signed,
            .int_unsigned,
            .pointer,
            .single_const_pointer,
            .single_mut_pointer,
            .many_const_pointer,
            .many_mut_pointer,
            .c_const_pointer,
            .c_mut_pointer,
            .single_const_pointer_to_comptime_int,
            .enum_numbered,
            .vector,
            .optional_single_mut_pointer,
            .optional_single_const_pointer,
            => true,

            .anyopaque,
            .anyerror,
            .noreturn,
            .@"null",
            .@"anyframe",
            .@"undefined",
            .atomic_order,
            .atomic_rmw_op,
            .calling_convention,
            .address_space,
            .float_mode,
            .reduce_op,
            .call_options,
            .prefetch_options,
            .export_options,
            .extern_options,
            .error_set,
            .error_set_single,
            .error_set_inferred,
            .error_set_merged,
            .@"opaque",
            .generic_poison,
            .type,
            .comptime_int,
            .comptime_float,
            .enum_literal,
            .type_info,
            // These are function bodies, not function pointers.
            .fn_noreturn_no_args,
            .fn_void_no_args,
            .fn_naked_noreturn_no_args,
            .fn_ccc_void_no_args,
            .function,
            .const_slice_u8,
            .const_slice_u8_sentinel_0,
            .const_slice,
            .mut_slice,
            .enum_simple,
            .error_union,
            .anyerror_void_error_union,
            .anyframe_T,
            .tuple,
            .anon_struct,
            .empty_struct_literal,
            .empty_struct,
            => false,

            .enum_full,
            .enum_nonexhaustive,
            => !ty.cast(Payload.EnumFull).?.data.tag_ty_inferred,

            .var_args_param => unreachable,
            .inferred_alloc_mut => unreachable,
            .inferred_alloc_const => unreachable,
            .bound_fn => unreachable,

            .array,
            .array_sentinel,
            => ty.childType().hasWellDefinedLayout(),

            .optional => ty.isPtrLikeOptional(),
            .@"struct" => ty.castTag(.@"struct").?.data.layout != .Auto,
            .@"union" => ty.castTag(.@"union").?.data.layout != .Auto,
            .union_tagged => false,
        };
    }

    pub fn hasRuntimeBits(ty: Type) bool {
        return hasRuntimeBitsAdvanced(ty, false);
    }
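A concrete illustration of the distinction `hasWellDefinedLayout` draws (my example, not part of the commit): an `extern struct` has a fixed, C-compatible layout and can therefore be reinterpreted at comptime, while an auto-layout `struct` cannot, which is the error path reported by `pointerDeref` in the Sema changes above.

```zig
const std = @import("std");
const expect = std.testing.expect;

const Auto = struct { a: u32, b: u8 }; // auto layout: field order and padding unspecified
const Fixed = extern struct { a: u32, b: u8 }; // extern layout: well-defined

test "well-defined layout decides comptime reinterpretation" {
    comptime var fixed = Fixed{ .a = 1, .b = 2 };
    comptime var raw = @ptrCast(*const [@sizeOf(Fixed)]u8, &fixed);
    // Fixed has a well-defined layout, so the comptime load can bit-cast the
    // byte at offset 4 (field b) out of the parent value.
    try expect(raw[4] == 2);

    // Doing the same through `Auto` would be rejected at comptime with
    // "comptime dereference requires ... to have a well-defined layout".
}
```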
|
||||
|
|
@ -3156,13 +3278,12 @@ pub const Type = extern union {
|
|||
=> return true,
|
||||
|
||||
.optional => {
|
||||
var buf: Payload.ElemType = undefined;
|
||||
const child_type = self.optionalChild(&buf);
|
||||
const child_ty = self.castTag(.optional).?.data;
|
||||
// optionals of zero sized types behave like bools, not pointers
|
||||
if (!child_type.hasRuntimeBits()) return false;
|
||||
if (child_type.zigTypeTag() != .Pointer) return false;
|
||||
if (!child_ty.hasRuntimeBits()) return false;
|
||||
if (child_ty.zigTypeTag() != .Pointer) return false;
|
||||
|
||||
const info = child_type.ptrInfo().data;
|
||||
const info = child_ty.ptrInfo().data;
|
||||
switch (info.size) {
|
||||
.Slice, .C => return false,
|
||||
.Many, .One => return !info.@"allowzero",
|
||||
|
|
@ -3263,6 +3384,7 @@ pub const Type = extern union {
|
|||
/// For ?[*]T, returns T.
|
||||
/// For *T, returns T.
|
||||
/// For [*]T, returns T.
|
||||
/// For [N]T, returns T.
|
||||
/// For []T, returns T.
|
||||
pub fn elemType2(ty: Type) Type {
|
||||
return switch (ty.tag()) {
|
||||
|
|
@ -4256,6 +4378,13 @@ pub const Type = extern union {
|
|||
};
|
||||
}
|
||||
|
||||
pub fn isArrayLike(ty: Type) bool {
|
||||
return switch (ty.zigTypeTag()) {
|
||||
.Array, .Vector => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isIndexable(ty: Type) bool {
|
||||
return switch (ty.zigTypeTag()) {
|
||||
.Array, .Vector => true,
|
||||
|
|
@ -4642,7 +4771,7 @@ pub const Type = extern union {
|
|||
return field_offset.offset;
|
||||
}
|
||||
|
||||
return std.mem.alignForwardGeneric(u64, it.offset, it.big_align);
|
||||
return std.mem.alignForwardGeneric(u64, it.offset, @maximum(it.big_align, 1));
|
||||
},
|
||||
|
||||
.tuple, .anon_struct => {
|
||||
|
|
@ -4665,7 +4794,7 @@ pub const Type = extern union {
|
|||
if (i == index) return offset;
|
||||
offset += field_ty.abiSize(target);
|
||||
}
|
||||
offset = std.mem.alignForwardGeneric(u64, offset, big_align);
|
||||
offset = std.mem.alignForwardGeneric(u64, offset, @maximum(big_align, 1));
|
||||
return offset;
|
||||
},
|
||||
|
||||
|
|
@ -5345,6 +5474,7 @@ pub const Type = extern union {
|
|||
pub const @"type" = initTag(.type);
|
||||
pub const @"anyerror" = initTag(.anyerror);
|
||||
pub const @"anyopaque" = initTag(.anyopaque);
|
||||
pub const @"null" = initTag(.@"null");
|
||||
|
||||
pub fn ptr(arena: Allocator, target: Target, data: Payload.Pointer.Data) !Type {
|
||||
var d = data;
|
||||
|
|
|
|||
121 src/value.zig
|
|
@ -268,12 +268,14 @@ pub const Value = extern union {
|
|||
|
||||
.repeated,
|
||||
.eu_payload,
|
||||
.eu_payload_ptr,
|
||||
.opt_payload,
|
||||
.opt_payload_ptr,
|
||||
.empty_array_sentinel,
|
||||
=> Payload.SubValue,
|
||||
|
||||
.eu_payload_ptr,
|
||||
.opt_payload_ptr,
|
||||
=> Payload.PayloadPtr,
|
||||
|
||||
.bytes,
|
||||
.enum_literal,
|
||||
=> Payload.Bytes,
|
||||
|
|
@ -479,6 +481,20 @@ pub const Value = extern union {
|
|||
.variable => return self.copyPayloadShallow(arena, Payload.Variable),
|
||||
.decl_ref => return self.copyPayloadShallow(arena, Payload.Decl),
|
||||
.decl_ref_mut => return self.copyPayloadShallow(arena, Payload.DeclRefMut),
|
||||
.eu_payload_ptr,
|
||||
.opt_payload_ptr,
|
||||
=> {
|
||||
const payload = self.cast(Payload.PayloadPtr).?;
|
||||
const new_payload = try arena.create(Payload.PayloadPtr);
|
||||
new_payload.* = .{
|
||||
.base = payload.base,
|
||||
.data = .{
|
||||
.container_ptr = try payload.data.container_ptr.copy(arena),
|
||||
.container_ty = try payload.data.container_ty.copy(arena),
|
||||
},
|
||||
};
|
||||
return Value{ .ptr_otherwise = &new_payload.base };
|
||||
},
|
||||
.elem_ptr => {
|
||||
const payload = self.castTag(.elem_ptr).?;
|
||||
const new_payload = try arena.create(Payload.ElemPtr);
|
||||
|
|
@ -486,6 +502,7 @@ pub const Value = extern union {
|
|||
.base = payload.base,
|
||||
.data = .{
|
||||
.array_ptr = try payload.data.array_ptr.copy(arena),
|
||||
.elem_ty = try payload.data.elem_ty.copy(arena),
|
||||
.index = payload.data.index,
|
||||
},
|
||||
};
|
||||
|
|
@ -498,6 +515,7 @@ pub const Value = extern union {
|
|||
.base = payload.base,
|
||||
.data = .{
|
||||
.container_ptr = try payload.data.container_ptr.copy(arena),
|
||||
.container_ty = try payload.data.container_ty.copy(arena),
|
||||
.field_index = payload.data.field_index,
|
||||
},
|
||||
};
|
||||
|
|
@ -506,9 +524,7 @@ pub const Value = extern union {
|
|||
.bytes => return self.copyPayloadShallow(arena, Payload.Bytes),
|
||||
.repeated,
|
||||
.eu_payload,
|
||||
.eu_payload_ptr,
|
||||
.opt_payload,
|
||||
.opt_payload_ptr,
|
||||
.empty_array_sentinel,
|
||||
=> {
|
||||
const payload = self.cast(Payload.SubValue).?;
|
||||
|
|
@ -740,11 +756,11 @@ pub const Value = extern union {
|
|||
.inferred_alloc_comptime => return out_stream.writeAll("(inferred comptime allocation value)"),
|
||||
.eu_payload_ptr => {
|
||||
try out_stream.writeAll("(eu_payload_ptr)");
|
||||
val = val.castTag(.eu_payload_ptr).?.data;
|
||||
val = val.castTag(.eu_payload_ptr).?.data.container_ptr;
|
||||
},
|
||||
.opt_payload_ptr => {
|
||||
try out_stream.writeAll("(opt_payload_ptr)");
|
||||
val = val.castTag(.opt_payload_ptr).?.data;
|
||||
val = val.castTag(.opt_payload_ptr).?.data.container_ptr;
|
||||
},
|
||||
.bound_fn => {
|
||||
const bound_func = val.castTag(.bound_fn).?.data;
|
||||
|
|
@ -2162,8 +2178,8 @@ pub const Value = extern union {
|
|||
.decl_ref_mut => true,
|
||||
.elem_ptr => isComptimeMutablePtr(val.castTag(.elem_ptr).?.data.array_ptr),
|
||||
.field_ptr => isComptimeMutablePtr(val.castTag(.field_ptr).?.data.container_ptr),
|
||||
.eu_payload_ptr => isComptimeMutablePtr(val.castTag(.eu_payload_ptr).?.data),
|
||||
.opt_payload_ptr => isComptimeMutablePtr(val.castTag(.opt_payload_ptr).?.data),
|
||||
.eu_payload_ptr => isComptimeMutablePtr(val.castTag(.eu_payload_ptr).?.data.container_ptr),
|
||||
.opt_payload_ptr => isComptimeMutablePtr(val.castTag(.opt_payload_ptr).?.data.container_ptr),
|
||||
|
||||
else => false,
|
||||
};
|
||||
|
|
@ -2174,9 +2190,9 @@ pub const Value = extern union {
|
|||
switch (val.tag()) {
|
||||
.repeated => return val.castTag(.repeated).?.data.canMutateComptimeVarState(),
|
||||
.eu_payload => return val.castTag(.eu_payload).?.data.canMutateComptimeVarState(),
|
||||
.eu_payload_ptr => return val.castTag(.eu_payload_ptr).?.data.canMutateComptimeVarState(),
|
||||
.eu_payload_ptr => return val.castTag(.eu_payload_ptr).?.data.container_ptr.canMutateComptimeVarState(),
|
||||
.opt_payload => return val.castTag(.opt_payload).?.data.canMutateComptimeVarState(),
|
||||
.opt_payload_ptr => return val.castTag(.opt_payload_ptr).?.data.canMutateComptimeVarState(),
|
||||
.opt_payload_ptr => return val.castTag(.opt_payload_ptr).?.data.container_ptr.canMutateComptimeVarState(),
|
||||
.aggregate => {
|
||||
const fields = val.castTag(.aggregate).?.data;
|
||||
for (fields) |field| {
|
||||
|
|
@ -2239,12 +2255,12 @@ pub const Value = extern union {
|
|||
.eu_payload_ptr => {
|
||||
const err_union_ptr = ptr_val.castTag(.eu_payload_ptr).?.data;
|
||||
std.hash.autoHash(hasher, Value.Tag.eu_payload_ptr);
|
||||
hashPtr(err_union_ptr, hasher);
|
||||
hashPtr(err_union_ptr.container_ptr, hasher);
|
||||
},
|
||||
.opt_payload_ptr => {
|
||||
const opt_ptr = ptr_val.castTag(.opt_payload_ptr).?.data;
|
||||
std.hash.autoHash(hasher, Value.Tag.opt_payload_ptr);
|
||||
hashPtr(opt_ptr, hasher);
|
||||
hashPtr(opt_ptr.container_ptr, hasher);
|
||||
},
|
||||
|
||||
.zero,
|
||||
|
|
@ -2272,12 +2288,14 @@ pub const Value = extern union {
|
|||
|
||||
.repeated,
|
||||
.eu_payload,
|
||||
.eu_payload_ptr,
|
||||
.opt_payload,
|
||||
.opt_payload_ptr,
|
||||
.empty_array_sentinel,
|
||||
=> return markReferencedDeclsAlive(val.cast(Payload.SubValue).?.data),
|
||||
|
||||
.eu_payload_ptr,
|
||||
.opt_payload_ptr,
|
||||
=> return markReferencedDeclsAlive(val.cast(Payload.PayloadPtr).?.data.container_ptr),
|
||||
|
||||
.slice => {
|
||||
const slice = val.cast(Payload.Slice).?.data;
|
||||
markReferencedDeclsAlive(slice.ptr);
|
||||
|
|
@ -2394,6 +2412,29 @@ pub const Value = extern union {
|
|||
}
|
||||
}
|
||||
|
||||
// Asserts that the provided start/end are in-bounds.
|
||||
pub fn sliceArray(val: Value, arena: Allocator, start: usize, end: usize) error{OutOfMemory}!Value {
|
||||
return switch (val.tag()) {
|
||||
.empty_array_sentinel => if (start == 0 and end == 1) val else Value.initTag(.empty_array),
|
||||
.bytes => Tag.bytes.create(arena, val.castTag(.bytes).?.data[start..end]),
|
||||
.aggregate => Tag.aggregate.create(arena, val.castTag(.aggregate).?.data[start..end]),
|
||||
.slice => sliceArray(val.castTag(.slice).?.data.ptr, arena, start, end),
|
||||
|
||||
.decl_ref => sliceArray(val.castTag(.decl_ref).?.data.val, arena, start, end),
|
||||
.decl_ref_mut => sliceArray(val.castTag(.decl_ref_mut).?.data.decl.val, arena, start, end),
|
||||
.elem_ptr => blk: {
|
||||
const elem_ptr = val.castTag(.elem_ptr).?.data;
|
||||
break :blk sliceArray(elem_ptr.array_ptr, arena, start + elem_ptr.index, end + elem_ptr.index);
|
||||
},
|
||||
|
||||
.repeated,
|
||||
.the_only_possible_value,
|
||||
=> val,
|
||||
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn fieldValue(val: Value, allocator: Allocator, index: usize) error{OutOfMemory}!Value {
|
||||
_ = allocator;
|
||||
switch (val.tag()) {
|
||||
|
|
@ -2422,36 +2463,28 @@ pub const Value = extern union {
|
|||
}
|
||||
|
||||
/// Returns a pointer to the element value at the index.
|
||||
pub fn elemPtr(val: Value, arena: Allocator, index: usize) Allocator.Error!Value {
|
||||
switch (val.tag()) {
|
||||
.elem_ptr => {
|
||||
const elem_ptr = val.castTag(.elem_ptr).?.data;
|
||||
pub fn elemPtr(val: Value, ty: Type, arena: Allocator, index: usize) Allocator.Error!Value {
|
||||
const elem_ty = ty.elemType2();
|
||||
const ptr_val = switch (val.tag()) {
|
||||
.slice => val.castTag(.slice).?.data.ptr,
|
||||
else => val,
|
||||
};
|
||||
|
||||
if (ptr_val.tag() == .elem_ptr) {
|
||||
const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
|
||||
if (elem_ptr.elem_ty.eql(elem_ty)) {
|
||||
return Tag.elem_ptr.create(arena, .{
|
||||
.array_ptr = elem_ptr.array_ptr,
|
||||
.elem_ty = elem_ptr.elem_ty,
|
||||
.index = elem_ptr.index + index,
|
||||
});
|
||||
},
|
||||
.slice => {
|
||||
const ptr_val = val.castTag(.slice).?.data.ptr;
|
||||
switch (ptr_val.tag()) {
|
||||
.elem_ptr => {
|
||||
const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
|
||||
return Tag.elem_ptr.create(arena, .{
|
||||
.array_ptr = elem_ptr.array_ptr,
|
||||
.index = elem_ptr.index + index,
|
||||
});
|
||||
},
|
||||
else => return Tag.elem_ptr.create(arena, .{
|
||||
.array_ptr = ptr_val,
|
||||
.index = index,
|
||||
}),
|
||||
}
|
||||
},
|
||||
else => return Tag.elem_ptr.create(arena, .{
|
||||
.array_ptr = val,
|
||||
.index = index,
|
||||
}),
|
||||
}
|
||||
}
|
||||
return Tag.elem_ptr.create(arena, .{
|
||||
.array_ptr = ptr_val,
|
||||
.elem_ty = elem_ty,
|
||||
.index = index,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn isUndef(self: Value) bool {
|
||||
|
|
@ -4144,12 +4177,21 @@ pub const Value = extern union {
|
|||
};
|
||||
};
|
||||
|
||||
pub const PayloadPtr = struct {
|
||||
base: Payload,
|
||||
data: struct {
|
||||
container_ptr: Value,
|
||||
container_ty: Type,
|
||||
},
|
||||
};
|
||||
|
||||
pub const ElemPtr = struct {
|
||||
pub const base_tag = Tag.elem_ptr;
|
||||
|
||||
base: Payload = Payload{ .tag = base_tag },
|
||||
data: struct {
|
||||
array_ptr: Value,
|
||||
elem_ty: Type,
|
||||
index: usize,
|
||||
},
|
||||
};
|
||||
|
|
@ -4160,6 +4202,7 @@ pub const Value = extern union {
|
|||
base: Payload = Payload{ .tag = base_tag },
|
||||
data: struct {
|
||||
container_ptr: Value,
|
||||
container_ty: Type,
|
||||
field_index: usize,
|
||||
},
|
||||
};
|
||||
|
|
|
|||
|
|
@ -62,6 +62,7 @@ test {
|
|||
_ = @import("behavior/bugs/11100.zig");
|
||||
_ = @import("behavior/bugs/10970.zig");
|
||||
_ = @import("behavior/bugs/11046.zig");
|
||||
_ = @import("behavior/bugs/11139.zig");
|
||||
_ = @import("behavior/bugs/11165.zig");
|
||||
_ = @import("behavior/call.zig");
|
||||
_ = @import("behavior/cast.zig");
|
||||
|
|
|
|||
25 test/behavior/bugs/11139.zig (new file)
|
@@ -0,0 +1,25 @@
const std = @import("std");
const builtin = @import("builtin");
const expect = std.testing.expect;

test "store array of array of structs at comptime" {
    if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO

    try expect(storeArrayOfArrayOfStructs() == 15);
    comptime try expect(storeArrayOfArrayOfStructs() == 15);
}

fn storeArrayOfArrayOfStructs() u8 {
    const S = struct {
        x: u8,
    };

    var cases = [_][1]S{
        [_]S{
            S{ .x = 15 },
        },
    };
    return cases[0][0].x;
}
|
||||
|
|
@ -871,7 +871,7 @@ test "peer cast [N:x]T to [N]T" {
|
|||
}
|
||||
|
||||
test "peer cast *[N:x]T to *[N]T" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
|
||||
|
|
@ -887,7 +887,9 @@ test "peer cast *[N:x]T to *[N]T" {
|
|||
}
|
||||
|
||||
test "peer cast [*:x]T to [*]T" {
|
||||
if (builtin.zig_backend != .stage1) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
|
||||
const S = struct {
|
||||
fn doTheTest() !void {
|
||||
|
|
|
|||
|
|
@ -21,8 +21,47 @@ fn testReinterpretBytesAsInteger() !void {
|
|||
try expect(@ptrCast(*align(1) const u32, bytes[1..5]).* == expected);
|
||||
}
|
||||
|
||||
test "reinterpret an array over multiple elements, with no well-defined layout" {
|
||||
if (builtin.zig_backend == .stage1) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
|
||||
try testReinterpretWithOffsetAndNoWellDefinedLayout();
|
||||
comptime try testReinterpretWithOffsetAndNoWellDefinedLayout();
|
||||
}
|
||||
|
||||
fn testReinterpretWithOffsetAndNoWellDefinedLayout() !void {
|
||||
const bytes: ?[5]?u8 = [5]?u8{ 0x12, 0x34, 0x56, 0x78, 0x9a };
|
||||
const ptr = &bytes.?[1];
|
||||
const copy: [4]?u8 = @ptrCast(*const [4]?u8, ptr).*;
|
||||
_ = copy;
|
||||
//try expect(@ptrCast(*align(1)?u8, bytes[1..5]).* == );
|
||||
}
|
||||
|
||||
test "reinterpret bytes inside auto-layout struct as integer with nonzero offset" {
|
||||
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
|
||||
try testReinterpretStructWrappedBytesAsInteger();
|
||||
comptime try testReinterpretStructWrappedBytesAsInteger();
|
||||
}
|
||||
|
||||
fn testReinterpretStructWrappedBytesAsInteger() !void {
|
||||
const S = struct { bytes: [5:0]u8 };
|
||||
const obj = S{ .bytes = "\x12\x34\x56\x78\xab".* };
|
||||
const expected = switch (native_endian) {
|
||||
.Little => 0xab785634,
|
||||
.Big => 0x345678ab,
|
||||
};
|
||||
try expect(@ptrCast(*align(1) const u32, obj.bytes[1..5]).* == expected);
|
||||
}
|
||||
|
||||
test "reinterpret bytes of an array into an extern struct" {
|
||||
if (builtin.zig_backend != .stage1) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
|
||||
try testReinterpretBytesAsExternStruct();
|
||||
comptime try testReinterpretBytesAsExternStruct();
|
||||
|
|
@ -42,6 +81,57 @@ fn testReinterpretBytesAsExternStruct() !void {
|
|||
try expect(val == 5);
|
||||
}
|
||||
|
||||
test "reinterpret bytes of an extern struct into another" {
|
||||
if (builtin.zig_backend == .stage1) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
|
||||
try testReinterpretExternStructAsExternStruct();
|
||||
comptime try testReinterpretExternStructAsExternStruct();
|
||||
}
|
||||
|
||||
fn testReinterpretExternStructAsExternStruct() !void {
|
||||
const S1 = extern struct {
|
||||
a: u8,
|
||||
b: u16,
|
||||
c: u8,
|
||||
};
|
||||
comptime var bytes align(2) = S1{ .a = 0, .b = 0, .c = 5 };
|
||||
|
||||
const S2 = extern struct {
|
||||
a: u32 align(2),
|
||||
c: u8,
|
||||
};
|
||||
var ptr = @ptrCast(*const S2, &bytes);
|
||||
var val = ptr.c;
|
||||
try expect(val == 5);
|
||||
}
|
||||
|
||||
test "lower reinterpreted comptime field ptr" {
|
||||
if (builtin.zig_backend == .stage1) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
|
||||
|
||||
// Test lowering a field ptr
|
||||
comptime var bytes align(2) = [_]u8{ 1, 2, 3, 4, 5, 6 };
|
||||
const S = extern struct {
|
||||
a: u32 align(2),
|
||||
c: u8,
|
||||
};
|
||||
comptime var ptr = @ptrCast(*const S, &bytes);
|
||||
var val = &ptr.c;
|
||||
try expect(val.* == 5);
|
||||
|
||||
// Test lowering an elem ptr
|
||||
comptime var src_value = S{ .a = 15, .c = 5 };
|
||||
comptime var ptr2 = @ptrCast(*[@sizeOf(S)]u8, &src_value);
|
||||
var val2 = &ptr2[4];
|
||||
try expect(val2.* == 5);
|
||||
}
|
||||
|
||||
test "reinterpret struct field at comptime" {
|
||||
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
|
|
|
|||