InternPool: port most of value tags

Jacob Young 2023-05-22 07:58:02 -04:00 committed by Andrew Kelley
parent 5555bdca04
commit 6e0de1d116
34 changed files with 5210 additions and 5984 deletions


@@ -459,6 +459,28 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?u29) type {
return self.items[prev_len..][0..n];
}
/// Resize the array, adding `n` new elements, which have `undefined` values.
/// The return value is a slice pointing to the newly allocated elements.
/// The returned pointer becomes invalid when the list is resized.
/// Resizes list if `self.capacity` is not large enough.
pub fn addManyAsSlice(self: *Self, n: usize) Allocator.Error![]T {
const prev_len = self.items.len;
try self.resize(self.items.len + n);
return self.items[prev_len..][0..n];
}
/// Resize the array, adding `n` new elements, which have `undefined` values.
/// The return value is a slice pointing to the newly allocated elements.
/// Asserts that there is already space for the new item without allocating more.
/// **Does not** invalidate element pointers.
/// The returned pointer becomes invalid when the list is resized.
pub fn addManyAsSliceAssumeCapacity(self: *Self, n: usize) []T {
assert(self.items.len + n <= self.capacity);
const prev_len = self.items.len;
self.items.len += n;
return self.items[prev_len..][0..n];
}
/// Remove and return the last element from the list.
/// Asserts the list has at least one item.
/// Invalidates pointers to the removed element.
@@ -949,6 +971,28 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) typ
return self.items[prev_len..][0..n];
}
/// Resize the array, adding `n` new elements, which have `undefined` values.
/// The return value is a slice pointing to the newly allocated elements.
/// The returned pointer becomes invalid when the list is resized.
/// Resizes list if `self.capacity` is not large enough.
pub fn addManyAsSlice(self: *Self, allocator: Allocator, n: usize) Allocator.Error![]T {
const prev_len = self.items.len;
try self.resize(allocator, self.items.len + n);
return self.items[prev_len..][0..n];
}
/// Resize the array, adding `n` new elements, which have `undefined` values.
/// The return value is a slice pointing to the newly allocated elements.
/// Asserts that there is already space for the new item without allocating more.
/// **Does not** invalidate element pointers.
/// The returned pointer becomes invalid when the list is resized.
pub fn addManyAsSliceAssumeCapacity(self: *Self, n: usize) []T {
assert(self.items.len + n <= self.capacity);
const prev_len = self.items.len;
self.items.len += n;
return self.items[prev_len..][0..n];
}
/// Remove and return the last element from the list.
/// Asserts the list has at least one item.
/// Invalidates pointers to last element.
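A minimal usage sketch of the two new helpers (not part of the commit; the element type and values are made up). addManyAsSlice resizes the list and returns the still-undefined tail, while addManyAsSliceAssumeCapacity does the same without allocating, so capacity must be reserved first; the unmanaged variant is identical except that addManyAsSlice takes the allocator explicitly.

    const std = @import("std");

    test "addManyAsSlice sketch" {
        var list = std.ArrayList(u8).init(std.testing.allocator);
        defer list.deinit();

        // Grow by 4 elements; the returned slice is the new, undefined tail.
        const tail = try list.addManyAsSlice(4);
        for (tail) |*byte| byte.* = 0;

        // Reserve space up front, then the AssumeCapacity variant cannot fail.
        try list.ensureUnusedCapacity(2);
        const tail2 = list.addManyAsSliceAssumeCapacity(2);
        for (tail2) |*byte| byte.* = 0xff;

        try std.testing.expectEqual(@as(usize, 6), list.items.len);
    }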


@@ -901,8 +901,8 @@ pub const Inst = struct {
manyptr_const_u8_type = @enumToInt(InternPool.Index.manyptr_const_u8_type),
manyptr_const_u8_sentinel_0_type = @enumToInt(InternPool.Index.manyptr_const_u8_sentinel_0_type),
single_const_pointer_to_comptime_int_type = @enumToInt(InternPool.Index.single_const_pointer_to_comptime_int_type),
-const_slice_u8_type = @enumToInt(InternPool.Index.const_slice_u8_type),
-const_slice_u8_sentinel_0_type = @enumToInt(InternPool.Index.const_slice_u8_sentinel_0_type),
+slice_const_u8_type = @enumToInt(InternPool.Index.slice_const_u8_type),
+slice_const_u8_sentinel_0_type = @enumToInt(InternPool.Index.slice_const_u8_sentinel_0_type),
anyerror_void_error_union_type = @enumToInt(InternPool.Index.anyerror_void_error_union_type),
generic_poison_type = @enumToInt(InternPool.Index.generic_poison_type),
inferred_alloc_const_type = @enumToInt(InternPool.Index.inferred_alloc_const_type),
@@ -1382,7 +1382,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index, ip: InternPool) Type {
.bool_to_int => return Type.u1,
-.tag_name, .error_name => return Type.const_slice_u8_sentinel_0,
+.tag_name, .error_name => return Type.slice_const_u8_sentinel_0,
.call, .call_always_tail, .call_never_tail, .call_never_inline => {
const callee_ty = air.typeOf(datas[inst].pl_op.operand, ip);


@@ -3934,7 +3934,7 @@ fn fnDecl(
var section_gz = decl_gz.makeSubBlock(params_scope);
defer section_gz.unstack();
const section_ref: Zir.Inst.Ref = if (fn_proto.ast.section_expr == 0) .none else inst: {
-const inst = try expr(&decl_gz, params_scope, .{ .rl = .{ .coerced_ty = .const_slice_u8_type } }, fn_proto.ast.section_expr);
+const inst = try expr(&decl_gz, params_scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, fn_proto.ast.section_expr);
if (section_gz.instructionsSlice().len == 0) {
// In this case we will send a len=0 body which can be encoded more efficiently.
break :inst inst;
@@ -4137,7 +4137,7 @@ fn globalVarDecl(
break :inst try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .address_space_type } }, var_decl.ast.addrspace_node);
};
const section_inst: Zir.Inst.Ref = if (var_decl.ast.section_node == 0) .none else inst: {
-break :inst try comptimeExpr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .const_slice_u8_type } }, var_decl.ast.section_node);
+break :inst try comptimeExpr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = .slice_const_u8_type } }, var_decl.ast.section_node);
};
const has_section_or_addrspace = section_inst != .none or addrspace_inst != .none;
wip_members.nextDecl(is_pub, is_export, align_inst != .none, has_section_or_addrspace);
@@ -7878,7 +7878,7 @@ fn unionInit(
params: []const Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const union_type = try typeExpr(gz, scope, params[0]);
-const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]);
+const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]);
const field_type = try gz.addPlNode(.field_type_ref, params[1], Zir.Inst.FieldTypeRef{
.container_type = union_type,
.field_name = field_name,
@@ -8100,12 +8100,12 @@ fn builtinCall(
if (ri.rl == .ref) {
return gz.addPlNode(.field_ptr_named, node, Zir.Inst.FieldNamed{
.lhs = try expr(gz, scope, .{ .rl = .ref }, params[0]),
-.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]),
+.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]),
});
}
const result = try gz.addPlNode(.field_val_named, node, Zir.Inst.FieldNamed{
.lhs = try expr(gz, scope, .{ .rl = .none }, params[0]),
-.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]),
+.field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]),
});
return rvalue(gz, ri, result, node);
},
@@ -8271,11 +8271,11 @@ fn builtinCall(
.align_of => return simpleUnOpType(gz, scope, ri, node, params[0], .align_of),
.ptr_to_int => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .ptr_to_int),
-.compile_error => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .compile_error),
+.compile_error => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0], .compile_error),
.set_eval_branch_quota => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .coerced_ty = .u32_type } }, params[0], .set_eval_branch_quota),
.enum_to_int => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .enum_to_int),
.bool_to_int => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .bool_to_int),
-.embed_file => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .embed_file),
+.embed_file => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0], .embed_file),
.error_name => return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .anyerror_type } }, params[0], .error_name),
.set_runtime_safety => return simpleUnOp(gz, scope, ri, node, bool_ri, params[0], .set_runtime_safety),
.sqrt => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, params[0], .sqrt),
@@ -8334,7 +8334,7 @@ fn builtinCall(
},
.panic => {
try emitDbgNode(gz, node);
-return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .panic);
+return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0], .panic);
},
.trap => {
try emitDbgNode(gz, node);
@@ -8450,7 +8450,7 @@ fn builtinCall(
},
.c_define => {
if (!gz.c_import) return gz.astgen.failNode(node, "C define valid only inside C import block", .{});
-const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0]);
+const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[0]);
const value = try comptimeExpr(gz, scope, .{ .rl = .none }, params[1]);
const result = try gz.addExtendedPayload(.c_define, Zir.Inst.BinNode{
.node = gz.nodeIndexToRelative(node),
@@ -8546,7 +8546,7 @@ fn builtinCall(
},
.field_parent_ptr => {
const parent_type = try typeExpr(gz, scope, params[0]);
-const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]);
+const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, params[1]);
const result = try gz.addPlNode(.field_parent_ptr, node, Zir.Inst.FieldParentPtr{
.parent_type = parent_type,
.field_name = field_name,
@@ -8701,7 +8701,7 @@ fn hasDeclOrField(
tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
const container_type = try typeExpr(gz, scope, lhs_node);
-const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, rhs_node);
+const name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, rhs_node);
const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
.lhs = container_type,
.rhs = name,
@@ -8851,7 +8851,7 @@ fn simpleCBuiltin(
) InnerError!Zir.Inst.Ref {
const name: []const u8 = if (tag == .c_undef) "C undef" else "C include";
if (!gz.c_import) return gz.astgen.failNode(node, "{s} valid only inside C import block", .{name});
-const operand = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, operand_node);
+const operand = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, operand_node);
_ = try gz.addExtendedPayload(tag, Zir.Inst.UnNode{
.node = gz.nodeIndexToRelative(node),
.operand = operand,
@@ -8869,7 +8869,7 @@ fn offsetOf(
tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
const type_inst = try typeExpr(gz, scope, lhs_node);
-const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, rhs_node);
+const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .slice_const_u8_type } }, rhs_node);
const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
.lhs = type_inst,
.rhs = field_name,
@@ -10317,8 +10317,8 @@ fn rvalue(
as_ty | @enumToInt(Zir.Inst.Ref.manyptr_const_u8_type),
as_ty | @enumToInt(Zir.Inst.Ref.manyptr_const_u8_sentinel_0_type),
as_ty | @enumToInt(Zir.Inst.Ref.single_const_pointer_to_comptime_int_type),
-as_ty | @enumToInt(Zir.Inst.Ref.const_slice_u8_type),
-as_ty | @enumToInt(Zir.Inst.Ref.const_slice_u8_sentinel_0_type),
+as_ty | @enumToInt(Zir.Inst.Ref.slice_const_u8_type),
+as_ty | @enumToInt(Zir.Inst.Ref.slice_const_u8_sentinel_0_type),
as_ty | @enumToInt(Zir.Inst.Ref.anyerror_void_error_union_type),
as_ty | @enumToInt(Zir.Inst.Ref.generic_poison_type),
as_ty | @enumToInt(Zir.Inst.Ref.empty_struct_type),


@@ -226,7 +226,7 @@ const Job = union(enum) {
/// Write the constant value for a Decl to the output file.
codegen_decl: Module.Decl.Index,
/// Write the machine code for a function to the output file.
-codegen_func: *Module.Fn,
+codegen_func: Module.Fn.Index,
/// Render the .h file snippet for the Decl.
emit_h_decl: Module.Decl.Index,
/// The Decl needs to be analyzed and possibly export itself.
@@ -3208,7 +3208,8 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
// Tests are always emitted in test binaries. The decl_refs are created by
// Module.populateTestFunctions, but this will not queue body analysis, so do
// that now.
-try module.ensureFuncBodyAnalysisQueued(decl.val.castTag(.function).?.data);
+const func_index = module.intern_pool.indexToFunc(decl.val.ip_index).unwrap().?;
+try module.ensureFuncBodyAnalysisQueued(func_index);
}
},
.update_embed_file => |embed_file| {

File diff suppressed because it is too large.


@@ -109,7 +109,7 @@ memoized_calls: MemoizedCallSet = .{},
/// Contains the values from `@setAlignStack`. A sparse table is used here
/// instead of a field of `Fn` because usage of `@setAlignStack` is rare, while
/// functions are many.
-align_stack_fns: std.AutoHashMapUnmanaged(*const Fn, SetAlignStack) = .{},
+align_stack_fns: std.AutoHashMapUnmanaged(Fn.Index, SetAlignStack) = .{},
/// We optimize memory usage for a compilation with no compile errors by storing the
/// error messages and mapping outside of `Decl`.
@@ -242,22 +242,23 @@ pub const StringLiteralAdapter = struct {
};
const MonomorphedFuncsSet = std.HashMapUnmanaged(
-*Fn,
+Fn.Index,
void,
MonomorphedFuncsContext,
std.hash_map.default_max_load_percentage,
);
const MonomorphedFuncsContext = struct {
-pub fn eql(ctx: @This(), a: *Fn, b: *Fn) bool {
+mod: *Module,
+pub fn eql(ctx: @This(), a: Fn.Index, b: Fn.Index) bool {
_ = ctx;
return a == b;
}
/// Must match `Sema.GenericCallAdapter.hash`.
-pub fn hash(ctx: @This(), key: *Fn) u64 {
-_ = ctx;
-return key.hash;
+pub fn hash(ctx: @This(), key: Fn.Index) u64 {
+return ctx.mod.funcPtr(key).hash;
}
};
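Because the set now stores Fn.Index keys, hashing has to reach the Fn data through the module, so callers pass an explicit context value. A small sketch of that call pattern (not from the commit; mod, gpa, and func_index are placeholder names), matching the removeContext call used in clearValues further down:

    // Build the context carrying the module pointer the hash function needs.
    const ctx: MonomorphedFuncsContext = .{ .mod = mod };
    // Insert (or find) a monomorphized instance keyed by its Fn.Index.
    const gop = try mod.monomorphed_funcs.getOrPutContext(gpa, func_index, ctx);
    if (!gop.found_existing) {
        // first time this instantiation is seen
    }
    // Remove it again, e.g. when the owning Decl's values are cleared.
    _ = mod.monomorphed_funcs.removeContext(func_index, ctx);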
@@ -272,7 +273,7 @@ pub const MemoizedCall = struct {
module: *Module,
pub const Key = struct {
-func: *Fn,
+func: Fn.Index,
args: []TypedValue,
};
@@ -652,21 +653,12 @@ pub const Decl = struct {
pub fn clearValues(decl: *Decl, mod: *Module) void {
const gpa = mod.gpa;
-if (decl.getExternFn()) |extern_fn| {
-extern_fn.deinit(gpa);
-gpa.destroy(extern_fn);
-}
-if (decl.getFunction()) |func| {
+if (decl.getFunctionIndex(mod).unwrap()) |func| {
_ = mod.align_stack_fns.remove(func);
-if (func.comptime_args != null) {
-_ = mod.monomorphed_funcs.remove(func);
+if (mod.funcPtr(func).comptime_args != null) {
+_ = mod.monomorphed_funcs.removeContext(func, .{ .mod = mod });
}
-func.deinit(gpa);
-gpa.destroy(func);
-}
-if (decl.getVariable()) |variable| {
-variable.deinit(gpa);
-gpa.destroy(variable);
+mod.destroyFunc(func);
}
if (decl.value_arena) |value_arena| {
if (decl.owns_tv) {
@@ -835,11 +827,11 @@ pub const Decl = struct {
/// If the Decl has a value and it is a struct, return it,
/// otherwise null.
-pub fn getStruct(decl: *Decl, mod: *Module) ?*Struct {
-return mod.structPtrUnwrap(getStructIndex(decl, mod));
+pub fn getStruct(decl: Decl, mod: *Module) ?*Struct {
+return mod.structPtrUnwrap(decl.getStructIndex(mod));
}
-pub fn getStructIndex(decl: *Decl, mod: *Module) Struct.OptionalIndex {
+pub fn getStructIndex(decl: Decl, mod: *Module) Struct.OptionalIndex {
if (!decl.owns_tv) return .none;
if (decl.val.ip_index == .none) return .none;
return mod.intern_pool.indexToStructType(decl.val.ip_index);
@@ -847,7 +839,7 @@ pub const Decl = struct {
/// If the Decl has a value and it is a union, return it,
/// otherwise null.
-pub fn getUnion(decl: *Decl, mod: *Module) ?*Union {
+pub fn getUnion(decl: Decl, mod: *Module) ?*Union {
if (!decl.owns_tv) return null;
if (decl.val.ip_index == .none) return null;
return mod.typeToUnion(decl.val.toType());
@@ -855,32 +847,30 @@ pub const Decl = struct {
/// If the Decl has a value and it is a function, return it,
/// otherwise null.
-pub fn getFunction(decl: *const Decl) ?*Fn {
-if (!decl.owns_tv) return null;
-const func = (decl.val.castTag(.function) orelse return null).data;
-return func;
+pub fn getFunction(decl: Decl, mod: *Module) ?*Fn {
+return mod.funcPtrUnwrap(decl.getFunctionIndex(mod));
+}
+pub fn getFunctionIndex(decl: Decl, mod: *Module) Fn.OptionalIndex {
+return if (decl.owns_tv) decl.val.getFunctionIndex(mod) else .none;
}
/// If the Decl has a value and it is an extern function, returns it,
/// otherwise null.
-pub fn getExternFn(decl: *const Decl) ?*ExternFn {
-if (!decl.owns_tv) return null;
-const extern_fn = (decl.val.castTag(.extern_fn) orelse return null).data;
-return extern_fn;
+pub fn getExternFunc(decl: Decl, mod: *Module) ?InternPool.Key.ExternFunc {
+return if (decl.owns_tv) decl.val.getExternFunc(mod) else null;
}
/// If the Decl has a value and it is a variable, returns it,
/// otherwise null.
-pub fn getVariable(decl: *const Decl) ?*Var {
-if (!decl.owns_tv) return null;
-const variable = (decl.val.castTag(.variable) orelse return null).data;
-return variable;
+pub fn getVariable(decl: Decl, mod: *Module) ?InternPool.Key.Variable {
+return if (decl.owns_tv) decl.val.getVariable(mod) else null;
}
/// Gets the namespace that this Decl creates by being a struct, union,
/// enum, or opaque.
/// Only returns it if the Decl is the owner.
-pub fn getInnerNamespaceIndex(decl: *Decl, mod: *Module) Namespace.OptionalIndex {
+pub fn getInnerNamespaceIndex(decl: Decl, mod: *Module) Namespace.OptionalIndex {
if (!decl.owns_tv) return .none;
return switch (decl.val.ip_index) {
.empty_struct_type => .none,
@@ -896,8 +886,8 @@ pub const Decl = struct {
}
/// Same as `getInnerNamespaceIndex` but additionally obtains the pointer.
-pub fn getInnerNamespace(decl: *Decl, mod: *Module) ?*Namespace {
-return if (getInnerNamespaceIndex(decl, mod).unwrap()) |i| mod.namespacePtr(i) else null;
+pub fn getInnerNamespace(decl: Decl, mod: *Module) ?*Namespace {
+return if (decl.getInnerNamespaceIndex(mod).unwrap()) |i| mod.namespacePtr(i) else null;
}
pub fn dump(decl: *Decl) void {
@@ -927,14 +917,11 @@ pub const Decl = struct {
assert(decl.dependencies.swapRemove(other));
}
-pub fn isExtern(decl: Decl) bool {
+pub fn isExtern(decl: Decl, mod: *Module) bool {
assert(decl.has_tv);
-return switch (decl.val.ip_index) {
-.none => switch (decl.val.tag()) {
-.extern_fn => true,
-.variable => decl.val.castTag(.variable).?.data.init.ip_index == .unreachable_value,
-else => false,
-},
+return switch (mod.intern_pool.indexToKey(decl.val.ip_index)) {
+.variable => |variable| variable.is_extern,
+.extern_func => true,
else => false,
};
}
@@ -1494,6 +1481,28 @@ pub const Fn = struct {
is_noinline: bool,
calls_or_awaits_errorable_fn: bool = false,
pub const Index = enum(u32) {
_,
pub fn toOptional(i: Index) OptionalIndex {
return @intToEnum(OptionalIndex, @enumToInt(i));
}
};
pub const OptionalIndex = enum(u32) {
none = std.math.maxInt(u32),
_,
pub fn init(oi: ?Index) OptionalIndex {
return @intToEnum(OptionalIndex, @enumToInt(oi orelse return .none));
}
pub fn unwrap(oi: OptionalIndex) ?Index {
if (oi == .none) return null;
return @intToEnum(Index, @enumToInt(oi));
}
};
pub const Analysis = enum {
/// This function has not yet undergone analysis, because we have not
/// seen a potential runtime call. It may be analyzed in future.
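An illustrative round trip of the optional-index encoding added above (not from the commit; the index value 7 is arbitrary and the Module import path is an assumption). The point of the pattern is that .none is stored as maxInt(u32), so the optional costs no extra space over the plain index.

    const std = @import("std");
    const Module = @import("Module.zig"); // assumed importable from the test's location

    test "Fn.OptionalIndex round trip (sketch)" {
        const idx = @intToEnum(Module.Fn.Index, 7); // arbitrary index for illustration
        const opt = idx.toOptional(); // same bits, now tagged "present"
        try std.testing.expect(opt.unwrap().? == idx);

        const none = Module.Fn.OptionalIndex.init(null);
        try std.testing.expect(none == .none); // encoded as maxInt(u32)
        try std.testing.expect(none.unwrap() == null);
    }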
@@ -1519,7 +1528,7 @@ pub const Fn = struct {
/// or comptime functions.
pub const InferredErrorSet = struct {
/// The function from which this error set originates.
-func: *Fn,
+func: Fn.Index,
/// All currently known errors that this error set contains. This includes
/// direct additions via `return error.Foo;`, and possibly also errors that
@@ -1543,8 +1552,8 @@ pub const Fn = struct {
pub const Index = enum(u32) {
_,
-pub fn toOptional(i: Index) OptionalIndex {
-return @intToEnum(OptionalIndex, @enumToInt(i));
+pub fn toOptional(i: InferredErrorSet.Index) InferredErrorSet.OptionalIndex {
+return @intToEnum(InferredErrorSet.OptionalIndex, @enumToInt(i));
}
};
@@ -1552,13 +1561,13 @@ pub const Fn = struct {
none = std.math.maxInt(u32),
_,
-pub fn init(oi: ?Index) OptionalIndex {
-return @intToEnum(OptionalIndex, @enumToInt(oi orelse return .none));
+pub fn init(oi: ?InferredErrorSet.Index) InferredErrorSet.OptionalIndex {
+return @intToEnum(InferredErrorSet.OptionalIndex, @enumToInt(oi orelse return .none));
}
-pub fn unwrap(oi: OptionalIndex) ?Index {
+pub fn unwrap(oi: InferredErrorSet.OptionalIndex) ?InferredErrorSet.Index {
if (oi == .none) return null;
-return @intToEnum(Index, @enumToInt(oi));
+return @intToEnum(InferredErrorSet.Index, @enumToInt(oi));
}
};
@@ -1587,12 +1596,6 @@ pub const Fn = struct {
}
};
/// TODO: remove this function
pub fn deinit(func: *Fn, gpa: Allocator) void {
_ = func;
_ = gpa;
}
pub fn isAnytypeParam(func: Fn, mod: *Module, index: u32) bool {
const file = mod.declPtr(func.owner_decl).getFileScope(mod);
@@ -1647,28 +1650,6 @@ pub const Fn = struct {
}
};
pub const Var = struct {
/// if is_extern == true this is undefined
init: Value,
owner_decl: Decl.Index,
/// Library name if specified.
/// For example `extern "c" var stderrp = ...` would have 'c' as library name.
/// Allocated with Module's allocator; outlives the ZIR code.
lib_name: ?[*:0]const u8,
is_extern: bool,
is_mutable: bool,
is_threadlocal: bool,
is_weak_linkage: bool,
pub fn deinit(variable: *Var, gpa: Allocator) void {
if (variable.lib_name) |lib_name| {
gpa.free(mem.sliceTo(lib_name, 0));
}
}
};
pub const DeclAdapter = struct {
mod: *Module,
@@ -3472,6 +3453,10 @@ pub fn structPtr(mod: *Module, index: Struct.Index) *Struct {
return mod.intern_pool.structPtr(index);
}
pub fn funcPtr(mod: *Module, index: Fn.Index) *Fn {
return mod.intern_pool.funcPtr(index);
}
pub fn inferredErrorSetPtr(mod: *Module, index: Fn.InferredErrorSet.Index) *Fn.InferredErrorSet {
return mod.intern_pool.inferredErrorSetPtr(index);
}
@@ -3479,7 +3464,11 @@ pub fn inferredErrorSetPtr(mod: *Module, index: Fn.InferredErrorSet.Index) *Fn.I
/// This one accepts an index from the InternPool and asserts that it is not
/// the anonymous empty struct type.
pub fn structPtrUnwrap(mod: *Module, index: Struct.OptionalIndex) ?*Struct {
-return structPtr(mod, index.unwrap() orelse return null);
+return mod.structPtr(index.unwrap() orelse return null);
+}
+pub fn funcPtrUnwrap(mod: *Module, index: Fn.OptionalIndex) ?*Fn {
+return mod.funcPtr(index.unwrap() orelse return null);
}
/// Returns true if and only if the Decl is the top level struct associated with a File.
@@ -3952,7 +3941,7 @@ fn updateZirRefs(mod: *Module, file: *File, old_zir: Zir) !void {
};
}
-if (decl.getFunction()) |func| {
+if (decl.getFunction(mod)) |func| {
func.zir_body_inst = inst_map.get(func.zir_body_inst) orelse {
try file.deleted_decls.append(gpa, decl_index);
continue;
@@ -4139,7 +4128,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl_index: Decl.Index) SemaError!void {
try mod.deleteDeclExports(decl_index);
// Similarly, `@setAlignStack` invocations will be re-discovered.
-if (decl.getFunction()) |func| {
+if (decl.getFunctionIndex(mod).unwrap()) |func| {
_ = mod.align_stack_fns.remove(func);
}
@@ -4229,10 +4218,11 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl_index: Decl.Index) SemaError!void {
}
}
-pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
+pub fn ensureFuncBodyAnalyzed(mod: *Module, func_index: Fn.Index) SemaError!void {
const tracy = trace(@src());
defer tracy.end();
+const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index);
@@ -4264,7 +4254,7 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
defer tmp_arena.deinit();
const sema_arena = tmp_arena.allocator();
-var air = mod.analyzeFnBody(func, sema_arena) catch |err| switch (err) {
+var air = mod.analyzeFnBody(func_index, sema_arena) catch |err| switch (err) {
error.AnalysisFail => {
if (func.state == .in_progress) {
// If this decl caused the compile error, the analysis field would
@@ -4333,7 +4323,7 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
if (no_bin_file and !dump_llvm_ir) return;
-comp.bin_file.updateFunc(mod, func, air, liveness) catch |err| switch (err) {
+comp.bin_file.updateFunc(mod, func_index, air, liveness) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => {
decl.analysis = .codegen_failure;
@@ -4363,7 +4353,8 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
/// analyzed, and for ensuring it can exist at runtime (see
/// `sema.fnHasRuntimeBits`). This function does *not* guarantee that the body
/// will be analyzed when it returns: for that, see `ensureFuncBodyAnalyzed`.
-pub fn ensureFuncBodyAnalysisQueued(mod: *Module, func: *Fn) !void {
+pub fn ensureFuncBodyAnalysisQueued(mod: *Module, func_index: Fn.Index) !void {
+const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index);
@@ -4401,7 +4392,7 @@ pub fn ensureFuncBodyAnalysisQueued(mod: *Module, func: *Fn) !void {
// Decl itself is safely analyzed, and body analysis is not yet queued
-try mod.comp.work_queue.writeItem(.{ .codegen_func = func });
+try mod.comp.work_queue.writeItem(.{ .codegen_func = func_index });
if (mod.emit_h != null) {
// TODO: we ideally only want to do this if the function's type changed
// since the last update
@@ -4532,8 +4523,10 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
.owner_decl = new_decl,
.owner_decl_index = new_decl_index,
.func = null,
+.func_index = .none,
.fn_ret_ty = Type.void,
.owner_func = null,
+.owner_func_index = .none,
};
defer sema.deinit();
@@ -4628,8 +4621,10 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
.owner_decl = decl,
.owner_decl_index = decl_index,
.func = null,
+.func_index = .none,
.fn_ret_ty = Type.void,
.owner_func = null,
+.owner_func_index = .none,
};
defer sema.deinit();
@@ -4707,8 +4702,8 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
return true;
}
-if (decl_tv.val.castTag(.function)) |fn_payload| {
-const func = fn_payload.data;
+if (mod.intern_pool.indexToFunc(decl_tv.val.ip_index).unwrap()) |func_index| {
+const func = mod.funcPtr(func_index);
const owns_tv = func.owner_decl == decl_index;
if (owns_tv) {
var prev_type_has_bits = false;
@@ -4718,7 +4713,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
if (decl.has_tv) {
prev_type_has_bits = decl.ty.isFnOrHasRuntimeBits(mod);
type_changed = !decl.ty.eql(decl_tv.ty, mod);
-if (decl.getFunction()) |prev_func| {
+if (decl.getFunction(mod)) |prev_func| {
prev_is_inline = prev_func.state == .inline_only;
}
}
@@ -4757,38 +4752,25 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
switch (decl_tv.val.ip_index) {
.generic_poison => unreachable,
.unreachable_value => unreachable,
-.none => switch (decl_tv.val.tag()) {
-.variable => {
-const variable = decl_tv.val.castTag(.variable).?.data;
-if (variable.owner_decl == decl_index) {
+else => switch (mod.intern_pool.indexToKey(decl_tv.val.ip_index)) {
+.variable => |variable| if (variable.decl == decl_index) {
decl.owns_tv = true;
queue_linker_work = true;
-const copied_init = try variable.init.copy(decl_arena_allocator);
-variable.init = copied_init;
-}
},
-.extern_fn => {
-const extern_fn = decl_tv.val.castTag(.extern_fn).?.data;
-if (extern_fn.owner_decl == decl_index) {
+.extern_func => |extern_fn| if (extern_fn.decl == decl_index) {
decl.owns_tv = true;
queue_linker_work = true;
is_extern = true;
-}
},
-.function => {},
+.func => {},
else => {
log.debug("send global const to linker: {*} ({s})", .{ decl, decl.name });
queue_linker_work = true;
},
},
-else => {
-log.debug("send global const to linker: {*} ({s})", .{ decl, decl.name });
-queue_linker_work = true;
-},
}
decl.ty = decl_tv.ty;
@@ -4810,12 +4792,9 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
break :blk (try decl_arena_allocator.dupeZ(u8, bytes)).ptr;
};
decl.@"addrspace" = blk: {
-const addrspace_ctx: Sema.AddressSpaceContext = switch (decl_tv.val.ip_index) {
-.none => switch (decl_tv.val.tag()) {
-.function, .extern_fn => .function,
+const addrspace_ctx: Sema.AddressSpaceContext = switch (mod.intern_pool.indexToKey(decl_tv.val.ip_index)) {
.variable => .variable,
-else => .constant,
-},
+.extern_func, .func => .function,
else => .constant,
};
@@ -5388,7 +5367,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
decl.has_align = has_align;
decl.has_linksection_or_addrspace = has_linksection_or_addrspace;
decl.zir_decl_index = @intCast(u32, decl_sub_index);
-if (decl.getFunction()) |_| {
+if (decl.getFunctionIndex(mod) != .none) {
switch (comp.bin_file.tag) {
.coff, .elf, .macho, .plan9 => {
// TODO Look into detecting when this would be unnecessary by storing enough state
@@ -5572,11 +5551,12 @@ fn deleteDeclExports(mod: *Module, decl_index: Decl.Index) Allocator.Error!void
export_owners.deinit(mod.gpa);
}
-pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
+pub fn analyzeFnBody(mod: *Module, func_index: Fn.Index, arena: Allocator) SemaError!Air {
const tracy = trace(@src());
defer tracy.end();
const gpa = mod.gpa;
+const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index);
@@ -5597,8 +5577,10 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
.owner_decl = decl,
.owner_decl_index = decl_index,
.func = func,
+.func_index = func_index.toOptional(),
.fn_ret_ty = fn_ty_info.return_type.toType(),
.owner_func = func,
+.owner_func_index = func_index.toOptional(),
.branch_quota = @max(func.branch_quota, Sema.default_branch_quota),
};
defer sema.deinit();
@@ -5807,8 +5789,7 @@ fn markOutdatedDecl(mod: *Module, decl_index: Decl.Index) !void {
for (kv.value) |err| err.deinit(mod.gpa);
}
if (decl.has_tv and decl.owns_tv) {
-if (decl.val.castTag(.function)) |payload| {
-const func = payload.data;
+if (decl.getFunctionIndex(mod).unwrap()) |func| {
_ = mod.align_stack_fns.remove(func);
}
}
@@ -5852,6 +5833,14 @@ pub fn destroyUnion(mod: *Module, index: Union.Index) void {
return mod.intern_pool.destroyUnion(mod.gpa, index);
}
pub fn createFunc(mod: *Module, initialization: Fn) Allocator.Error!Fn.Index {
return mod.intern_pool.createFunc(mod.gpa, initialization);
}
pub fn destroyFunc(mod: *Module, index: Fn.Index) void {
return mod.intern_pool.destroyFunc(mod.gpa, index);
}
pub fn allocateNewDecl(
mod: *Module,
namespace: Namespace.Index,
@@ -6499,7 +6488,11 @@ pub fn populateTestFunctions(
try mod.ensureDeclAnalyzed(decl_index);
}
const decl = mod.declPtr(decl_index);
-const tmp_test_fn_ty = decl.ty.slicePtrFieldType(mod).childType(mod);
+const test_fn_ty = decl.ty.slicePtrFieldType(mod).childType(mod);
const null_usize = try mod.intern(.{ .opt = .{
.ty = try mod.intern(.{ .opt_type = .usize_type }),
.val = .none,
} });
const array_decl_index = d: {
// Add mod.test_functions to an array decl then make the test_functions
@@ -6512,7 +6505,7 @@ pub fn populateTestFunctions(
const array_decl_index = try mod.createAnonymousDeclFromDecl(decl, decl.src_namespace, null, .{
.ty = try mod.arrayType(.{
.len = test_fn_vals.len,
-.child = tmp_test_fn_ty.ip_index,
+.child = test_fn_ty.ip_index,
.sentinel = .none,
}),
.val = try Value.Tag.aggregate.create(arena, test_fn_vals),
@@ -6530,7 +6523,7 @@ pub fn populateTestFunctions(
errdefer name_decl_arena.deinit();
const bytes = try name_decl_arena.allocator().dupe(u8, test_name_slice);
const test_name_decl_index = try mod.createAnonymousDeclFromDecl(array_decl, array_decl.src_namespace, null, .{
-.ty = try Type.array(name_decl_arena.allocator(), bytes.len, null, Type.u8, mod),
+.ty = try mod.arrayType(.{ .len = bytes.len, .child = .u8_type }),
.val = try Value.Tag.bytes.create(name_decl_arena.allocator(), bytes),
});
try mod.declPtr(test_name_decl_index).finalizeNewArena(&name_decl_arena);
@@ -6540,16 +6533,24 @@ pub fn populateTestFunctions(
array_decl.dependencies.putAssumeCapacityNoClobber(test_name_decl_index, .normal);
try mod.linkerUpdateDecl(test_name_decl_index);
-const field_vals = try arena.create([3]Value);
-field_vals.* = .{
-try Value.Tag.slice.create(arena, .{
-.ptr = try Value.Tag.decl_ref.create(arena, test_name_decl_index),
-.len = try mod.intValue(Type.usize, test_name_slice.len),
-}), // name
-try Value.Tag.decl_ref.create(arena, test_decl_index), // func
-Value.null, // async_frame_size
+const test_fn_fields = .{
+// name
+try mod.intern(.{ .ptr = .{
+.ty = .slice_const_u8_type,
+.addr = .{ .decl = test_name_decl_index },
+} }),
+// func
+try mod.intern(.{ .ptr = .{
+.ty = test_decl.ty.ip_index,
+.addr = .{ .decl = test_decl_index },
+} }),
+// async_frame_size
+null_usize,
};
-test_fn_vals[i] = try Value.Tag.aggregate.create(arena, field_vals);
+test_fn_vals[i] = (try mod.intern(.{ .aggregate = .{
+.ty = test_fn_ty.ip_index,
+.storage = .{ .elems = &test_fn_fields },
+} })).toValue();
}
try array_decl.finalizeNewArena(&new_decl_arena);
@@ -6558,26 +6559,18 @@ pub fn populateTestFunctions(
try mod.linkerUpdateDecl(array_decl_index);
{
-var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
-errdefer new_decl_arena.deinit();
-const arena = new_decl_arena.allocator();
-{
-// This copy accesses the old Decl Type/Value so it must be done before `clearValues`.
-const new_ty = try Type.ptr(arena, mod, .{
+const new_ty = try mod.ptrType(.{
+.elem_type = test_fn_ty.ip_index,
+.is_const = true,
.size = .Slice,
-.pointee_type = tmp_test_fn_ty,
-.mutable = false,
-.@"addrspace" = .generic,
});
-const new_var = try gpa.create(Var);
-errdefer gpa.destroy(new_var);
-new_var.* = decl.val.castTag(.variable).?.data.*;
-new_var.init = try Value.Tag.slice.create(arena, .{
-.ptr = try Value.Tag.decl_ref.create(arena, array_decl_index),
-.len = try mod.intValue(Type.usize, mod.test_functions.count()),
-});
-const new_val = try Value.Tag.variable.create(arena, new_var);
+const new_val = decl.val;
+const new_init = try mod.intern(.{ .ptr = .{
+.ty = new_ty.ip_index,
+.addr = .{ .decl = array_decl_index },
+.len = (try mod.intValue(Type.usize, mod.test_functions.count())).ip_index,
+} });
+mod.intern_pool.mutateVarInit(decl.val.ip_index, new_init);
// Since we are replacing the Decl's value we must perform cleanup on the
// previous value.
@@ -6586,9 +6579,6 @@ pub fn populateTestFunctions(
decl.val = new_val;
decl.has_tv = true;
}
-try decl.finalizeNewArena(&new_decl_arena);
-}
try mod.linkerUpdateDecl(decl_index);
}
@@ -6660,50 +6650,47 @@ fn reportRetryableFileError(
}
pub fn markReferencedDeclsAlive(mod: *Module, val: Value) void {
-if (val.ip_index != .none) return;
-switch (val.tag()) {
+switch (val.ip_index) {
+.none => switch (val.tag()) {
.decl_ref_mut => return mod.markDeclIndexAlive(val.castTag(.decl_ref_mut).?.data.decl_index),
.extern_fn => return mod.markDeclIndexAlive(val.castTag(.extern_fn).?.data.owner_decl),
.function => return mod.markDeclIndexAlive(val.castTag(.function).?.data.owner_decl),
.variable => return mod.markDeclIndexAlive(val.castTag(.variable).?.data.owner_decl),
.decl_ref => return mod.markDeclIndexAlive(val.cast(Value.Payload.Decl).?.data),
.repeated,
.eu_payload,
.opt_payload,
.empty_array_sentinel,
=> return mod.markReferencedDeclsAlive(val.cast(Value.Payload.SubValue).?.data),
.eu_payload_ptr,
.opt_payload_ptr,
=> return mod.markReferencedDeclsAlive(val.cast(Value.Payload.PayloadPtr).?.data.container_ptr),
.slice => {
const slice = val.cast(Value.Payload.Slice).?.data;
mod.markReferencedDeclsAlive(slice.ptr);
mod.markReferencedDeclsAlive(slice.len);
},
.elem_ptr => {
const elem_ptr = val.cast(Value.Payload.ElemPtr).?.data;
return mod.markReferencedDeclsAlive(elem_ptr.array_ptr);
},
.field_ptr => {
const field_ptr = val.cast(Value.Payload.FieldPtr).?.data;
return mod.markReferencedDeclsAlive(field_ptr.container_ptr);
},
.aggregate => {
for (val.castTag(.aggregate).?.data) |field_val| {
mod.markReferencedDeclsAlive(field_val);
}
},
.@"union" => {
-const data = val.cast(Value.Payload.Union).?.data;
+const data = val.castTag(.@"union").?.data;
mod.markReferencedDeclsAlive(data.tag);
mod.markReferencedDeclsAlive(data.val);
},
else => {},
+},
else => switch (mod.intern_pool.indexToKey(val.ip_index)) {
.variable => |variable| mod.markDeclIndexAlive(variable.decl),
.extern_func => |extern_func| mod.markDeclIndexAlive(extern_func.decl),
.func => |func| mod.markDeclIndexAlive(mod.funcPtr(func.index).owner_decl),
.error_union => |error_union| switch (error_union.val) {
.err_name => {},
.payload => |payload| mod.markReferencedDeclsAlive(payload.toValue()),
},
.ptr => |ptr| {
switch (ptr.addr) {
.decl => |decl| mod.markDeclIndexAlive(decl),
.mut_decl => |mut_decl| mod.markDeclIndexAlive(mut_decl.decl),
.int, .comptime_field => {},
.eu_payload, .opt_payload => |parent| mod.markReferencedDeclsAlive(parent.toValue()),
.elem, .field => |base_index| mod.markReferencedDeclsAlive(base_index.base.toValue()),
}
if (ptr.len != .none) mod.markReferencedDeclsAlive(ptr.len.toValue());
},
.opt => |opt| if (opt.val != .none) mod.markReferencedDeclsAlive(opt.val.toValue()),
.aggregate => |aggregate| for (aggregate.storage.values()) |elem|
mod.markReferencedDeclsAlive(elem.toValue()),
.un => |un| {
mod.markReferencedDeclsAlive(un.tag.toValue());
mod.markReferencedDeclsAlive(un.val.toValue());
},
else => {},
},
}
}
@@ -7075,6 +7062,12 @@ pub fn intBitsForValue(mod: *Module, val: Value, sign: bool) u16 {
return @intCast(u16, big.bitCountTwosComp());
},
.lazy_align => |lazy_ty| {
return Type.smallestUnsignedBits(lazy_ty.toType().abiAlignment(mod)) + @boolToInt(sign);
},
.lazy_size => |lazy_ty| {
return Type.smallestUnsignedBits(lazy_ty.toType().abiSize(mod)) + @boolToInt(sign);
},
}
}

File diff suppressed because it is too large.


@@ -102,248 +102,15 @@ pub fn print(
return writer.writeAll(" }");
},
.the_only_possible_value => return writer.writeAll("0"),
.lazy_align => {
const sub_ty = val.castTag(.lazy_align).?.data;
const x = sub_ty.abiAlignment(mod);
return writer.print("{d}", .{x});
},
.lazy_size => {
const sub_ty = val.castTag(.lazy_size).?.data;
const x = sub_ty.abiSize(mod);
return writer.print("{d}", .{x});
},
.function => return writer.print("(function '{s}')", .{
mod.declPtr(val.castTag(.function).?.data.owner_decl).name,
}),
.extern_fn => return writer.writeAll("(extern function)"),
.variable => unreachable,
.decl_ref_mut => {
const decl_index = val.castTag(.decl_ref_mut).?.data.decl_index;
const decl = mod.declPtr(decl_index);
if (level == 0) {
return writer.print("(decl ref mut '{s}')", .{decl.name});
}
return print(.{
.ty = decl.ty,
.val = decl.val,
}, writer, level - 1, mod);
},
.decl_ref => {
const decl_index = val.castTag(.decl_ref).?.data;
const decl = mod.declPtr(decl_index);
if (level == 0) {
return writer.print("(decl ref '{s}')", .{decl.name});
}
return print(.{
.ty = decl.ty,
.val = decl.val,
}, writer, level - 1, mod);
},
.comptime_field_ptr => {
const payload = val.castTag(.comptime_field_ptr).?.data;
if (level == 0) {
return writer.writeAll("(comptime field ptr)");
}
return print(.{
.ty = payload.field_ty,
.val = payload.field_val,
}, writer, level - 1, mod);
},
.elem_ptr => {
const elem_ptr = val.castTag(.elem_ptr).?.data;
try writer.writeAll("&");
if (level == 0) {
try writer.writeAll("(ptr)");
} else {
try print(.{
.ty = elem_ptr.elem_ty,
.val = elem_ptr.array_ptr,
}, writer, level - 1, mod);
}
return writer.print("[{}]", .{elem_ptr.index});
},
.field_ptr => {
const field_ptr = val.castTag(.field_ptr).?.data;
try writer.writeAll("&");
if (level == 0) {
try writer.writeAll("(ptr)");
} else {
try print(.{
.ty = field_ptr.container_ty,
.val = field_ptr.container_ptr,
}, writer, level - 1, mod);
}
if (field_ptr.container_ty.zigTypeTag(mod) == .Struct) {
switch (mod.intern_pool.indexToKey(field_ptr.container_ty.ip_index)) {
.anon_struct_type => |anon_struct| {
if (anon_struct.names.len == 0) {
return writer.print(".@\"{d}\"", .{field_ptr.field_index});
}
},
else => {},
}
const field_name = field_ptr.container_ty.structFieldName(field_ptr.field_index, mod);
return writer.print(".{s}", .{field_name});
} else if (field_ptr.container_ty.zigTypeTag(mod) == .Union) {
const field_name = field_ptr.container_ty.unionFields(mod).keys()[field_ptr.field_index];
return writer.print(".{s}", .{field_name});
} else if (field_ptr.container_ty.isSlice(mod)) {
switch (field_ptr.field_index) {
Value.Payload.Slice.ptr_index => return writer.writeAll(".ptr"),
Value.Payload.Slice.len_index => return writer.writeAll(".len"),
else => unreachable,
}
}
},
.empty_array => return writer.writeAll(".{}"),
.enum_literal => return writer.print(".{}", .{std.zig.fmtId(val.castTag(.enum_literal).?.data)}),
.bytes => return writer.print("\"{}\"", .{std.zig.fmtEscapes(val.castTag(.bytes).?.data)}),
.str_lit => {
const str_lit = val.castTag(.str_lit).?.data;
const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
return writer.print("\"{}\"", .{std.zig.fmtEscapes(bytes)});
},
.repeated => {
if (level == 0) {
return writer.writeAll(".{ ... }");
}
var i: u32 = 0;
try writer.writeAll(".{ ");
const elem_tv = TypedValue{
.ty = ty.elemType2(mod),
.val = val.castTag(.repeated).?.data,
};
const len = ty.arrayLen(mod);
const max_len = std.math.min(len, max_aggregate_items);
while (i < max_len) : (i += 1) {
if (i != 0) try writer.writeAll(", ");
try print(elem_tv, writer, level - 1, mod);
}
if (len > max_aggregate_items) {
try writer.writeAll(", ...");
}
return writer.writeAll(" }");
},
.empty_array_sentinel => {
if (level == 0) {
return writer.writeAll(".{ (sentinel) }");
}
try writer.writeAll(".{ ");
try print(.{
.ty = ty.elemType2(mod),
.val = ty.sentinel(mod).?,
}, writer, level - 1, mod);
return writer.writeAll(" }");
},
.slice => {
if (level == 0) {
return writer.writeAll(".{ ... }");
}
const payload = val.castTag(.slice).?.data;
const elem_ty = ty.elemType2(mod);
const len = payload.len.toUnsignedInt(mod);
if (elem_ty.eql(Type.u8, mod)) str: {
const max_len = @intCast(usize, std.math.min(len, max_string_len));
var buf: [max_string_len]u8 = undefined;
var i: u32 = 0;
while (i < max_len) : (i += 1) {
const elem_val = payload.ptr.elemValue(mod, i) catch |err| switch (err) {
error.OutOfMemory => @panic("OOM"), // TODO: eliminate this panic
};
if (elem_val.isUndef(mod)) break :str;
buf[i] = std.math.cast(u8, elem_val.toUnsignedInt(mod)) orelse break :str;
}
// TODO would be nice if this had a bit of unicode awareness.
const truncated = if (len > max_string_len) " (truncated)" else "";
return writer.print("\"{}{s}\"", .{ std.zig.fmtEscapes(buf[0..max_len]), truncated });
}
try writer.writeAll(".{ ");
const max_len = std.math.min(len, max_aggregate_items);
var i: u32 = 0;
while (i < max_len) : (i += 1) {
if (i != 0) try writer.writeAll(", ");
const elem_val = payload.ptr.elemValue(mod, i) catch |err| switch (err) {
error.OutOfMemory => @panic("OOM"), // TODO: eliminate this panic
};
try print(.{
.ty = elem_ty,
.val = elem_val,
}, writer, level - 1, mod);
}
if (len > max_aggregate_items) {
try writer.writeAll(", ...");
}
return writer.writeAll(" }");
},
.@"error" => return writer.print("error.{s}", .{val.castTag(.@"error").?.data.name}),
.eu_payload => {
val = val.castTag(.eu_payload).?.data;
ty = ty.errorUnionPayload(mod);
},
.opt_payload => {
val = val.castTag(.opt_payload).?.data;
ty = ty.optionalChild(mod);
return print(.{ .ty = ty, .val = val }, writer, level, mod);
},
.eu_payload_ptr => {
try writer.writeAll("&");
if (level == 0) {
return writer.writeAll("(ptr)");
}
const data = val.castTag(.eu_payload_ptr).?.data;
try writer.writeAll("@as(");
try print(.{
.ty = Type.type,
.val = ty.toValue(),
}, writer, level - 1, mod);
try writer.writeAll(", &(payload of ");
try print(.{
.ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
.val = data.container_ptr,
}, writer, level - 1, mod);
try writer.writeAll("))");
return;
},
.opt_payload_ptr => {
if (level == 0) {
return writer.writeAll("&(ptr)");
}
const data = val.castTag(.opt_payload_ptr).?.data;
try writer.writeAll("@as(");
try print(.{
.ty = Type.type,
.val = ty.toValue(),
}, writer, level - 1, mod);
try writer.writeAll(", &(payload of ");
try print(.{
.ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
.val = data.container_ptr,
}, writer, level - 1, mod);
try writer.writeAll("))");
return;
},
// TODO these should not appear in this function // TODO these should not appear in this function
.inferred_alloc => return writer.writeAll("(inferred allocation value)"), .inferred_alloc => return writer.writeAll("(inferred allocation value)"),
.inferred_alloc_comptime => return writer.writeAll("(inferred comptime allocation value)"), .inferred_alloc_comptime => return writer.writeAll("(inferred comptime allocation value)"),
.runtime_value => return writer.writeAll("[runtime value]"),
}, },
else => { else => {
const key = mod.intern_pool.indexToKey(val.ip_index); const key = mod.intern_pool.indexToKey(val.ip_index);
@ -353,6 +120,12 @@ pub fn print(
switch (key) { switch (key) {
.int => |int| switch (int.storage) { .int => |int| switch (int.storage) {
inline .u64, .i64, .big_int => |x| return writer.print("{}", .{x}), inline .u64, .i64, .big_int => |x| return writer.print("{}", .{x}),
.lazy_align => |lazy_ty| return writer.print("{d}", .{
lazy_ty.toType().abiAlignment(mod),
}),
.lazy_size => |lazy_ty| return writer.print("{d}", .{
lazy_ty.toType().abiSize(mod),
}),
}, },
.enum_tag => |enum_tag| { .enum_tag => |enum_tag| {
if (level == 0) { if (level == 0) {
@ -407,7 +180,7 @@ fn printAggregate(
} }
try print(.{ try print(.{
.ty = ty.structFieldType(i, mod), .ty = ty.structFieldType(i, mod),
.val = try val.fieldValue(ty, mod, i), .val = try val.fieldValue(mod, i),
}, writer, level - 1, mod); }, writer, level - 1, mod);
} }
if (ty.structFieldCount(mod) > max_aggregate_items) { if (ty.structFieldCount(mod) > max_aggregate_items) {
@ -424,7 +197,7 @@ fn printAggregate(
var i: u32 = 0; var i: u32 = 0;
while (i < max_len) : (i += 1) { while (i < max_len) : (i += 1) {
const elem = try val.fieldValue(ty, mod, i); const elem = try val.fieldValue(mod, i);
if (elem.isUndef(mod)) break :str; if (elem.isUndef(mod)) break :str;
buf[i] = std.math.cast(u8, elem.toUnsignedInt(mod)) orelse break :str; buf[i] = std.math.cast(u8, elem.toUnsignedInt(mod)) orelse break :str;
} }
@ -441,7 +214,7 @@ fn printAggregate(
if (i != 0) try writer.writeAll(", "); if (i != 0) try writer.writeAll(", ");
try print(.{ try print(.{
.ty = elem_ty, .ty = elem_ty,
.val = try val.fieldValue(ty, mod, i), .val = try val.fieldValue(mod, i),
}, writer, level - 1, mod); }, writer, level - 1, mod);
} }
if (len > max_aggregate_items) { if (len > max_aggregate_items) {
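The value-printing hunks above show the pattern this commit applies throughout: data that used to be reached through legacy Value tag payloads (val.castTag(...).?.data) is now interned, and callers ask the pool for a structured key via mod.intern_pool.indexToKey(val.ip_index) and switch on it. A minimal runnable sketch of that lookup shape, using invented Index/Key stand-ins rather than the compiler's real InternPool types:

const std = @import("std");

// Hypothetical miniature of the pool lookup; only the shape matches the diff above.
const Index = enum(u32) { void_value, answer };

const Key = union(enum) {
    simple_void,
    int: struct { bits: u16, value: u64 },
};

fn indexToKey(index: Index) Key {
    return switch (index) {
        .void_value => .simple_void,
        .answer => .{ .int = .{ .bits = 64, .value = 42 } },
    };
}

pub fn main() void {
    // Old style: val.castTag(.int_u64).?.data; new style: switch on the key.
    switch (indexToKey(.answer)) {
        .simple_void => std.debug.print("void\n", .{}),
        .int => |int| std.debug.print("u{d} int: {d}\n", .{ int.bits, int.value }),
    }
}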

View file

@ -2108,8 +2108,8 @@ pub const Inst = struct {
manyptr_const_u8_type = @enumToInt(InternPool.Index.manyptr_const_u8_type), manyptr_const_u8_type = @enumToInt(InternPool.Index.manyptr_const_u8_type),
manyptr_const_u8_sentinel_0_type = @enumToInt(InternPool.Index.manyptr_const_u8_sentinel_0_type), manyptr_const_u8_sentinel_0_type = @enumToInt(InternPool.Index.manyptr_const_u8_sentinel_0_type),
single_const_pointer_to_comptime_int_type = @enumToInt(InternPool.Index.single_const_pointer_to_comptime_int_type), single_const_pointer_to_comptime_int_type = @enumToInt(InternPool.Index.single_const_pointer_to_comptime_int_type),
const_slice_u8_type = @enumToInt(InternPool.Index.const_slice_u8_type), slice_const_u8_type = @enumToInt(InternPool.Index.slice_const_u8_type),
const_slice_u8_sentinel_0_type = @enumToInt(InternPool.Index.const_slice_u8_sentinel_0_type), slice_const_u8_sentinel_0_type = @enumToInt(InternPool.Index.slice_const_u8_sentinel_0_type),
anyerror_void_error_union_type = @enumToInt(InternPool.Index.anyerror_void_error_union_type), anyerror_void_error_union_type = @enumToInt(InternPool.Index.anyerror_void_error_union_type),
generic_poison_type = @enumToInt(InternPool.Index.generic_poison_type), generic_poison_type = @enumToInt(InternPool.Index.generic_poison_type),
inferred_alloc_const_type = @enumToInt(InternPool.Index.inferred_alloc_const_type), inferred_alloc_const_type = @enumToInt(InternPool.Index.inferred_alloc_const_type),

View file

@ -328,7 +328,7 @@ const Self = @This();
pub fn generate( pub fn generate(
bin_file: *link.File, bin_file: *link.File,
src_loc: Module.SrcLoc, src_loc: Module.SrcLoc,
module_fn: *Module.Fn, module_fn_index: Module.Fn.Index,
air: Air, air: Air,
liveness: Liveness, liveness: Liveness,
code: *std.ArrayList(u8), code: *std.ArrayList(u8),
@ -339,6 +339,7 @@ pub fn generate(
} }
const mod = bin_file.options.module.?; const mod = bin_file.options.module.?;
const module_fn = mod.funcPtr(module_fn_index);
const fn_owner_decl = mod.declPtr(module_fn.owner_decl); const fn_owner_decl = mod.declPtr(module_fn.owner_decl);
assert(fn_owner_decl.has_tv); assert(fn_owner_decl.has_tv);
const fn_type = fn_owner_decl.ty; const fn_type = fn_owner_decl.ty;
@ -4311,9 +4312,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
// Due to incremental compilation, how function calls are generated depends // Due to incremental compilation, how function calls are generated depends
// on linking. // on linking.
if (try self.air.value(callee, mod)) |func_value| { if (try self.air.value(callee, mod)) |func_value| {
if (func_value.castTag(.function)) |func_payload| { if (func_value.getFunction(mod)) |func| {
const func = func_payload.data;
if (self.bin_file.cast(link.File.Elf)) |elf_file| { if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl); const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl);
const atom = elf_file.getAtom(atom_index); const atom = elf_file.getAtom(atom_index);
@ -4353,10 +4352,9 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.tag = .blr, .tag = .blr,
.data = .{ .reg = .x30 }, .data = .{ .reg = .x30 },
}); });
} else if (func_value.castTag(.extern_fn)) |func_payload| { } else if (func_value.getExternFunc(mod)) |extern_func| {
const extern_fn = func_payload.data; const decl_name = mem.sliceTo(mod.declPtr(extern_func.decl).name, 0);
const decl_name = mem.sliceTo(mod.declPtr(extern_fn.owner_decl).name, 0); const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
const lib_name = mem.sliceTo(extern_fn.lib_name, 0);
if (self.bin_file.cast(link.File.MachO)) |macho_file| { if (self.bin_file.cast(link.File.MachO)) |macho_file| {
const sym_index = try macho_file.getGlobalSymbol(decl_name, lib_name); const sym_index = try macho_file.getGlobalSymbol(decl_name, lib_name);
const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl); const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
@ -4627,7 +4625,8 @@ fn airDbgStmt(self: *Self, inst: Air.Inst.Index) !void {
fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void { fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const function = self.air.values[ty_pl.payload].castTag(.function).?.data; const mod = self.bin_file.options.module.?;
const function = self.air.values[ty_pl.payload].getFunction(mod).?;
// TODO emit debug info for function change // TODO emit debug info for function change
_ = function; _ = function;
return self.finishAir(inst, .dead, .{ .none, .none, .none }); return self.finishAir(inst, .dead, .{ .none, .none, .none });
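Another recurring change, visible in the generate() signature above, is that backends now receive a Module.Fn.Index and resolve it locally with mod.funcPtr(...) instead of being handed a *Module.Fn. A small self-contained sketch of that index-then-lookup shape, with invented Fn/Module types (not the real Module API):

const std = @import("std");

const Fn = struct { owner_decl: u32 };
const FnIndex = enum(u32) { main_fn, helper_fn, _ };

const Module = struct {
    funcs: []const Fn,

    // Resolve a stable index to a pointer only at the point of use.
    fn funcPtr(mod: *const Module, index: FnIndex) *const Fn {
        return &mod.funcs[@enumToInt(index)];
    }
};

fn generate(mod: *const Module, module_fn_index: FnIndex) u32 {
    const module_fn = mod.funcPtr(module_fn_index);
    return module_fn.owner_decl;
}

pub fn main() void {
    const funcs = [_]Fn{ .{ .owner_decl = 7 }, .{ .owner_decl = 9 } };
    const mod = Module{ .funcs = &funcs };
    std.debug.print("owner decl: {d}\n", .{generate(&mod, .helper_fn)});
}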

View file

@ -334,7 +334,7 @@ const Self = @This();
pub fn generate( pub fn generate(
bin_file: *link.File, bin_file: *link.File,
src_loc: Module.SrcLoc, src_loc: Module.SrcLoc,
module_fn: *Module.Fn, module_fn_index: Module.Fn.Index,
air: Air, air: Air,
liveness: Liveness, liveness: Liveness,
code: *std.ArrayList(u8), code: *std.ArrayList(u8),
@ -345,6 +345,7 @@ pub fn generate(
} }
const mod = bin_file.options.module.?; const mod = bin_file.options.module.?;
const module_fn = mod.funcPtr(module_fn_index);
const fn_owner_decl = mod.declPtr(module_fn.owner_decl); const fn_owner_decl = mod.declPtr(module_fn.owner_decl);
assert(fn_owner_decl.has_tv); assert(fn_owner_decl.has_tv);
const fn_type = fn_owner_decl.ty; const fn_type = fn_owner_decl.ty;
@ -4291,9 +4292,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
// Due to incremental compilation, how function calls are generated depends // Due to incremental compilation, how function calls are generated depends
// on linking. // on linking.
if (try self.air.value(callee, mod)) |func_value| { if (try self.air.value(callee, mod)) |func_value| {
if (func_value.castTag(.function)) |func_payload| { if (func_value.getFunction(mod)) |func| {
const func = func_payload.data;
if (self.bin_file.cast(link.File.Elf)) |elf_file| { if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl); const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl);
const atom = elf_file.getAtom(atom_index); const atom = elf_file.getAtom(atom_index);
@ -4308,7 +4307,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
@tagName(self.target.cpu.arch), @tagName(self.target.cpu.arch),
}); });
} }
} else if (func_value.castTag(.extern_fn)) |_| { } else if (func_value.getExternFunc(mod)) |_| {
return self.fail("TODO implement calling extern functions", .{}); return self.fail("TODO implement calling extern functions", .{});
} else { } else {
return self.fail("TODO implement calling bitcasted functions", .{}); return self.fail("TODO implement calling bitcasted functions", .{});
@ -4573,7 +4572,8 @@ fn airDbgStmt(self: *Self, inst: Air.Inst.Index) !void {
fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void { fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const function = self.air.values[ty_pl.payload].castTag(.function).?.data; const mod = self.bin_file.options.module.?;
const function = self.air.values[ty_pl.payload].getFunction(mod).?;
// TODO emit debug info for function change // TODO emit debug info for function change
_ = function; _ = function;
return self.finishAir(inst, .dead, .{ .none, .none, .none }); return self.finishAir(inst, .dead, .{ .none, .none, .none });

View file

@ -217,7 +217,7 @@ const Self = @This();
pub fn generate( pub fn generate(
bin_file: *link.File, bin_file: *link.File,
src_loc: Module.SrcLoc, src_loc: Module.SrcLoc,
module_fn: *Module.Fn, module_fn_index: Module.Fn.Index,
air: Air, air: Air,
liveness: Liveness, liveness: Liveness,
code: *std.ArrayList(u8), code: *std.ArrayList(u8),
@ -228,6 +228,7 @@ pub fn generate(
} }
const mod = bin_file.options.module.?; const mod = bin_file.options.module.?;
const module_fn = mod.funcPtr(module_fn_index);
const fn_owner_decl = mod.declPtr(module_fn.owner_decl); const fn_owner_decl = mod.declPtr(module_fn.owner_decl);
assert(fn_owner_decl.has_tv); assert(fn_owner_decl.has_tv);
const fn_type = fn_owner_decl.ty; const fn_type = fn_owner_decl.ty;
@ -1745,8 +1746,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
} }
if (try self.air.value(callee, mod)) |func_value| { if (try self.air.value(callee, mod)) |func_value| {
if (func_value.castTag(.function)) |func_payload| { if (mod.funcPtrUnwrap(mod.intern_pool.indexToFunc(func_value.ip_index))) |func| {
const func = func_payload.data;
const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl); const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl);
const atom = elf_file.getAtom(atom_index); const atom = elf_file.getAtom(atom_index);
_ = try atom.getOrCreateOffsetTableEntry(elf_file); _ = try atom.getOrCreateOffsetTableEntry(elf_file);
@ -1760,7 +1760,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.imm12 = 0, .imm12 = 0,
} }, } },
}); });
} else if (func_value.castTag(.extern_fn)) |_| { } else if (mod.intern_pool.indexToKey(func_value.ip_index) == .extern_func) {
return self.fail("TODO implement calling extern functions", .{}); return self.fail("TODO implement calling extern functions", .{});
} else { } else {
return self.fail("TODO implement calling bitcasted functions", .{}); return self.fail("TODO implement calling bitcasted functions", .{});
@ -1879,7 +1879,8 @@ fn airDbgStmt(self: *Self, inst: Air.Inst.Index) !void {
fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void { fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const function = self.air.values[ty_pl.payload].castTag(.function).?.data; const mod = self.bin_file.options.module.?;
const function = self.air.values[ty_pl.payload].getFunction(mod).?;
// TODO emit debug info for function change // TODO emit debug info for function change
_ = function; _ = function;
return self.finishAir(inst, .dead, .{ .none, .none, .none }); return self.finishAir(inst, .dead, .{ .none, .none, .none });

View file

@ -260,7 +260,7 @@ const BigTomb = struct {
pub fn generate( pub fn generate(
bin_file: *link.File, bin_file: *link.File,
src_loc: Module.SrcLoc, src_loc: Module.SrcLoc,
module_fn: *Module.Fn, module_fn_index: Module.Fn.Index,
air: Air, air: Air,
liveness: Liveness, liveness: Liveness,
code: *std.ArrayList(u8), code: *std.ArrayList(u8),
@ -271,6 +271,7 @@ pub fn generate(
} }
const mod = bin_file.options.module.?; const mod = bin_file.options.module.?;
const module_fn = mod.funcPtr(module_fn_index);
const fn_owner_decl = mod.declPtr(module_fn.owner_decl); const fn_owner_decl = mod.declPtr(module_fn.owner_decl);
assert(fn_owner_decl.has_tv); assert(fn_owner_decl.has_tv);
const fn_type = fn_owner_decl.ty; const fn_type = fn_owner_decl.ty;
@ -1346,8 +1347,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
// on linking. // on linking.
if (try self.air.value(callee, mod)) |func_value| { if (try self.air.value(callee, mod)) |func_value| {
if (self.bin_file.tag == link.File.Elf.base_tag) { if (self.bin_file.tag == link.File.Elf.base_tag) {
if (func_value.castTag(.function)) |func_payload| { if (mod.funcPtrUnwrap(mod.intern_pool.indexToFunc(func_value.ip_index))) |func| {
const func = func_payload.data;
const got_addr = if (self.bin_file.cast(link.File.Elf)) |elf_file| blk: { const got_addr = if (self.bin_file.cast(link.File.Elf)) |elf_file| blk: {
const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl); const atom_index = try elf_file.getOrCreateAtomForDecl(func.owner_decl);
const atom = elf_file.getAtom(atom_index); const atom = elf_file.getAtom(atom_index);
@ -1374,7 +1374,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.tag = .nop, .tag = .nop,
.data = .{ .nop = {} }, .data = .{ .nop = {} },
}); });
} else if (func_value.castTag(.extern_fn)) |_| { } else if (mod.intern_pool.indexToKey(func_value.ip_index) == .extern_func) {
return self.fail("TODO implement calling extern functions", .{}); return self.fail("TODO implement calling extern functions", .{});
} else { } else {
return self.fail("TODO implement calling bitcasted functions", .{}); return self.fail("TODO implement calling bitcasted functions", .{});
@ -1663,7 +1663,8 @@ fn airDbgBlock(self: *Self, inst: Air.Inst.Index) !void {
fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void { fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const function = self.air.values[ty_pl.payload].castTag(.function).?.data; const mod = self.bin_file.options.module.?;
const function = self.air.values[ty_pl.payload].getFunction(mod).?;
// TODO emit debug info for function change // TODO emit debug info for function change
_ = function; _ = function;
return self.finishAir(inst, .dead, .{ .none, .none, .none }); return self.finishAir(inst, .dead, .{ .none, .none, .none });

View file

@ -1203,20 +1203,22 @@ fn genFunctype(
pub fn generate( pub fn generate(
bin_file: *link.File, bin_file: *link.File,
src_loc: Module.SrcLoc, src_loc: Module.SrcLoc,
func: *Module.Fn, func_index: Module.Fn.Index,
air: Air, air: Air,
liveness: Liveness, liveness: Liveness,
code: *std.ArrayList(u8), code: *std.ArrayList(u8),
debug_output: codegen.DebugInfoOutput, debug_output: codegen.DebugInfoOutput,
) codegen.CodeGenError!codegen.Result { ) codegen.CodeGenError!codegen.Result {
_ = src_loc; _ = src_loc;
const mod = bin_file.options.module.?;
const func = mod.funcPtr(func_index);
var code_gen: CodeGen = .{ var code_gen: CodeGen = .{
.gpa = bin_file.allocator, .gpa = bin_file.allocator,
.air = air, .air = air,
.liveness = liveness, .liveness = liveness,
.code = code, .code = code,
.decl_index = func.owner_decl, .decl_index = func.owner_decl,
.decl = bin_file.options.module.?.declPtr(func.owner_decl), .decl = mod.declPtr(func.owner_decl),
.err_msg = undefined, .err_msg = undefined,
.locals = .{}, .locals = .{},
.target = bin_file.options.target, .target = bin_file.options.target,
@ -2196,27 +2198,33 @@ fn airCall(func: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
const callee: ?Decl.Index = blk: { const callee: ?Decl.Index = blk: {
const func_val = (try func.air.value(pl_op.operand, mod)) orelse break :blk null; const func_val = (try func.air.value(pl_op.operand, mod)) orelse break :blk null;
if (func_val.castTag(.function)) |function| { if (func_val.getFunction(mod)) |function| {
_ = try func.bin_file.getOrCreateAtomForDecl(function.data.owner_decl); _ = try func.bin_file.getOrCreateAtomForDecl(function.owner_decl);
break :blk function.data.owner_decl; break :blk function.owner_decl;
} else if (func_val.castTag(.extern_fn)) |extern_fn| { } else if (func_val.getExternFunc(mod)) |extern_func| {
const ext_decl = mod.declPtr(extern_fn.data.owner_decl); const ext_decl = mod.declPtr(extern_func.decl);
const ext_info = mod.typeToFunc(ext_decl.ty).?; const ext_info = mod.typeToFunc(ext_decl.ty).?;
var func_type = try genFunctype(func.gpa, ext_info.cc, ext_info.param_types, ext_info.return_type.toType(), mod); var func_type = try genFunctype(func.gpa, ext_info.cc, ext_info.param_types, ext_info.return_type.toType(), mod);
defer func_type.deinit(func.gpa); defer func_type.deinit(func.gpa);
const atom_index = try func.bin_file.getOrCreateAtomForDecl(extern_fn.data.owner_decl); const atom_index = try func.bin_file.getOrCreateAtomForDecl(extern_func.decl);
const atom = func.bin_file.getAtomPtr(atom_index); const atom = func.bin_file.getAtomPtr(atom_index);
const type_index = try func.bin_file.storeDeclType(extern_fn.data.owner_decl, func_type); const type_index = try func.bin_file.storeDeclType(extern_func.decl, func_type);
try func.bin_file.addOrUpdateImport( try func.bin_file.addOrUpdateImport(
mem.sliceTo(ext_decl.name, 0), mem.sliceTo(ext_decl.name, 0),
atom.getSymbolIndex().?, atom.getSymbolIndex().?,
ext_decl.getExternFn().?.lib_name, mod.intern_pool.stringToSliceUnwrap(ext_decl.getExternFunc(mod).?.lib_name),
type_index, type_index,
); );
break :blk extern_fn.data.owner_decl; break :blk extern_func.decl;
} else if (func_val.castTag(.decl_ref)) |decl_ref| { } else switch (mod.intern_pool.indexToKey(func_val.ip_index)) {
_ = try func.bin_file.getOrCreateAtomForDecl(decl_ref.data); .ptr => |ptr| switch (ptr.addr) {
break :blk decl_ref.data; .decl => |decl| {
_ = try func.bin_file.getOrCreateAtomForDecl(decl);
break :blk decl;
},
else => {},
},
else => {},
} }
return func.fail("Expected a function, but instead found type '{}'", .{func_val.tag()}); return func.fail("Expected a function, but instead found type '{}'", .{func_val.tag()});
}; };
@ -2932,29 +2940,41 @@ fn wrapOperand(func: *CodeGen, operand: WValue, ty: Type) InnerError!WValue {
return WValue{ .stack = {} }; return WValue{ .stack = {} };
} }
fn lowerParentPtr(func: *CodeGen, ptr_val: Value, offset: u32) InnerError!WValue { fn lowerParentPtr(func: *CodeGen, ptr_val: Value) InnerError!WValue {
const mod = func.bin_file.base.options.module.?; const mod = func.bin_file.base.options.module.?;
switch (ptr_val.tag()) { const ptr = mod.intern_pool.indexToKey(ptr_val.ip_index).ptr;
.decl_ref_mut => { switch (ptr.addr) {
const decl_index = ptr_val.castTag(.decl_ref_mut).?.data.decl_index; .decl => |decl_index| {
return func.lowerParentPtrDecl(ptr_val, decl_index, offset); return func.lowerParentPtrDecl(ptr_val, decl_index, 0);
}, },
.decl_ref => { .mut_decl => |mut_decl| {
const decl_index = ptr_val.castTag(.decl_ref).?.data; const decl_index = mut_decl.decl;
return func.lowerParentPtrDecl(ptr_val, decl_index, offset); return func.lowerParentPtrDecl(ptr_val, decl_index, 0);
}, },
.variable => { .int, .eu_payload => |tag| return func.fail("TODO: Implement lowerParentPtr for {}", .{tag}),
const decl_index = ptr_val.castTag(.variable).?.data.owner_decl; .opt_payload => |base_ptr| {
return func.lowerParentPtrDecl(ptr_val, decl_index, offset); return func.lowerParentPtr(base_ptr.toValue());
}, },
.field_ptr => { .comptime_field => unreachable,
const field_ptr = ptr_val.castTag(.field_ptr).?.data; .elem => |elem| {
const parent_ty = field_ptr.container_ty; const index = elem.index;
const elem_type = mod.intern_pool.typeOf(elem.base).toType().elemType2(mod);
const offset = index * elem_type.abiSize(mod);
const array_ptr = try func.lowerParentPtr(elem.base.toValue());
const field_offset = switch (parent_ty.zigTypeTag(mod)) { return WValue{ .memory_offset = .{
.pointer = array_ptr.memory,
.offset = @intCast(u32, offset),
} };
},
.field => |field| {
const parent_ty = mod.intern_pool.typeOf(field.base).toType().childType(mod);
const parent_ptr = try func.lowerParentPtr(field.base.toValue());
const offset = switch (parent_ty.zigTypeTag(mod)) {
.Struct => switch (parent_ty.containerLayout(mod)) { .Struct => switch (parent_ty.containerLayout(mod)) {
.Packed => parent_ty.packedStructFieldByteOffset(field_ptr.field_index, mod), .Packed => parent_ty.packedStructFieldByteOffset(field.index, mod),
else => parent_ty.structFieldOffset(field_ptr.field_index, mod), else => parent_ty.structFieldOffset(field.index, mod),
}, },
.Union => switch (parent_ty.containerLayout(mod)) { .Union => switch (parent_ty.containerLayout(mod)) {
.Packed => 0, .Packed => 0,
@ -2964,12 +2984,12 @@ fn lowerParentPtr(func: *CodeGen, ptr_val: Value, offset: u32) InnerError!WValue
if (layout.payload_align > layout.tag_align) break :blk 0; if (layout.payload_align > layout.tag_align) break :blk 0;
// tag is stored first so calculate offset from where payload starts // tag is stored first so calculate offset from where payload starts
const field_offset = @intCast(u32, std.mem.alignForwardGeneric(u64, layout.tag_size, layout.tag_align)); const offset = @intCast(u32, std.mem.alignForwardGeneric(u64, layout.tag_size, layout.tag_align));
break :blk field_offset; break :blk offset;
}, },
}, },
.Pointer => switch (parent_ty.ptrSize(mod)) { .Pointer => switch (parent_ty.ptrSize(mod)) {
.Slice => switch (field_ptr.field_index) { .Slice => switch (field.index) {
0 => 0, 0 => 0,
1 => func.ptrSize(), 1 => func.ptrSize(),
else => unreachable, else => unreachable,
@ -2978,19 +2998,23 @@ fn lowerParentPtr(func: *CodeGen, ptr_val: Value, offset: u32) InnerError!WValue
}, },
else => unreachable, else => unreachable,
}; };
return func.lowerParentPtr(field_ptr.container_ptr, offset + @intCast(u32, field_offset));
return switch (parent_ptr) {
.memory => |ptr_| WValue{
.memory_offset = .{
.pointer = ptr_,
.offset = @intCast(u32, offset),
}, },
.elem_ptr => {
const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
const index = elem_ptr.index;
const elem_offset = index * elem_ptr.elem_ty.abiSize(mod);
return func.lowerParentPtr(elem_ptr.array_ptr, offset + @intCast(u32, elem_offset));
}, },
.opt_payload_ptr => { .memory_offset => |mem_off| WValue{
const payload_ptr = ptr_val.castTag(.opt_payload_ptr).?.data; .memory_offset = .{
return func.lowerParentPtr(payload_ptr.container_ptr, offset); .pointer = mem_off.pointer,
.offset = @intCast(u32, offset) + mem_off.offset,
},
},
else => unreachable,
};
}, },
else => |tag| return func.fail("TODO: Implement lowerParentPtr for tag: {}", .{tag}),
} }
} }
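The lowerParentPtr rewrite above replaces the old decl_ref / field_ptr / elem_ptr value tags with a single ptr.addr union: lowering recurses on the base pointer and accumulates a byte offset for elem and field steps. A toy version of that recursion, with invented types and sizes rather than the real WValue/pointer representation:

const std = @import("std");

const PtrAddr = union(enum) {
    decl: u32, // the pointer is "the declaration itself"
    elem: struct { base: *const PtrAddr, index: u64, elem_size: u64 },
    field: struct { base: *const PtrAddr, byte_offset: u64 },
};

const Lowered = struct { decl: u32, offset: u64 };

fn lowerParentPtr(addr: PtrAddr) Lowered {
    return switch (addr) {
        .decl => |decl| .{ .decl = decl, .offset = 0 },
        .elem => |elem| blk: {
            var base = lowerParentPtr(elem.base.*);
            base.offset += elem.index * elem.elem_size;
            break :blk base;
        },
        .field => |field| blk: {
            var base = lowerParentPtr(field.base.*);
            base.offset += field.byte_offset;
            break :blk base;
        },
    };
}

pub fn main() void {
    const root = PtrAddr{ .decl = 3 };
    const third_elem = PtrAddr{ .elem = .{ .base = &root, .index = 2, .elem_size = 8 } };
    const inner_field = PtrAddr{ .field = .{ .base = &third_elem, .byte_offset = 4 } };
    const lowered = lowerParentPtr(inner_field);
    std.debug.print("decl {d} + {d} bytes\n", .{ lowered.decl, lowered.offset });
}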
@ -3045,21 +3069,97 @@ fn toTwosComplement(value: anytype, bits: u7) std.meta.Int(.unsigned, @typeInfo(
fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue { fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue {
const mod = func.bin_file.base.options.module.?; const mod = func.bin_file.base.options.module.?;
var val = arg_val; var val = arg_val;
if (val.castTag(.runtime_value)) |rt| { switch (mod.intern_pool.indexToKey(val.ip_index)) {
val = rt.data; .runtime_value => |rt| val = rt.val.toValue(),
else => {},
} }
if (val.isUndefDeep(mod)) return func.emitUndefined(ty); if (val.isUndefDeep(mod)) return func.emitUndefined(ty);
if (val.castTag(.decl_ref)) |decl_ref| {
const decl_index = decl_ref.data; if (val.ip_index == .none) switch (ty.zigTypeTag(mod)) {
return func.lowerDeclRefValue(.{ .ty = ty, .val = val }, decl_index, 0); .Array => |zig_type| return func.fail("Wasm TODO: LowerConstant for zigTypeTag {}", .{zig_type}),
} .Struct => {
if (val.castTag(.decl_ref_mut)) |decl_ref_mut| { const struct_obj = mod.typeToStruct(ty).?;
const decl_index = decl_ref_mut.data.decl_index; assert(struct_obj.layout == .Packed);
return func.lowerDeclRefValue(.{ .ty = ty, .val = val }, decl_index, 0); var buf: [8]u8 = .{0} ** 8; // zero the buffer so we do not read 0xaa as integer
} val.writeToPackedMemory(ty, func.bin_file.base.options.module.?, &buf, 0) catch unreachable;
switch (ty.zigTypeTag(mod)) { const int_val = try mod.intValue(
.Void => return WValue{ .none = {} }, struct_obj.backing_int_ty,
.Int => { std.mem.readIntLittle(u64, &buf),
);
return func.lowerConstant(int_val, struct_obj.backing_int_ty);
},
.Vector => {
assert(determineSimdStoreStrategy(ty, mod) == .direct);
var buf: [16]u8 = undefined;
val.writeToMemory(ty, mod, &buf) catch unreachable;
return func.storeSimdImmd(buf);
},
.Frame,
.AnyFrame,
=> return func.fail("Wasm TODO: LowerConstant for type {}", .{ty.fmt(mod)}),
.Float,
.Union,
.Optional,
.ErrorUnion,
.ErrorSet,
.Int,
.Enum,
.Bool,
.Pointer,
=> unreachable, // handled below
.Type,
.Void,
.NoReturn,
.ComptimeFloat,
.ComptimeInt,
.Undefined,
.Null,
.Opaque,
.EnumLiteral,
.Fn,
=> unreachable, // comptime-only types
};
switch (mod.intern_pool.indexToKey(val.ip_index)) {
.int_type,
.ptr_type,
.array_type,
.vector_type,
.opt_type,
.anyframe_type,
.error_union_type,
.simple_type,
.struct_type,
.anon_struct_type,
.union_type,
.opaque_type,
.enum_type,
.func_type,
.error_set_type,
.inferred_error_set_type,
=> unreachable, // types, not values
.undef, .runtime_value => unreachable, // handled above
.simple_value => |simple_value| switch (simple_value) {
.undefined,
.void,
.null,
.empty_struct,
.@"unreachable",
.generic_poison,
=> unreachable, // non-runtime values
.false, .true => return WValue{ .imm32 = switch (simple_value) {
.false => 0,
.true => 1,
else => unreachable,
} },
},
.variable,
.extern_func,
.func,
.enum_literal,
=> unreachable, // non-runtime values
.int => {
const int_info = ty.intInfo(mod); const int_info = ty.intInfo(mod);
switch (int_info.signedness) { switch (int_info.signedness) {
.signed => switch (int_info.bits) { .signed => switch (int_info.bits) {
@ -3080,61 +3180,58 @@ fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue {
}, },
} }
}, },
.Bool => return WValue{ .imm32 = @intCast(u32, val.toUnsignedInt(mod)) }, .err => |err| {
.Float => switch (ty.floatBits(func.target)) { const name = mod.intern_pool.stringToSlice(err.name);
16 => return WValue{ .imm32 = @bitCast(u16, val.toFloat(f16, mod)) }, const kv = try mod.getErrorValue(name);
32 => return WValue{ .float32 = val.toFloat(f32, mod) },
64 => return WValue{ .float64 = val.toFloat(f64, mod) },
else => unreachable,
},
.Pointer => return switch (val.ip_index) {
.null_value => WValue{ .imm32 = 0 },
.none => switch (val.tag()) {
.field_ptr, .elem_ptr, .opt_payload_ptr => func.lowerParentPtr(val, 0),
else => return func.fail("Wasm TODO: lowerConstant for other const pointer tag {}", .{val.tag()}),
},
else => switch (mod.intern_pool.indexToKey(val.ip_index)) {
.int => |int| WValue{ .imm32 = @intCast(u32, int.storage.u64) },
else => unreachable,
},
},
.Enum => {
const enum_tag = mod.intern_pool.indexToKey(val.ip_index).enum_tag;
const int_tag_ty = mod.intern_pool.typeOf(enum_tag.int);
return func.lowerConstant(enum_tag.int.toValue(), int_tag_ty.toType());
},
.ErrorSet => switch (val.tag()) {
.@"error" => {
const kv = try func.bin_file.base.options.module.?.getErrorValue(val.getError().?);
return WValue{ .imm32 = kv.value }; return WValue{ .imm32 = kv.value };
}, },
else => return WValue{ .imm32 = 0 }, .error_union => {
},
.ErrorUnion => {
const error_type = ty.errorUnionSet(mod); const error_type = ty.errorUnionSet(mod);
const payload_type = ty.errorUnionPayload(mod); const payload_type = ty.errorUnionPayload(mod);
if (!payload_type.hasRuntimeBitsIgnoreComptime(mod)) { if (!payload_type.hasRuntimeBitsIgnoreComptime(mod)) {
// We use the error type directly as the type. // We use the error type directly as the type.
const is_pl = val.errorUnionIsPayload(); const is_pl = val.errorUnionIsPayload(mod);
const err_val = if (!is_pl) val else try mod.intValue(error_type, 0); const err_val = if (!is_pl) val else try mod.intValue(error_type, 0);
return func.lowerConstant(err_val, error_type); return func.lowerConstant(err_val, error_type);
} }
return func.fail("Wasm TODO: lowerConstant error union with non-zero-bit payload type", .{}); return func.fail("Wasm TODO: lowerConstant error union with non-zero-bit payload type", .{});
}, },
.Optional => if (ty.optionalReprIsPayload(mod)) { .enum_tag => |enum_tag| {
const int_tag_ty = mod.intern_pool.typeOf(enum_tag.int);
return func.lowerConstant(enum_tag.int.toValue(), int_tag_ty.toType());
},
.float => |float| switch (float.storage) {
.f16 => |f16_val| return WValue{ .imm32 = @bitCast(u16, f16_val) },
.f32 => |f32_val| return WValue{ .float32 = f32_val },
.f64 => |f64_val| return WValue{ .float64 = f64_val },
else => unreachable,
},
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| return func.lowerDeclRefValue(.{ .ty = ty, .val = val }, decl, 0),
.mut_decl => |mut_decl| return func.lowerDeclRefValue(.{ .ty = ty, .val = val }, mut_decl.decl, 0),
.int => |int| return func.lowerConstant(int.toValue(), mod.intern_pool.typeOf(int).toType()),
.opt_payload, .elem, .field => return func.lowerParentPtr(val),
else => return func.fail("Wasm TODO: lowerConstant for other const addr tag {}", .{ptr.addr}),
},
.opt => if (ty.optionalReprIsPayload(mod)) {
const pl_ty = ty.optionalChild(mod); const pl_ty = ty.optionalChild(mod);
if (val.castTag(.opt_payload)) |payload| { if (val.optionalValue(mod)) |payload| {
return func.lowerConstant(payload.data, pl_ty); return func.lowerConstant(payload, pl_ty);
} else if (val.isNull(mod)) {
return WValue{ .imm32 = 0 };
} else { } else {
return func.lowerConstant(val, pl_ty); return WValue{ .imm32 = 0 };
} }
} else { } else {
const is_pl = val.tag() == .opt_payload; return WValue{ .imm32 = @boolToInt(!val.isNull(mod)) };
return WValue{ .imm32 = @boolToInt(is_pl) };
}, },
.Struct => { .aggregate => switch (mod.intern_pool.indexToKey(ty.ip_index)) {
.array_type => return func.fail("Wasm TODO: LowerConstant for {}", .{ty.fmt(mod)}),
.vector_type => {
assert(determineSimdStoreStrategy(ty, mod) == .direct);
var buf: [16]u8 = undefined;
val.writeToMemory(ty, mod, &buf) catch unreachable;
return func.storeSimdImmd(buf);
},
.struct_type, .anon_struct_type => {
const struct_obj = mod.typeToStruct(ty).?; const struct_obj = mod.typeToStruct(ty).?;
assert(struct_obj.layout == .Packed); assert(struct_obj.layout == .Packed);
var buf: [8]u8 = .{0} ** 8; // zero the buffer so we do not read 0xaa as integer var buf: [8]u8 = .{0} ** 8; // zero the buffer so we do not read 0xaa as integer
@ -3145,21 +3242,9 @@ fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue {
); );
return func.lowerConstant(int_val, struct_obj.backing_int_ty); return func.lowerConstant(int_val, struct_obj.backing_int_ty);
}, },
.Vector => { else => unreachable,
assert(determineSimdStoreStrategy(ty, mod) == .direct);
var buf: [16]u8 = undefined;
val.writeToMemory(ty, func.bin_file.base.options.module.?, &buf) catch unreachable;
return func.storeSimdImmd(buf);
}, },
.Union => { .un => return func.fail("Wasm TODO: LowerConstant for {}", .{ty.fmt(mod)}),
// in this case we have a packed union which will not be passed by reference.
const union_ty = mod.typeToUnion(ty).?;
const union_obj = val.castTag(.@"union").?.data;
const field_index = ty.unionTagFieldIndex(union_obj.tag, func.bin_file.base.options.module.?).?;
const field_ty = union_ty.fields.values()[field_index].ty;
return func.lowerConstant(union_obj.val, field_ty);
},
else => |zig_type| return func.fail("Wasm TODO: LowerConstant for zigTypeTag {}", .{zig_type}),
} }
} }
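The new lowerConstant above no longer dispatches on ty.zigTypeTag plus value tags; it switches once over the interned key and marks every non-runtime case (types, comptime-only values) unreachable, so only genuinely lowerable constants remain. A compact sketch of that shape, with made-up Key and WValue variants (the real unions are much larger):

const std = @import("std");

const Key = union(enum) {
    int_type: u16, // a type, never a runtime value
    int: u64,
    float: f64,
    opt: ?u64, // null or a payload
};

const WValue = union(enum) { imm32: u32, float64: f64 };

fn lowerConstant(key: Key) WValue {
    return switch (key) {
        .int_type => unreachable, // types are not lowered as values
        .int => |x| .{ .imm32 = @intCast(u32, x) },
        .float => |x| .{ .float64 = x },
        .opt => |maybe| .{ .imm32 = if (maybe) |pl| @intCast(u32, pl) else 0 },
    };
}

pub fn main() void {
    const w = lowerConstant(.{ .opt = 42 });
    std.debug.print("lowered to {s}\n", .{@tagName(std.meta.activeTag(w))});
}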
@ -3221,31 +3306,33 @@ fn valueAsI32(func: *const CodeGen, val: Value, ty: Type) i32 {
.bool_true => return 1, .bool_true => return 1,
.bool_false => return 0, .bool_false => return 0,
else => return switch (mod.intern_pool.indexToKey(val.ip_index)) { else => return switch (mod.intern_pool.indexToKey(val.ip_index)) {
.enum_tag => |enum_tag| intIndexAsI32(&mod.intern_pool, enum_tag.int), .enum_tag => |enum_tag| intIndexAsI32(&mod.intern_pool, enum_tag.int, mod),
.int => |int| intStorageAsI32(int.storage), .int => |int| intStorageAsI32(int.storage, mod),
.ptr => |ptr| intIndexAsI32(&mod.intern_pool, ptr.addr.int), .ptr => |ptr| intIndexAsI32(&mod.intern_pool, ptr.addr.int, mod),
else => unreachable, else => unreachable,
}, },
} }
switch (ty.zigTypeTag(mod)) { switch (ty.zigTypeTag(mod)) {
.ErrorSet => { .ErrorSet => {
const kv = func.bin_file.base.options.module.?.getErrorValue(val.getError().?) catch unreachable; // passed invalid `Value` to function const kv = func.bin_file.base.options.module.?.getErrorValue(val.getError(mod).?) catch unreachable; // passed invalid `Value` to function
return @bitCast(i32, kv.value); return @bitCast(i32, kv.value);
}, },
else => unreachable, // Programmer called this function for an illegal type else => unreachable, // Programmer called this function for an illegal type
} }
} }
fn intIndexAsI32(ip: *const InternPool, int: InternPool.Index) i32 { fn intIndexAsI32(ip: *const InternPool, int: InternPool.Index, mod: *Module) i32 {
return intStorageAsI32(ip.indexToKey(int).int.storage); return intStorageAsI32(ip.indexToKey(int).int.storage, mod);
} }
fn intStorageAsI32(storage: InternPool.Key.Int.Storage) i32 { fn intStorageAsI32(storage: InternPool.Key.Int.Storage, mod: *Module) i32 {
return switch (storage) { return switch (storage) {
.i64 => |x| @intCast(i32, x), .i64 => |x| @intCast(i32, x),
.u64 => |x| @bitCast(i32, @intCast(u32, x)), .u64 => |x| @bitCast(i32, @intCast(u32, x)),
.big_int => unreachable, .big_int => unreachable,
.lazy_align => |ty| @bitCast(i32, ty.toType().abiAlignment(mod)),
.lazy_size => |ty| @bitCast(i32, @intCast(u32, ty.toType().abiSize(mod))),
}; };
} }
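The two lazy storage cases added to intStorageAsI32 above (lazy_align, lazy_size) are integers whose value is only materialized by asking a type for its ABI alignment or size. A toy model of that storage, with a FakeType standing in for the compiler's Type:

const std = @import("std");

const FakeType = struct { abi_size: u64, abi_align: u32 };

const IntStorage = union(enum) {
    u64: u64,
    lazy_align: FakeType,
    lazy_size: FakeType,
};

fn intStorageAsU64(storage: IntStorage) u64 {
    return switch (storage) {
        .u64 => |x| x,
        // Lazy cases compute the number on demand from the referenced type.
        .lazy_align => |ty| ty.abi_align,
        .lazy_size => |ty| ty.abi_size,
    };
}

pub fn main() void {
    const vec4_f32 = FakeType{ .abi_size = 16, .abi_align = 16 };
    std.debug.print("align = {d}, size = {d}\n", .{
        intStorageAsU64(.{ .lazy_align = vec4_f32 }),
        intStorageAsU64(.{ .lazy_size = vec4_f32 }),
    });
}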
@ -5514,7 +5601,7 @@ fn airErrorName(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
// As the names are global and the slice elements are constant, we do not have // As the names are global and the slice elements are constant, we do not have
// to make a copy of the ptr+value but can point towards them directly. // to make a copy of the ptr+value but can point towards them directly.
const error_table_symbol = try func.bin_file.getErrorTableSymbol(); const error_table_symbol = try func.bin_file.getErrorTableSymbol();
const name_ty = Type.const_slice_u8_sentinel_0; const name_ty = Type.slice_const_u8_sentinel_0;
const mod = func.bin_file.base.options.module.?; const mod = func.bin_file.base.options.module.?;
const abi_size = name_ty.abiSize(mod); const abi_size = name_ty.abiSize(mod);
@ -6935,7 +7022,7 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
// finish function body // finish function body
try writer.writeByte(std.wasm.opcode(.end)); try writer.writeByte(std.wasm.opcode(.end));
const slice_ty = Type.const_slice_u8_sentinel_0; const slice_ty = Type.slice_const_u8_sentinel_0;
const func_type = try genFunctype(arena, .Unspecified, &.{int_tag_ty.ip_index}, slice_ty, mod); const func_type = try genFunctype(arena, .Unspecified, &.{int_tag_ty.ip_index}, slice_ty, mod);
return func.bin_file.createFunction(func_name, func_type, &body_list, &relocs); return func.bin_file.createFunction(func_name, func_type, &body_list, &relocs);
} }

View file

@ -632,7 +632,7 @@ const Self = @This();
pub fn generate( pub fn generate(
bin_file: *link.File, bin_file: *link.File,
src_loc: Module.SrcLoc, src_loc: Module.SrcLoc,
module_fn: *Module.Fn, module_fn_index: Module.Fn.Index,
air: Air, air: Air,
liveness: Liveness, liveness: Liveness,
code: *std.ArrayList(u8), code: *std.ArrayList(u8),
@ -643,6 +643,7 @@ pub fn generate(
} }
const mod = bin_file.options.module.?; const mod = bin_file.options.module.?;
const module_fn = mod.funcPtr(module_fn_index);
const fn_owner_decl = mod.declPtr(module_fn.owner_decl); const fn_owner_decl = mod.declPtr(module_fn.owner_decl);
assert(fn_owner_decl.has_tv); assert(fn_owner_decl.has_tv);
const fn_type = fn_owner_decl.ty; const fn_type = fn_owner_decl.ty;
@ -687,7 +688,7 @@ pub fn generate(
@enumToInt(FrameIndex.stack_frame), @enumToInt(FrameIndex.stack_frame),
FrameAlloc.init(.{ FrameAlloc.init(.{
.size = 0, .size = 0,
.alignment = if (mod.align_stack_fns.get(module_fn)) |set_align_stack| .alignment = if (mod.align_stack_fns.get(module_fn_index)) |set_align_stack|
set_align_stack.alignment set_align_stack.alignment
else else
1, 1,
@ -2760,19 +2761,18 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
const elem_ty = src_ty.childType(mod); const elem_ty = src_ty.childType(mod);
const mask_val = try mod.intValue(elem_ty, @as(u64, math.maxInt(u64)) >> @intCast(u6, 64 - dst_info.bits)); const mask_val = try mod.intValue(elem_ty, @as(u64, math.maxInt(u64)) >> @intCast(u6, 64 - dst_info.bits));
var splat_pl = Value.Payload.SubValue{ const splat_ty = try mod.vectorType(.{
.base = .{ .tag = .repeated },
.data = mask_val,
};
const splat_val = Value.initPayload(&splat_pl.base);
const full_ty = try mod.vectorType(.{
.len = @intCast(u32, @divExact(@as(u64, if (src_abi_size > 16) 256 else 128), src_info.bits)), .len = @intCast(u32, @divExact(@as(u64, if (src_abi_size > 16) 256 else 128), src_info.bits)),
.child = elem_ty.ip_index, .child = elem_ty.ip_index,
}); });
const full_abi_size = @intCast(u32, full_ty.abiSize(mod)); const splat_abi_size = @intCast(u32, splat_ty.abiSize(mod));
const splat_mcv = try self.genTypedValue(.{ .ty = full_ty, .val = splat_val }); const splat_val = try mod.intern(.{ .aggregate = .{
.ty = splat_ty.ip_index,
.storage = .{ .repeated_elem = mask_val.ip_index },
} });
const splat_mcv = try self.genTypedValue(.{ .ty = splat_ty, .val = splat_val.toValue() });
const splat_addr_mcv: MCValue = switch (splat_mcv) { const splat_addr_mcv: MCValue = switch (splat_mcv) {
.memory, .indirect, .load_frame => splat_mcv.address(), .memory, .indirect, .load_frame => splat_mcv.address(),
else => .{ .register = try self.copyToTmpRegister(Type.usize, splat_mcv.address()) }, else => .{ .register = try self.copyToTmpRegister(Type.usize, splat_mcv.address()) },
@ -2784,14 +2784,14 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
.{ .vp_, .@"and" }, .{ .vp_, .@"and" },
dst_reg, dst_reg,
dst_reg, dst_reg,
splat_addr_mcv.deref().mem(Memory.PtrSize.fromSize(full_abi_size)), splat_addr_mcv.deref().mem(Memory.PtrSize.fromSize(splat_abi_size)),
); );
try self.asmRegisterRegisterRegister(mir_tag, dst_reg, dst_reg, dst_reg); try self.asmRegisterRegisterRegister(mir_tag, dst_reg, dst_reg, dst_reg);
} else { } else {
try self.asmRegisterMemory( try self.asmRegisterMemory(
.{ .p_, .@"and" }, .{ .p_, .@"and" },
dst_reg, dst_reg,
splat_addr_mcv.deref().mem(Memory.PtrSize.fromSize(full_abi_size)), splat_addr_mcv.deref().mem(Memory.PtrSize.fromSize(splat_abi_size)),
); );
try self.asmRegisterRegister(mir_tag, dst_reg, dst_reg); try self.asmRegisterRegister(mir_tag, dst_reg, dst_reg);
} }
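In the airTrunc change above, the mask splat is no longer built from a Value.Payload.SubValue tagged .repeated; it is interned as an aggregate whose storage says "one element, repeated". A small sketch of repeated-element storage, with invented types:

const std = @import("std");

const Aggregate = struct {
    len: u32,
    storage: union(enum) {
        elems: []const u64,
        repeated_elem: u64,
    },

    fn elem(self: Aggregate, i: u32) u64 {
        return switch (self.storage) {
            .elems => |elems| elems[i],
            .repeated_elem => |val| val, // same value for every lane
        };
    }
};

pub fn main() void {
    const splat = Aggregate{ .len = 4, .storage = .{ .repeated_elem = 0x00ff } };
    var i: u32 = 0;
    while (i < splat.len) : (i += 1) {
        std.debug.print("lane {d}: 0x{x:0>4}\n", .{ i, splat.elem(i) });
    }
}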
@ -4893,23 +4893,14 @@ fn airFloatSign(self: *Self, inst: Air.Inst.Index) !void {
const dst_lock = self.register_manager.lockReg(dst_reg); const dst_lock = self.register_manager.lockReg(dst_reg);
defer if (dst_lock) |lock| self.register_manager.unlockReg(lock); defer if (dst_lock) |lock| self.register_manager.unlockReg(lock);
var arena = std.heap.ArenaAllocator.init(self.gpa);
defer arena.deinit();
const ExpectedContents = struct {
repeated: Value.Payload.SubValue,
};
var stack align(@alignOf(ExpectedContents)) =
std.heap.stackFallback(@sizeOf(ExpectedContents), arena.allocator());
const vec_ty = try mod.vectorType(.{ const vec_ty = try mod.vectorType(.{
.len = @divExact(abi_size * 8, scalar_bits), .len = @divExact(abi_size * 8, scalar_bits),
.child = (try mod.intType(.signed, scalar_bits)).ip_index, .child = (try mod.intType(.signed, scalar_bits)).ip_index,
}); });
const sign_val = switch (tag) { const sign_val = switch (tag) {
.neg => try vec_ty.minInt(stack.get(), mod), .neg => try vec_ty.minInt(mod),
.fabs => try vec_ty.maxInt(stack.get(), mod, vec_ty), .fabs => try vec_ty.maxInt(mod, vec_ty),
else => unreachable, else => unreachable,
}; };
@ -8106,13 +8097,15 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
// Due to incremental compilation, how function calls are generated depends // Due to incremental compilation, how function calls are generated depends
// on linking. // on linking.
if (try self.air.value(callee, mod)) |func_value| { if (try self.air.value(callee, mod)) |func_value| {
if (if (func_value.castTag(.function)) |func_payload| const func_key = mod.intern_pool.indexToKey(func_value.ip_index);
func_payload.data.owner_decl if (switch (func_key) {
else if (func_value.castTag(.decl_ref)) |decl_ref_payload| .func => |func| mod.funcPtr(func.index).owner_decl,
decl_ref_payload.data .ptr => |ptr| switch (ptr.addr) {
else .decl => |decl| decl,
null) |owner_decl| else => null,
{ },
else => null,
}) |owner_decl| {
if (self.bin_file.cast(link.File.Elf)) |elf_file| { if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(owner_decl); const atom_index = try elf_file.getOrCreateAtomForDecl(owner_decl);
const atom = elf_file.getAtom(atom_index); const atom = elf_file.getAtom(atom_index);
@ -8145,10 +8138,9 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.disp = @intCast(i32, fn_got_addr), .disp = @intCast(i32, fn_got_addr),
})); }));
} else unreachable; } else unreachable;
} else if (func_value.castTag(.extern_fn)) |func_payload| { } else if (func_value.getExternFunc(mod)) |extern_func| {
const extern_fn = func_payload.data; const decl_name = mem.sliceTo(mod.declPtr(extern_func.decl).name, 0);
const decl_name = mem.sliceTo(mod.declPtr(extern_fn.owner_decl).name, 0); const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
const lib_name = mem.sliceTo(extern_fn.lib_name, 0);
if (self.bin_file.cast(link.File.Coff)) |coff_file| { if (self.bin_file.cast(link.File.Coff)) |coff_file| {
const atom_index = try self.owner.getSymbolIndex(self); const atom_index = try self.owner.getSymbolIndex(self);
const sym_index = try coff_file.getGlobalSymbol(decl_name, lib_name); const sym_index = try coff_file.getGlobalSymbol(decl_name, lib_name);
@ -8554,7 +8546,8 @@ fn airDbgStmt(self: *Self, inst: Air.Inst.Index) !void {
fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void { fn airDbgInline(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const function = self.air.values[ty_pl.payload].castTag(.function).?.data; const mod = self.bin_file.options.module.?;
const function = self.air.values[ty_pl.payload].getFunction(mod).?;
// TODO emit debug info for function change // TODO emit debug info for function change
_ = function; _ = function;
return self.finishAir(inst, .unreach, .{ .none, .none, .none }); return self.finishAir(inst, .unreach, .{ .none, .none, .none });

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -236,9 +236,9 @@ pub const DeclGen = struct {
if (try self.air.value(inst, mod)) |val| { if (try self.air.value(inst, mod)) |val| {
const ty = self.typeOf(inst); const ty = self.typeOf(inst);
if (ty.zigTypeTag(mod) == .Fn) { if (ty.zigTypeTag(mod) == .Fn) {
const fn_decl_index = switch (val.tag()) { const fn_decl_index = switch (mod.intern_pool.indexToKey(val.ip_index)) {
.extern_fn => val.castTag(.extern_fn).?.data.owner_decl, .extern_func => |extern_func| extern_func.decl,
.function => val.castTag(.function).?.data.owner_decl, .func => |func| mod.funcPtr(func.index).owner_decl,
else => unreachable, else => unreachable,
}; };
const spv_decl_index = try self.resolveDecl(fn_decl_index); const spv_decl_index = try self.resolveDecl(fn_decl_index);
@ -261,7 +261,7 @@ pub const DeclGen = struct {
const entry = try self.decl_link.getOrPut(decl_index); const entry = try self.decl_link.getOrPut(decl_index);
if (!entry.found_existing) { if (!entry.found_existing) {
// TODO: Extern fn? // TODO: Extern fn?
const kind: SpvModule.DeclKind = if (decl.val.tag() == .function) const kind: SpvModule.DeclKind = if (decl.getFunctionIndex(self.module) != .none)
.func .func
else else
.global; .global;
@ -573,6 +573,7 @@ pub const DeclGen = struct {
fn addDeclRef(self: *@This(), ty: Type, decl_index: Decl.Index) !void { fn addDeclRef(self: *@This(), ty: Type, decl_index: Decl.Index) !void {
const dg = self.dg; const dg = self.dg;
const mod = dg.module;
const ty_ref = try self.dg.resolveType(ty, .indirect); const ty_ref = try self.dg.resolveType(ty, .indirect);
const ty_id = dg.typeId(ty_ref); const ty_id = dg.typeId(ty_ref);
@ -580,8 +581,8 @@ pub const DeclGen = struct {
const decl = dg.module.declPtr(decl_index); const decl = dg.module.declPtr(decl_index);
const spv_decl_index = try dg.resolveDecl(decl_index); const spv_decl_index = try dg.resolveDecl(decl_index);
switch (decl.val.tag()) { switch (mod.intern_pool.indexToKey(decl.val.ip_index)) {
.function => { .func => {
// TODO: Properly lower function pointers. For now we are going to hack around it and // TODO: Properly lower function pointers. For now we are going to hack around it and
// just generate an empty pointer. Function pointers are represented by usize for now, // just generate an empty pointer. Function pointers are represented by usize for now,
// though. // though.
@ -589,7 +590,7 @@ pub const DeclGen = struct {
// TODO: Add dependency // TODO: Add dependency
return; return;
}, },
.extern_fn => unreachable, // TODO .extern_func => unreachable, // TODO
else => { else => {
const result_id = dg.spv.allocId(); const result_id = dg.spv.allocId();
log.debug("addDeclRef: id = {}, index = {}, name = {s}", .{ result_id.id, @enumToInt(spv_decl_index), decl.name }); log.debug("addDeclRef: id = {}, index = {}, name = {s}", .{ result_id.id, @enumToInt(spv_decl_index), decl.name });
@ -610,39 +611,23 @@ pub const DeclGen = struct {
} }
} }
fn lower(self: *@This(), ty: Type, val: Value) !void { fn lower(self: *@This(), ty: Type, arg_val: Value) !void {
const dg = self.dg; const dg = self.dg;
const mod = dg.module; const mod = dg.module;
if (val.isUndef(mod)) { var val = arg_val;
switch (mod.intern_pool.indexToKey(val.ip_index)) {
.runtime_value => |rt| val = rt.val.toValue(),
else => {},
}
if (val.isUndefDeep(mod)) {
const size = ty.abiSize(mod); const size = ty.abiSize(mod);
return try self.addUndef(size); return try self.addUndef(size);
} }
switch (ty.zigTypeTag(mod)) { if (val.ip_index == .none) switch (ty.zigTypeTag(mod)) {
.Int => try self.addInt(ty, val),
.Float => try self.addFloat(ty, val),
.Bool => try self.addConstBool(val.toBool(mod)),
.Array => switch (val.tag()) { .Array => switch (val.tag()) {
.aggregate => {
const elem_vals = val.castTag(.aggregate).?.data;
const elem_ty = ty.childType(mod);
const len = @intCast(u32, ty.arrayLenIncludingSentinel(mod)); // TODO: limit spir-v to 32 bit arrays in a more elegant way.
for (elem_vals[0..len]) |elem_val| {
try self.lower(elem_ty, elem_val);
}
},
.repeated => {
const elem_val = val.castTag(.repeated).?.data;
const elem_ty = ty.childType(mod);
const len = @intCast(u32, ty.arrayLen(mod));
for (0..len) |_| {
try self.lower(elem_ty, elem_val);
}
if (ty.sentinel(mod)) |sentinel| {
try self.lower(elem_ty, sentinel);
}
},
.str_lit => { .str_lit => {
const str_lit = val.castTag(.str_lit).?.data; const str_lit = val.castTag(.str_lit).?.data;
const bytes = dg.module.string_literal_bytes.items[str_lit.index..][0..str_lit.len]; const bytes = dg.module.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
@ -657,29 +642,6 @@ pub const DeclGen = struct {
}, },
else => |tag| return dg.todo("indirect array constant with tag {s}", .{@tagName(tag)}), else => |tag| return dg.todo("indirect array constant with tag {s}", .{@tagName(tag)}),
}, },
.Pointer => switch (val.tag()) {
.decl_ref_mut => {
const decl_index = val.castTag(.decl_ref_mut).?.data.decl_index;
try self.addDeclRef(ty, decl_index);
},
.decl_ref => {
const decl_index = val.castTag(.decl_ref).?.data;
try self.addDeclRef(ty, decl_index);
},
.slice => {
const slice = val.castTag(.slice).?.data;
const ptr_ty = ty.slicePtrFieldType(mod);
try self.lower(ptr_ty, slice.ptr);
try self.addInt(Type.usize, slice.len);
},
.zero => try self.addNullPtr(try dg.resolveType(ty, .indirect)),
.int_u64, .one, .int_big_positive, .lazy_align, .lazy_size => {
try self.addInt(Type.usize, val);
},
else => |tag| return dg.todo("pointer value of type {s}", .{@tagName(tag)}),
},
.Struct => { .Struct => {
if (ty.isSimpleTupleOrAnonStruct(mod)) { if (ty.isSimpleTupleOrAnonStruct(mod)) {
unreachable; // TODO unreachable; // TODO
@ -705,20 +667,134 @@ pub const DeclGen = struct {
} }
} }
}, },
.Optional => { .Vector,
.Frame,
.AnyFrame,
=> return dg.todo("indirect constant of type {}", .{ty.fmt(mod)}),
.Float,
.Union,
.Optional,
.ErrorUnion,
.ErrorSet,
.Int,
.Enum,
.Bool,
.Pointer,
=> unreachable, // handled below
.Type,
.Void,
.NoReturn,
.ComptimeFloat,
.ComptimeInt,
.Undefined,
.Null,
.Opaque,
.EnumLiteral,
.Fn,
=> unreachable, // comptime-only types
};
switch (mod.intern_pool.indexToKey(val.ip_index)) {
.int_type,
.ptr_type,
.array_type,
.vector_type,
.opt_type,
.anyframe_type,
.error_union_type,
.simple_type,
.struct_type,
.anon_struct_type,
.union_type,
.opaque_type,
.enum_type,
.func_type,
.error_set_type,
.inferred_error_set_type,
=> unreachable, // types, not values
.undef, .runtime_value => unreachable, // handled above
.simple_value => |simple_value| switch (simple_value) {
.undefined,
.void,
.null,
.empty_struct,
.@"unreachable",
.generic_poison,
=> unreachable, // non-runtime values
.false, .true => try self.addConstBool(val.toBool(mod)),
},
.variable,
.extern_func,
.func,
.enum_literal,
=> unreachable, // non-runtime values
.int => try self.addInt(ty, val),
.err => |err| {
const name = mod.intern_pool.stringToSlice(err.name);
const kv = try mod.getErrorValue(name);
try self.addConstInt(u16, @intCast(u16, kv.value));
},
.error_union => |error_union| {
const payload_ty = ty.errorUnionPayload(mod);
const is_pl = val.errorUnionIsPayload(mod);
const error_val = if (!is_pl) val else try mod.intValue(Type.anyerror, 0);
const eu_layout = dg.errorUnionLayout(payload_ty);
if (!eu_layout.payload_has_bits) {
return try self.lower(Type.anyerror, error_val);
}
const payload_size = payload_ty.abiSize(mod);
const error_size = Type.anyerror.abiAlignment(mod);
const ty_size = ty.abiSize(mod);
const padding = ty_size - payload_size - error_size;
const payload_val = switch (error_union.val) {
.err_name => try mod.intern(.{ .undef = payload_ty.ip_index }),
.payload => |payload| payload,
}.toValue();
if (eu_layout.error_first) {
try self.lower(Type.anyerror, error_val);
try self.lower(payload_ty, payload_val);
} else {
try self.lower(payload_ty, payload_val);
try self.lower(Type.anyerror, error_val);
}
try self.addUndef(padding);
},
.enum_tag => {
const int_val = try val.enumToInt(ty, mod);
const int_ty = try ty.intTagType(mod);
try self.lower(int_ty, int_val);
},
.float => try self.addFloat(ty, val),
.ptr => |ptr| {
switch (ptr.addr) {
.decl => |decl| try self.addDeclRef(ty, decl),
.mut_decl => |mut_decl| try self.addDeclRef(ty, mut_decl.decl),
else => |tag| return dg.todo("pointer value of type {s}", .{@tagName(tag)}),
}
if (ptr.len != .none) {
try self.addInt(Type.usize, ptr.len.toValue());
}
},
.opt => {
const payload_ty = ty.optionalChild(mod); const payload_ty = ty.optionalChild(mod);
const has_payload = !val.isNull(mod); const payload_val = val.optionalValue(mod);
const abi_size = ty.abiSize(mod); const abi_size = ty.abiSize(mod);
if (!payload_ty.hasRuntimeBits(mod)) { if (!payload_ty.hasRuntimeBits(mod)) {
try self.addConstBool(has_payload); try self.addConstBool(payload_val != null);
return; return;
} else if (ty.optionalReprIsPayload(mod)) { } else if (ty.optionalReprIsPayload(mod)) {
// Optional representation is a nullable pointer or slice. // Optional representation is a nullable pointer or slice.
if (val.castTag(.opt_payload)) |payload| { if (payload_val) |pl_val| {
try self.lower(payload_ty, payload.data); try self.lower(payload_ty, pl_val);
} else if (has_payload) {
try self.lower(payload_ty, val);
} else { } else {
const ptr_ty_ref = try dg.resolveType(ty, .indirect); const ptr_ty_ref = try dg.resolveType(ty, .indirect);
try self.addNullPtr(ptr_ty_ref); try self.addNullPtr(ptr_ty_ref);
@ -734,27 +810,63 @@ pub const DeclGen = struct {
const payload_size = payload_ty.abiSize(mod); const payload_size = payload_ty.abiSize(mod);
const padding = abi_size - payload_size - 1; const padding = abi_size - payload_size - 1;
if (val.castTag(.opt_payload)) |payload| { if (payload_val) |pl_val| {
try self.lower(payload_ty, payload.data); try self.lower(payload_ty, pl_val);
} else { } else {
try self.addUndef(payload_size); try self.addUndef(payload_size);
} }
try self.addConstBool(has_payload); try self.addConstBool(payload_val != null);
try self.addUndef(padding); try self.addUndef(padding);
}, },
.Enum => { .aggregate => |aggregate| switch (mod.intern_pool.indexToKey(ty.ip_index)) {
const int_val = try val.enumToInt(ty, mod); .array_type => |array_type| {
const elem_ty = array_type.child.toType();
const int_ty = try ty.intTagType(mod); switch (aggregate.storage) {
.bytes => |bytes| try self.addBytes(bytes),
try self.lower(int_ty, int_val); .elems, .repeated_elem => {
for (0..array_type.len) |i| {
try self.lower(elem_ty, switch (aggregate.storage) {
.bytes => unreachable,
.elems => |elem_vals| elem_vals[@intCast(usize, i)].toValue(),
.repeated_elem => |elem_val| elem_val.toValue(),
});
}
}, },
.Union => { }
const tag_and_val = val.castTag(.@"union").?.data; if (array_type.sentinel != .none) {
try self.lower(elem_ty, array_type.sentinel.toValue());
}
},
.vector_type => return dg.todo("indirect constant of type {}", .{ty.fmt(mod)}),
.struct_type => {
const struct_ty = mod.typeToStruct(ty).?;
if (struct_ty.layout == .Packed) {
return dg.todo("packed struct constants", .{});
}
const struct_begin = self.size;
const field_vals = val.castTag(.aggregate).?.data;
for (struct_ty.fields.values(), 0..) |field, i| {
if (field.is_comptime or !field.ty.hasRuntimeBits(mod)) continue;
try self.lower(field.ty, field_vals[i]);
// Add padding if required.
// TODO: Add to type generation as well?
const unpadded_field_end = self.size - struct_begin;
const padded_field_end = ty.structFieldOffset(i + 1, mod);
const padding = padded_field_end - unpadded_field_end;
try self.addUndef(padding);
}
},
.anon_struct_type => unreachable, // TODO
else => unreachable,
},
.un => |un| {
const layout = ty.unionGetLayout(mod); const layout = ty.unionGetLayout(mod);
if (layout.payload_size == 0) { if (layout.payload_size == 0) {
return try self.lower(ty.unionTagTypeSafety(mod).?, tag_and_val.tag); return try self.lower(ty.unionTagTypeSafety(mod).?, un.tag.toValue());
} }
const union_ty = mod.typeToUnion(ty).?; const union_ty = mod.typeToUnion(ty).?;
@ -762,18 +874,18 @@ pub const DeclGen = struct {
return dg.todo("packed union constants", .{}); return dg.todo("packed union constants", .{});
} }
const active_field = ty.unionTagFieldIndex(tag_and_val.tag, dg.module).?; const active_field = ty.unionTagFieldIndex(un.tag.toValue(), dg.module).?;
const active_field_ty = union_ty.fields.values()[active_field].ty; const active_field_ty = union_ty.fields.values()[active_field].ty;
const has_tag = layout.tag_size != 0; const has_tag = layout.tag_size != 0;
const tag_first = layout.tag_align >= layout.payload_align; const tag_first = layout.tag_align >= layout.payload_align;
if (has_tag and tag_first) { if (has_tag and tag_first) {
try self.lower(ty.unionTagTypeSafety(mod).?, tag_and_val.tag); try self.lower(ty.unionTagTypeSafety(mod).?, un.tag.toValue());
} }
const active_field_size = if (active_field_ty.hasRuntimeBitsIgnoreComptime(mod)) blk: { const active_field_size = if (active_field_ty.hasRuntimeBitsIgnoreComptime(mod)) blk: {
try self.lower(active_field_ty, tag_and_val.val); try self.lower(active_field_ty, un.val.toValue());
break :blk active_field_ty.abiSize(mod); break :blk active_field_ty.abiSize(mod);
} else 0; } else 0;
@ -781,53 +893,11 @@ pub const DeclGen = struct {
try self.addUndef(payload_padding_len); try self.addUndef(payload_padding_len);
if (has_tag and !tag_first) { if (has_tag and !tag_first) {
try self.lower(ty.unionTagTypeSafety(mod).?, tag_and_val.tag); try self.lower(ty.unionTagTypeSafety(mod).?, un.tag.toValue());
} }
try self.addUndef(layout.padding); try self.addUndef(layout.padding);
}, },
.ErrorSet => switch (val.ip_index) {
.none => switch (val.tag()) {
.@"error" => {
const err_name = val.castTag(.@"error").?.data.name;
const kv = try dg.module.getErrorValue(err_name);
try self.addConstInt(u16, @intCast(u16, kv.value));
},
else => unreachable,
},
else => switch (mod.intern_pool.indexToKey(val.ip_index)) {
.int => |int| try self.addConstInt(u16, @intCast(u16, int.storage.u64)),
else => unreachable,
},
},
.ErrorUnion => {
const payload_ty = ty.errorUnionPayload(mod);
const is_pl = val.errorUnionIsPayload();
const error_val = if (!is_pl) val else try mod.intValue(Type.anyerror, 0);
const eu_layout = dg.errorUnionLayout(payload_ty);
if (!eu_layout.payload_has_bits) {
return try self.lower(Type.anyerror, error_val);
}
const payload_size = payload_ty.abiSize(mod);
const error_size = Type.anyerror.abiAlignment(mod);
const ty_size = ty.abiSize(mod);
const padding = ty_size - payload_size - error_size;
const payload_val = if (val.castTag(.eu_payload)) |pl| pl.data else Value.undef;
if (eu_layout.error_first) {
try self.lower(Type.anyerror, error_val);
try self.lower(payload_ty, payload_val);
} else {
try self.lower(payload_ty, payload_val);
try self.lower(Type.anyerror, error_val);
}
try self.addUndef(padding);
},
else => |tag| return dg.todo("indirect constant of type {s}", .{@tagName(tag)}),
} }
} }
}; };
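The new aggregate case above walks the InternPool's three storage modes for array contents: raw `.bytes`, an explicit `.elems` list, or a single `.repeated_elem`. A minimal standalone sketch of that shape — the union and field names below are simplified stand-ins, not the compiler's real InternPool definitions:

```zig
const std = @import("std");

// Simplified sketch of the three aggregate storage modes switched over
// above; field names mirror the diff, but these are placeholder types.
const Storage = union(enum) {
    bytes: []const u8,
    elems: []const u32,
    repeated_elem: u32,
};

fn elemAt(storage: Storage, i: usize) u32 {
    return switch (storage) {
        .bytes => |b| b[i],
        .elems => |e| e[i],
        .repeated_elem => |e| e, // every index maps to the same value
    };
}

test "repeated element storage" {
    const s: Storage = .{ .repeated_elem = 7 };
    try std.testing.expectEqual(@as(u32, 7), elemAt(s, 3));
}
```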
@ -1542,7 +1612,7 @@ pub const DeclGen = struct {
const decl_id = self.spv.declPtr(spv_decl_index).result_id; const decl_id = self.spv.declPtr(spv_decl_index).result_id;
log.debug("genDecl: id = {}, index = {}, name = {s}", .{ decl_id.id, @enumToInt(spv_decl_index), decl.name }); log.debug("genDecl: id = {}, index = {}, name = {s}", .{ decl_id.id, @enumToInt(spv_decl_index), decl.name });
if (decl.val.castTag(.function)) |_| { if (decl.getFunction(mod)) |_| {
assert(decl.ty.zigTypeTag(mod) == .Fn); assert(decl.ty.zigTypeTag(mod) == .Fn);
const prototype_id = try self.resolveTypeId(decl.ty); const prototype_id = try self.resolveTypeId(decl.ty);
try self.func.prologue.emit(self.spv.gpa, .OpFunction, .{ try self.func.prologue.emit(self.spv.gpa, .OpFunction, .{
@ -1595,8 +1665,8 @@ pub const DeclGen = struct {
try self.generateTestEntryPoint(fqn, spv_decl_index); try self.generateTestEntryPoint(fqn, spv_decl_index);
} }
} else { } else {
const init_val = if (decl.val.castTag(.variable)) |payload| const init_val = if (decl.getVariable(mod)) |payload|
payload.data.init payload.init.toValue()
else else
decl.val; decl.val;


@ -564,7 +564,8 @@ pub const File = struct {
} }
/// May be called before or after updateDeclExports for any given Decl. /// May be called before or after updateDeclExports for any given Decl.
pub fn updateFunc(base: *File, module: *Module, func: *Module.Fn, air: Air, liveness: Liveness) UpdateDeclError!void { pub fn updateFunc(base: *File, module: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) UpdateDeclError!void {
const func = module.funcPtr(func_index);
const owner_decl = module.declPtr(func.owner_decl); const owner_decl = module.declPtr(func.owner_decl);
log.debug("updateFunc {*} ({s}), type={}", .{ log.debug("updateFunc {*} ({s}), type={}", .{
owner_decl, owner_decl.name, owner_decl.ty.fmt(module), owner_decl, owner_decl.name, owner_decl.ty.fmt(module),
@ -575,14 +576,14 @@ pub const File = struct {
} }
switch (base.tag) { switch (base.tag) {
// zig fmt: off // zig fmt: off
.coff => return @fieldParentPtr(Coff, "base", base).updateFunc(module, func, air, liveness), .coff => return @fieldParentPtr(Coff, "base", base).updateFunc(module, func_index, air, liveness),
.elf => return @fieldParentPtr(Elf, "base", base).updateFunc(module, func, air, liveness), .elf => return @fieldParentPtr(Elf, "base", base).updateFunc(module, func_index, air, liveness),
.macho => return @fieldParentPtr(MachO, "base", base).updateFunc(module, func, air, liveness), .macho => return @fieldParentPtr(MachO, "base", base).updateFunc(module, func_index, air, liveness),
.c => return @fieldParentPtr(C, "base", base).updateFunc(module, func, air, liveness), .c => return @fieldParentPtr(C, "base", base).updateFunc(module, func_index, air, liveness),
.wasm => return @fieldParentPtr(Wasm, "base", base).updateFunc(module, func, air, liveness), .wasm => return @fieldParentPtr(Wasm, "base", base).updateFunc(module, func_index, air, liveness),
.spirv => return @fieldParentPtr(SpirV, "base", base).updateFunc(module, func, air, liveness), .spirv => return @fieldParentPtr(SpirV, "base", base).updateFunc(module, func_index, air, liveness),
.plan9 => return @fieldParentPtr(Plan9, "base", base).updateFunc(module, func, air, liveness), .plan9 => return @fieldParentPtr(Plan9, "base", base).updateFunc(module, func_index, air, liveness),
.nvptx => return @fieldParentPtr(NvPtx, "base", base).updateFunc(module, func, air, liveness), .nvptx => return @fieldParentPtr(NvPtx, "base", base).updateFunc(module, func_index, air, liveness),
// zig fmt: on // zig fmt: on
} }
} }
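The dispatch above relies on each backend embedding a `base` field and recovering the concrete linker type with `@fieldParentPtr`. A small self-contained sketch of that pattern with toy types (`Base` and `Backend` are illustrative, not the linker's real structs):

```zig
const std = @import("std");

const Base = struct { tag: u8 };

const Backend = struct {
    base: Base,
    value: u32,
};

// Recover the containing Backend from a pointer to its embedded base
// field, the same pattern the updateFunc dispatch above uses per tag.
fn valueOf(base: *Base) u32 {
    const backend = @fieldParentPtr(Backend, "base", base);
    return backend.value;
}

test "fieldParentPtr dispatch" {
    var b = Backend{ .base = .{ .tag = 0 }, .value = 42 };
    try std.testing.expectEqual(@as(u32, 42), valueOf(&b.base));
}
```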


@ -87,12 +87,13 @@ pub fn freeDecl(self: *C, decl_index: Module.Decl.Index) void {
} }
} }
pub fn updateFunc(self: *C, module: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(self: *C, module: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const gpa = self.base.allocator; const gpa = self.base.allocator;
const func = module.funcPtr(func_index);
const decl_index = func.owner_decl; const decl_index = func.owner_decl;
const gop = try self.decl_table.getOrPut(gpa, decl_index); const gop = try self.decl_table.getOrPut(gpa, decl_index);
if (!gop.found_existing) { if (!gop.found_existing) {
@ -111,7 +112,7 @@ pub fn updateFunc(self: *C, module: *Module, func: *Module.Fn, air: Air, livenes
.value_map = codegen.CValueMap.init(gpa), .value_map = codegen.CValueMap.init(gpa),
.air = air, .air = air,
.liveness = liveness, .liveness = liveness,
.func = func, .func_index = func_index,
.object = .{ .object = .{
.dg = .{ .dg = .{
.gpa = gpa, .gpa = gpa,
@ -555,7 +556,8 @@ fn flushDecl(
export_names: std.StringHashMapUnmanaged(void), export_names: std.StringHashMapUnmanaged(void),
) FlushDeclError!void { ) FlushDeclError!void {
const gpa = self.base.allocator; const gpa = self.base.allocator;
const decl = self.base.options.module.?.declPtr(decl_index); const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index);
// Before flushing any particular Decl we must ensure its // Before flushing any particular Decl we must ensure its
// dependencies are already flushed, so that the order in the .c // dependencies are already flushed, so that the order in the .c
// file comes out correctly. // file comes out correctly.
@ -569,7 +571,7 @@ fn flushDecl(
try self.flushLazyFns(f, decl_block.lazy_fns); try self.flushLazyFns(f, decl_block.lazy_fns);
try f.all_buffers.ensureUnusedCapacity(gpa, 1); try f.all_buffers.ensureUnusedCapacity(gpa, 1);
if (!(decl.isExtern() and export_names.contains(mem.span(decl.name)))) if (!(decl.isExtern(mod) and export_names.contains(mem.span(decl.name))))
f.appendBufAssumeCapacity(decl_block.fwd_decl.items); f.appendBufAssumeCapacity(decl_block.fwd_decl.items);
} }


@ -1032,18 +1032,19 @@ fn freeAtom(self: *Coff, atom_index: Atom.Index) void {
self.getAtomPtr(atom_index).sym_index = 0; self.getAtomPtr(atom_index).sym_index = 0;
} }
pub fn updateFunc(self: *Coff, mod: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(self: *Coff, mod: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
if (build_options.skip_non_native and builtin.object_format != .coff) { if (build_options.skip_non_native and builtin.object_format != .coff) {
@panic("Attempted to compile for object format that was disabled by build configuration"); @panic("Attempted to compile for object format that was disabled by build configuration");
} }
if (build_options.have_llvm) { if (build_options.have_llvm) {
if (self.llvm_object) |llvm_object| { if (self.llvm_object) |llvm_object| {
return llvm_object.updateFunc(mod, func, air, liveness); return llvm_object.updateFunc(mod, func_index, air, liveness);
} }
} }
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl; const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
@ -1057,7 +1058,7 @@ pub fn updateFunc(self: *Coff, mod: *Module, func: *Module.Fn, air: Air, livenes
const res = try codegen.generateFunction( const res = try codegen.generateFunction(
&self.base, &self.base,
decl.srcLoc(mod), decl.srcLoc(mod),
func, func_index,
air, air,
liveness, liveness,
&code_buffer, &code_buffer,
@ -1155,11 +1156,10 @@ pub fn updateDecl(
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
if (decl.val.tag() == .extern_fn) { if (decl.getExternFunc(mod)) |_| {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
if (decl.val.castTag(.variable)) |payload| { if (decl.getVariable(mod)) |variable| {
const variable = payload.data;
if (variable.is_extern) { if (variable.is_extern) {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
@ -1172,7 +1172,7 @@ pub fn updateDecl(
var code_buffer = std.ArrayList(u8).init(self.base.allocator); var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit(); defer code_buffer.deinit();
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val; const decl_val = if (decl.getVariable(mod)) |variable| variable.init.toValue() else decl.val;
const res = try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{ const res = try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{
.ty = decl.ty, .ty = decl.ty,
.val = decl_val, .val = decl_val,
@ -1313,7 +1313,7 @@ fn getDeclOutputSection(self: *Coff, decl_index: Module.Decl.Index) u16 {
// TODO: what if this is a function pointer? // TODO: what if this is a function pointer?
.Fn => break :blk self.text_section_index.?, .Fn => break :blk self.text_section_index.?,
else => { else => {
if (val.castTag(.variable)) |_| { if (decl.getVariable(mod)) |_| {
break :blk self.data_section_index.?; break :blk self.data_section_index.?;
} }
break :blk self.rdata_section_index.?; break :blk self.rdata_section_index.?;
@ -1425,7 +1425,7 @@ pub fn updateDeclExports(
// detect the default subsystem. // detect the default subsystem.
for (exports) |exp| { for (exports) |exp| {
const exported_decl = mod.declPtr(exp.exported_decl); const exported_decl = mod.declPtr(exp.exported_decl);
if (exported_decl.getFunction() == null) continue; if (exported_decl.getFunctionIndex(mod) == .none) continue;
const winapi_cc = switch (self.base.options.target.cpu.arch) { const winapi_cc = switch (self.base.options.target.cpu.arch) {
.x86 => std.builtin.CallingConvention.Stdcall, .x86 => std.builtin.CallingConvention.Stdcall,
else => std.builtin.CallingConvention.C, else => std.builtin.CallingConvention.C,
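The export scan above defaults x86 exports to `Stdcall` and everything else to the C calling convention. A small sketch of that selection in isolation (the chosen `arch` value is just an example):

```zig
const std = @import("std");

test "winapi calling convention pick" {
    const arch: std.Target.Cpu.Arch = .x86_64; // example target
    const cc: std.builtin.CallingConvention = switch (arch) {
        .x86 => .Stdcall, // 32-bit Windows exports default to stdcall
        else => .C,
    };
    try std.testing.expect(cc == .C);
}
```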


@ -971,7 +971,7 @@ pub fn initDeclState(self: *Dwarf, mod: *Module, decl_index: Module.Decl.Index)
// For functions we need to add a prologue to the debug line program. // For functions we need to add a prologue to the debug line program.
try dbg_line_buffer.ensureTotalCapacity(26); try dbg_line_buffer.ensureTotalCapacity(26);
const func = decl.val.castTag(.function).?.data; const func = decl.getFunction(mod).?;
log.debug("decl.src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{ log.debug("decl.src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{
decl.src_line, decl.src_line,
func.lbrace_line, func.lbrace_line,
@ -1514,7 +1514,7 @@ fn writeDeclDebugInfo(self: *Dwarf, atom_index: Atom.Index, dbg_info_buf: []cons
} }
} }
pub fn updateDeclLineNumber(self: *Dwarf, module: *Module, decl_index: Module.Decl.Index) !void { pub fn updateDeclLineNumber(self: *Dwarf, mod: *Module, decl_index: Module.Decl.Index) !void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
@ -1522,8 +1522,8 @@ pub fn updateDeclLineNumber(self: *Dwarf, module: *Module, decl_index: Module.De
const atom = self.getAtom(.src_fn, atom_index); const atom = self.getAtom(.src_fn, atom_index);
if (atom.len == 0) return; if (atom.len == 0) return;
const decl = module.declPtr(decl_index); const decl = mod.declPtr(decl_index);
const func = decl.val.castTag(.function).?.data; const func = decl.getFunction(mod).?;
log.debug("decl.src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{ log.debug("decl.src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{
decl.src_line, decl.src_line,
func.lbrace_line, func.lbrace_line,


@ -2465,7 +2465,7 @@ fn getDeclShdrIndex(self: *Elf, decl_index: Module.Decl.Index) u16 {
// TODO: what if this is a function pointer? // TODO: what if this is a function pointer?
.Fn => break :blk self.text_section_index.?, .Fn => break :blk self.text_section_index.?,
else => { else => {
if (val.castTag(.variable)) |_| { if (decl.getVariable(mod)) |_| {
break :blk self.data_section_index.?; break :blk self.data_section_index.?;
} }
break :blk self.rodata_section_index.?; break :blk self.rodata_section_index.?;
@ -2574,17 +2574,18 @@ fn updateDeclCode(self: *Elf, decl_index: Module.Decl.Index, code: []const u8, s
return local_sym; return local_sym;
} }
pub fn updateFunc(self: *Elf, mod: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(self: *Elf, mod: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
if (build_options.skip_non_native and builtin.object_format != .elf) { if (build_options.skip_non_native and builtin.object_format != .elf) {
@panic("Attempted to compile for object format that was disabled by build configuration"); @panic("Attempted to compile for object format that was disabled by build configuration");
} }
if (build_options.have_llvm) { if (build_options.have_llvm) {
if (self.llvm_object) |llvm_object| return llvm_object.updateFunc(mod, func, air, liveness); if (self.llvm_object) |llvm_object| return llvm_object.updateFunc(mod, func_index, air, liveness);
} }
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl; const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
@ -2599,11 +2600,11 @@ pub fn updateFunc(self: *Elf, mod: *Module, func: *Module.Fn, air: Air, liveness
defer if (decl_state) |*ds| ds.deinit(); defer if (decl_state) |*ds| ds.deinit();
const res = if (decl_state) |*ds| const res = if (decl_state) |*ds|
try codegen.generateFunction(&self.base, decl.srcLoc(mod), func, air, liveness, &code_buffer, .{ try codegen.generateFunction(&self.base, decl.srcLoc(mod), func_index, air, liveness, &code_buffer, .{
.dwarf = ds, .dwarf = ds,
}) })
else else
try codegen.generateFunction(&self.base, decl.srcLoc(mod), func, air, liveness, &code_buffer, .none); try codegen.generateFunction(&self.base, decl.srcLoc(mod), func_index, air, liveness, &code_buffer, .none);
const code = switch (res) { const code = switch (res) {
.ok => code_buffer.items, .ok => code_buffer.items,
@ -2646,11 +2647,10 @@ pub fn updateDecl(
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
if (decl.val.tag() == .extern_fn) { if (decl.getExternFunc(mod)) |_| {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
if (decl.val.castTag(.variable)) |payload| { if (decl.getVariable(mod)) |variable| {
const variable = payload.data;
if (variable.is_extern) { if (variable.is_extern) {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
@ -2667,7 +2667,7 @@ pub fn updateDecl(
defer if (decl_state) |*ds| ds.deinit(); defer if (decl_state) |*ds| ds.deinit();
// TODO implement .debug_info for global variables // TODO implement .debug_info for global variables
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val; const decl_val = if (decl.getVariable(mod)) |variable| variable.init.toValue() else decl.val;
const res = if (decl_state) |*ds| const res = if (decl_state) |*ds|
try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{ try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{
.ty = decl.ty, .ty = decl.ty,


@ -1847,16 +1847,17 @@ fn addStubEntry(self: *MachO, target: SymbolWithLoc) !void {
self.markRelocsDirtyByTarget(target); self.markRelocsDirtyByTarget(target);
} }
pub fn updateFunc(self: *MachO, mod: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(self: *MachO, mod: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
if (build_options.skip_non_native and builtin.object_format != .macho) { if (build_options.skip_non_native and builtin.object_format != .macho) {
@panic("Attempted to compile for object format that was disabled by build configuration"); @panic("Attempted to compile for object format that was disabled by build configuration");
} }
if (build_options.have_llvm) { if (build_options.have_llvm) {
if (self.llvm_object) |llvm_object| return llvm_object.updateFunc(mod, func, air, liveness); if (self.llvm_object) |llvm_object| return llvm_object.updateFunc(mod, func_index, air, liveness);
} }
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl; const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
@ -1874,11 +1875,11 @@ pub fn updateFunc(self: *MachO, mod: *Module, func: *Module.Fn, air: Air, livene
defer if (decl_state) |*ds| ds.deinit(); defer if (decl_state) |*ds| ds.deinit();
const res = if (decl_state) |*ds| const res = if (decl_state) |*ds|
try codegen.generateFunction(&self.base, decl.srcLoc(mod), func, air, liveness, &code_buffer, .{ try codegen.generateFunction(&self.base, decl.srcLoc(mod), func_index, air, liveness, &code_buffer, .{
.dwarf = ds, .dwarf = ds,
}) })
else else
try codegen.generateFunction(&self.base, decl.srcLoc(mod), func, air, liveness, &code_buffer, .none); try codegen.generateFunction(&self.base, decl.srcLoc(mod), func_index, air, liveness, &code_buffer, .none);
var code = switch (res) { var code = switch (res) {
.ok => code_buffer.items, .ok => code_buffer.items,
@ -1983,18 +1984,17 @@ pub fn updateDecl(self: *MachO, mod: *Module, decl_index: Module.Decl.Index) !vo
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
if (decl.val.tag() == .extern_fn) { if (decl.getExternFunc(mod)) |_| {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
if (decl.val.castTag(.variable)) |payload| { if (decl.getVariable(mod)) |variable| {
const variable = payload.data;
if (variable.is_extern) { if (variable.is_extern) {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
} }
const is_threadlocal = if (decl.val.castTag(.variable)) |payload| const is_threadlocal = if (decl.getVariable(mod)) |variable|
payload.data.is_threadlocal and !self.base.options.single_threaded variable.is_threadlocal and !self.base.options.single_threaded
else else
false; false;
if (is_threadlocal) return self.updateThreadlocalVariable(mod, decl_index); if (is_threadlocal) return self.updateThreadlocalVariable(mod, decl_index);
@ -2012,7 +2012,7 @@ pub fn updateDecl(self: *MachO, mod: *Module, decl_index: Module.Decl.Index) !vo
null; null;
defer if (decl_state) |*ds| ds.deinit(); defer if (decl_state) |*ds| ds.deinit();
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val; const decl_val = if (decl.getVariable(mod)) |variable| variable.init.toValue() else decl.val;
const res = if (decl_state) |*ds| const res = if (decl_state) |*ds|
try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{ try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{
.ty = decl.ty, .ty = decl.ty,
@ -2177,7 +2177,7 @@ fn updateThreadlocalVariable(self: *MachO, module: *Module, decl_index: Module.D
const decl = module.declPtr(decl_index); const decl = module.declPtr(decl_index);
const decl_metadata = self.decls.get(decl_index).?; const decl_metadata = self.decls.get(decl_index).?;
const decl_val = decl.val.castTag(.variable).?.data.init; const decl_val = decl.getVariable(mod).?.init.toValue();
const res = if (decl_state) |*ds| const res = if (decl_state) |*ds|
try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{ try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{
.ty = decl.ty, .ty = decl.ty,
@ -2278,8 +2278,8 @@ fn getDeclOutputSection(self: *MachO, decl_index: Module.Decl.Index) u8 {
} }
} }
if (val.castTag(.variable)) |variable| { if (decl.getVariable(mod)) |variable| {
if (variable.data.is_threadlocal and !single_threaded) { if (variable.is_threadlocal and !single_threaded) {
break :blk self.thread_data_section_index.?; break :blk self.thread_data_section_index.?;
} }
break :blk self.data_section_index.?; break :blk self.data_section_index.?;
@ -2289,7 +2289,7 @@ fn getDeclOutputSection(self: *MachO, decl_index: Module.Decl.Index) u8 {
// TODO: what if this is a function pointer? // TODO: what if this is a function pointer?
.Fn => break :blk self.text_section_index.?, .Fn => break :blk self.text_section_index.?,
else => { else => {
if (val.castTag(.variable)) |_| { if (decl.getVariable(mod)) |_| {
break :blk self.data_section_index.?; break :blk self.data_section_index.?;
} }
break :blk self.data_const_section_index.?; break :blk self.data_const_section_index.?;
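The section choice above keys off whether the variable is threadlocal, in which case it lands in the thread-data section so each thread gets its own copy. A tiny standalone illustration of `threadlocal` storage (the test runs on a single thread, so only this thread's copy is observed):

```zig
const std = @import("std");

threadlocal var counter: u32 = 0;

test "threadlocal storage" {
    // Each thread sees its own counter; here only the test thread bumps it.
    counter += 1;
    try std.testing.expectEqual(@as(u32, 1), counter);
}
```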


@ -68,9 +68,9 @@ pub fn deinit(self: *NvPtx) void {
self.base.allocator.free(self.ptx_file_name); self.base.allocator.free(self.ptx_file_name);
} }
pub fn updateFunc(self: *NvPtx, module: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(self: *NvPtx, module: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
if (!build_options.have_llvm) return; if (!build_options.have_llvm) return;
try self.llvm_object.updateFunc(module, func, air, liveness); try self.llvm_object.updateFunc(module, func_index, air, liveness);
} }
pub fn updateDecl(self: *NvPtx, module: *Module, decl_index: Module.Decl.Index) !void { pub fn updateDecl(self: *NvPtx, module: *Module, decl_index: Module.Decl.Index) !void {


@ -276,11 +276,12 @@ fn addPathComponents(self: *Plan9, path: []const u8, a: *std.ArrayList(u8)) !voi
} }
} }
pub fn updateFunc(self: *Plan9, mod: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(self: *Plan9, mod: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
if (build_options.skip_non_native and builtin.object_format != .plan9) { if (build_options.skip_non_native and builtin.object_format != .plan9) {
@panic("Attempted to compile for object format that was disabled by build configuration"); @panic("Attempted to compile for object format that was disabled by build configuration");
} }
const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl; const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
self.freeUnnamedConsts(decl_index); self.freeUnnamedConsts(decl_index);
@ -299,7 +300,7 @@ pub fn updateFunc(self: *Plan9, mod: *Module, func: *Module.Fn, air: Air, livene
const res = try codegen.generateFunction( const res = try codegen.generateFunction(
&self.base, &self.base,
decl.srcLoc(mod), decl.srcLoc(mod),
func, func_index,
air, air,
liveness, liveness,
&code_buffer, &code_buffer,
@ -391,11 +392,10 @@ pub fn lowerUnnamedConst(self: *Plan9, tv: TypedValue, decl_index: Module.Decl.I
pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: Module.Decl.Index) !void { pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: Module.Decl.Index) !void {
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
if (decl.val.tag() == .extern_fn) { if (decl.getExternFunc(mod)) |_| {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
if (decl.val.castTag(.variable)) |payload| { if (decl.getVariable(mod)) |variable| {
const variable = payload.data;
if (variable.is_extern) { if (variable.is_extern) {
return; // TODO Should we do more when front-end analyzed extern decl? return; // TODO Should we do more when front-end analyzed extern decl?
} }
@ -407,7 +407,7 @@ pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: Module.Decl.Index) !vo
var code_buffer = std.ArrayList(u8).init(self.base.allocator); var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit(); defer code_buffer.deinit();
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val; const decl_val = if (decl.getVariable(mod)) |variable| variable.init.toValue() else decl.val;
// TODO we need the symbol index for the symbol in the table of locals for the containing atom // TODO we need the symbol index for the symbol in the table of locals for the containing atom
const res = try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{ const res = try codegen.generateSymbol(&self.base, decl.srcLoc(mod), .{
.ty = decl.ty, .ty = decl.ty,
@ -771,7 +771,7 @@ pub fn freeDecl(self: *Plan9, decl_index: Module.Decl.Index) void {
// in the deleteUnusedDecl function. // in the deleteUnusedDecl function.
const mod = self.base.options.module.?; const mod = self.base.options.module.?;
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
const is_fn = (decl.val.tag() == .function); const is_fn = decl.getFunctionIndex(mod) != .none;
if (is_fn) { if (is_fn) {
var symidx_and_submap = self.fn_decl_table.get(decl.getFileScope(mod)).?; var symidx_and_submap = self.fn_decl_table.get(decl.getFileScope(mod)).?;
var submap = symidx_and_submap.functions; var submap = symidx_and_submap.functions;
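`decl.getFunctionIndex(mod) != .none` above replaces a nullable-pointer check with a dedicated `.none` tag on an index enum. A rough sketch of that optional-index pattern — `FnIndex` and its sentinel value are illustrative, not the module's real index type:

```zig
const std = @import("std");

// Optional-index pattern: a reserved `.none` tag marks the absent case
// instead of using a nullable pointer.
const FnIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,
};

fn isFn(index: FnIndex) bool {
    return index != .none;
}

test "none sentinel index" {
    try std.testing.expect(!isFn(.none));
    try std.testing.expect(isFn(@intToEnum(FnIndex, 3)));
}
```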


@ -103,11 +103,13 @@ pub fn deinit(self: *SpirV) void {
self.decl_link.deinit(); self.decl_link.deinit();
} }
pub fn updateFunc(self: *SpirV, module: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(self: *SpirV, module: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
if (build_options.skip_non_native) { if (build_options.skip_non_native) {
@panic("Attempted to compile for architecture that was disabled by build configuration"); @panic("Attempted to compile for architecture that was disabled by build configuration");
} }
const func = module.funcPtr(func_index);
var decl_gen = codegen.DeclGen.init(self.base.allocator, module, &self.spv, &self.decl_link); var decl_gen = codegen.DeclGen.init(self.base.allocator, module, &self.spv, &self.decl_link);
defer decl_gen.deinit(); defer decl_gen.deinit();
@ -136,7 +138,7 @@ pub fn updateDeclExports(
exports: []const *Module.Export, exports: []const *Module.Export,
) !void { ) !void {
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
if (decl.val.tag() == .function and decl.ty.fnCallingConvention(mod) == .Kernel) { if (decl.getFunctionIndex(mod) != .none and decl.ty.fnCallingConvention(mod) == .Kernel) {
// TODO: Unify with resolveDecl in spirv.zig. // TODO: Unify with resolveDecl in spirv.zig.
const entry = try self.decl_link.getOrPut(decl_index); const entry = try self.decl_link.getOrPut(decl_index);
if (!entry.found_existing) { if (!entry.found_existing) {


@ -1324,17 +1324,18 @@ pub fn allocateSymbol(wasm: *Wasm) !u32 {
return index; return index;
} }
pub fn updateFunc(wasm: *Wasm, mod: *Module, func: *Module.Fn, air: Air, liveness: Liveness) !void { pub fn updateFunc(wasm: *Wasm, mod: *Module, func_index: Module.Fn.Index, air: Air, liveness: Liveness) !void {
if (build_options.skip_non_native and builtin.object_format != .wasm) { if (build_options.skip_non_native and builtin.object_format != .wasm) {
@panic("Attempted to compile for object format that was disabled by build configuration"); @panic("Attempted to compile for object format that was disabled by build configuration");
} }
if (build_options.have_llvm) { if (build_options.have_llvm) {
if (wasm.llvm_object) |llvm_object| return llvm_object.updateFunc(mod, func, air, liveness); if (wasm.llvm_object) |llvm_object| return llvm_object.updateFunc(mod, func_index, air, liveness);
} }
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const func = mod.funcPtr(func_index);
const decl_index = func.owner_decl; const decl_index = func.owner_decl;
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
const atom_index = try wasm.getOrCreateAtomForDecl(decl_index); const atom_index = try wasm.getOrCreateAtomForDecl(decl_index);
@ -1358,7 +1359,7 @@ pub fn updateFunc(wasm: *Wasm, mod: *Module, func: *Module.Fn, air: Air, livenes
const result = try codegen.generateFunction( const result = try codegen.generateFunction(
&wasm.base, &wasm.base,
decl.srcLoc(mod), decl.srcLoc(mod),
func, func_index,
air, air,
liveness, liveness,
&code_writer, &code_writer,
@ -1403,9 +1404,9 @@ pub fn updateDecl(wasm: *Wasm, mod: *Module, decl_index: Module.Decl.Index) !voi
defer tracy.end(); defer tracy.end();
const decl = mod.declPtr(decl_index); const decl = mod.declPtr(decl_index);
if (decl.val.castTag(.function)) |_| { if (decl.getFunction(mod)) |_| {
return; return;
} else if (decl.val.castTag(.extern_fn)) |_| { } else if (decl.getExternFunc(mod)) |_| {
return; return;
} }
@ -1413,12 +1414,13 @@ pub fn updateDecl(wasm: *Wasm, mod: *Module, decl_index: Module.Decl.Index) !voi
const atom = wasm.getAtomPtr(atom_index); const atom = wasm.getAtomPtr(atom_index);
atom.clear(); atom.clear();
if (decl.isExtern()) { if (decl.isExtern(mod)) {
const variable = decl.getVariable().?; const variable = decl.getVariable(mod).?;
const name = mem.sliceTo(decl.name, 0); const name = mem.sliceTo(decl.name, 0);
return wasm.addOrUpdateImport(name, atom.sym_index, variable.lib_name, null); const lib_name = mod.intern_pool.stringToSliceUnwrap(variable.lib_name);
return wasm.addOrUpdateImport(name, atom.sym_index, lib_name, null);
} }
const val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val; const val = if (decl.getVariable(mod)) |variable| variable.init.toValue() else decl.val;
var code_writer = std.ArrayList(u8).init(wasm.base.allocator); var code_writer = std.ArrayList(u8).init(wasm.base.allocator);
defer code_writer.deinit(); defer code_writer.deinit();
@ -1791,7 +1793,7 @@ pub fn freeDecl(wasm: *Wasm, decl_index: Module.Decl.Index) void {
assert(wasm.symbol_atom.remove(local_atom.symbolLoc())); assert(wasm.symbol_atom.remove(local_atom.symbolLoc()));
} }
if (decl.isExtern()) { if (decl.isExtern(mod)) {
_ = wasm.imports.remove(atom.symbolLoc()); _ = wasm.imports.remove(atom.symbolLoc());
} }
_ = wasm.resolved_symbols.swapRemove(atom.symbolLoc()); _ = wasm.resolved_symbols.swapRemove(atom.symbolLoc());
@ -1852,7 +1854,7 @@ pub fn addOrUpdateImport(
/// Symbol index that is external /// Symbol index that is external
symbol_index: u32, symbol_index: u32,
/// Optional library name (i.e. `extern "c" fn foo() void`) /// Optional library name (i.e. `extern "c" fn foo() void`)
lib_name: ?[*:0]const u8, lib_name: ?[:0]const u8,
/// The index of the type that represents the function signature /// The index of the type that represents the function signature
/// when the extern is a function. When this is null, a data-symbol /// when the extern is a function. When this is null, a data-symbol
/// is asserted instead. /// is asserted instead.
@ -1863,7 +1865,7 @@ pub fn addOrUpdateImport(
// Also mangle the name when the lib name is set and not equal to "C" so imports with the same // Also mangle the name when the lib name is set and not equal to "C" so imports with the same
// name but different module can be resolved correctly. // name but different module can be resolved correctly.
const mangle_name = lib_name != null and const mangle_name = lib_name != null and
!std.mem.eql(u8, std.mem.sliceTo(lib_name.?, 0), "c"); !std.mem.eql(u8, lib_name.?, "c");
const full_name = if (mangle_name) full_name: { const full_name = if (mangle_name) full_name: {
break :full_name try std.fmt.allocPrint(wasm.base.allocator, "{s}|{s}", .{ name, lib_name.? }); break :full_name try std.fmt.allocPrint(wasm.base.allocator, "{s}|{s}", .{ name, lib_name.? });
} else name; } else name;
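With `lib_name` now a sentinel slice rather than a 0-terminated many-pointer, the comparison above no longer needs `std.mem.sliceTo`, and non-"c" imports still get the mangled `lib|name` form via `allocPrint`. A small sketch of both pieces (the "env" and "foo" names are made-up examples):

```zig
const std = @import("std");

test "sentinel slice lib names and mangling" {
    const gpa = std.testing.allocator;

    // Slice form carries its length; the many-pointer form has to be
    // re-measured with sliceTo, which is what the old code did.
    const lib: [:0]const u8 = "env";
    const ptr: [*:0]const u8 = lib.ptr;
    try std.testing.expect(!std.mem.eql(u8, lib, "c"));
    try std.testing.expect(std.mem.eql(u8, std.mem.sliceTo(ptr, 0), lib));

    // Non-"c" imports get a "lib|name" form so equally named symbols
    // from different modules stay distinct.
    const full_name = try std.fmt.allocPrint(gpa, "{s}|{s}", .{ lib, "foo" });
    defer gpa.free(full_name);
    try std.testing.expectEqualStrings("env|foo", full_name);
}
```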
@ -1889,7 +1891,7 @@ pub fn addOrUpdateImport(
if (type_index) |ty_index| { if (type_index) |ty_index| {
const gop = try wasm.imports.getOrPut(wasm.base.allocator, .{ .index = symbol_index, .file = null }); const gop = try wasm.imports.getOrPut(wasm.base.allocator, .{ .index = symbol_index, .file = null });
const module_name = if (lib_name) |l_name| blk: { const module_name = if (lib_name) |l_name| blk: {
break :blk mem.sliceTo(l_name, 0); break :blk l_name;
} else wasm.host_name; } else wasm.host_name;
if (!gop.found_existing) { if (!gop.found_existing) {
gop.value_ptr.* = .{ gop.value_ptr.* = .{
@ -2931,7 +2933,7 @@ pub fn getErrorTableSymbol(wasm: *Wasm) !u32 {
const atom_index = try wasm.createAtom(); const atom_index = try wasm.createAtom();
const atom = wasm.getAtomPtr(atom_index); const atom = wasm.getAtomPtr(atom_index);
const slice_ty = Type.const_slice_u8_sentinel_0; const slice_ty = Type.slice_const_u8_sentinel_0;
const mod = wasm.base.options.module.?; const mod = wasm.base.options.module.?;
atom.alignment = slice_ty.abiAlignment(mod); atom.alignment = slice_ty.abiAlignment(mod);
const sym_index = atom.sym_index; const sym_index = atom.sym_index;
@ -2988,7 +2990,7 @@ fn populateErrorNameTable(wasm: *Wasm) !void {
for (mod.error_name_list.items) |error_name| { for (mod.error_name_list.items) |error_name| {
const len = @intCast(u32, error_name.len + 1); // names are 0-terminated const len = @intCast(u32, error_name.len + 1); // names are 0-terminated
const slice_ty = Type.const_slice_u8_sentinel_0; const slice_ty = Type.slice_const_u8_sentinel_0;
const offset = @intCast(u32, atom.code.items.len); const offset = @intCast(u32, atom.code.items.len);
// first we create the data for the slice of the name // first we create the data for the slice of the name
try atom.code.appendNTimes(wasm.base.allocator, 0, 4); // ptr to name, will be relocated try atom.code.appendNTimes(wasm.base.allocator, 0, 4); // ptr to name, will be relocated
@ -3366,15 +3368,15 @@ pub fn flushModule(wasm: *Wasm, comp: *Compilation, prog_node: *std.Progress.Nod
var decl_it = wasm.decls.iterator(); var decl_it = wasm.decls.iterator();
while (decl_it.next()) |entry| { while (decl_it.next()) |entry| {
const decl = mod.declPtr(entry.key_ptr.*); const decl = mod.declPtr(entry.key_ptr.*);
if (decl.isExtern()) continue; if (decl.isExtern(mod)) continue;
const atom_index = entry.value_ptr.*; const atom_index = entry.value_ptr.*;
const atom = wasm.getAtomPtr(atom_index); const atom = wasm.getAtomPtr(atom_index);
if (decl.ty.zigTypeTag(mod) == .Fn) { if (decl.ty.zigTypeTag(mod) == .Fn) {
try wasm.parseAtom(atom_index, .function); try wasm.parseAtom(atom_index, .function);
} else if (decl.getVariable()) |variable| { } else if (decl.getVariable(mod)) |variable| {
if (!variable.is_mutable) { if (variable.is_const) {
try wasm.parseAtom(atom_index, .{ .data = .read_only }); try wasm.parseAtom(atom_index, .{ .data = .read_only });
} else if (variable.init.isUndefDeep(mod)) { } else if (variable.init.toValue().isUndefDeep(mod)) {
// for safe build modes, we store the atom in the data segment, // for safe build modes, we store the atom in the data segment,
// whereas for unsafe build modes we store it in bss. // whereas for unsafe build modes we store it in bss.
const is_initialized = wasm.base.options.optimize_mode == .Debug or const is_initialized = wasm.base.options.optimize_mode == .Debug or


@ -699,8 +699,8 @@ const Writer = struct {
fn writeDbgInline(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { fn writeDbgInline(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const ty_pl = w.air.instructions.items(.data)[inst].ty_pl; const ty_pl = w.air.instructions.items(.data)[inst].ty_pl;
const function = w.air.values[ty_pl.payload].castTag(.function).?.data; const func_index = w.module.intern_pool.indexToFunc(w.air.values[ty_pl.payload].ip_index);
const owner_decl = w.module.declPtr(function.owner_decl); const owner_decl = w.module.declPtr(w.module.funcPtrUnwrap(func_index).?.owner_decl);
try s.print("{s}", .{owner_decl.name}); try s.print("{s}", .{owner_decl.name});
} }


@ -93,16 +93,23 @@ pub const Type = struct {
}, },
// values, not types // values, not types
.undef => unreachable, .undef,
.un => unreachable, .runtime_value,
.extern_func => unreachable, .simple_value,
.int => unreachable, .variable,
.float => unreachable, .extern_func,
.ptr => unreachable, .func,
.opt => unreachable, .int,
.enum_tag => unreachable, .err,
.simple_value => unreachable, .error_union,
.aggregate => unreachable, .enum_literal,
.enum_tag,
.float,
.ptr,
.opt,
.aggregate,
.un,
=> unreachable,
}; };
} }
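The rewrite above collapses a column of single-tag `=> unreachable` arms into one grouped prong list. A minimal example of Zig's shared switch prongs with a toy enum:

```zig
const std = @import("std");

const Key = enum { int_type, array_type, int, float, ptr };

// Several switch cases can share a single body; the diff uses the same
// grouping to mark all value-only keys as unreachable at once.
fn isTypeKey(key: Key) bool {
    return switch (key) {
        .int_type, .array_type => true,
        .int, .float, .ptr => false,
    };
}

test "grouped switch prongs" {
    try std.testing.expect(isTypeKey(.array_type));
    try std.testing.expect(!isTypeKey(.float));
}
```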
@ -358,7 +365,7 @@ pub const Type = struct {
const func = ies.func; const func = ies.func;
try writer.writeAll("@typeInfo(@typeInfo(@TypeOf("); try writer.writeAll("@typeInfo(@typeInfo(@TypeOf(");
const owner_decl = mod.declPtr(func.owner_decl); const owner_decl = mod.declPtr(mod.funcPtr(func).owner_decl);
try owner_decl.renderFullyQualifiedName(mod, writer); try owner_decl.renderFullyQualifiedName(mod, writer);
try writer.writeAll(")).Fn.return_type.?).ErrorUnion.error_set"); try writer.writeAll(")).Fn.return_type.?).ErrorUnion.error_set");
}, },
@ -467,16 +474,23 @@ pub const Type = struct {
}, },
// values, not types // values, not types
.undef => unreachable, .undef,
.un => unreachable, .runtime_value,
.simple_value => unreachable, .simple_value,
.extern_func => unreachable, .variable,
.int => unreachable, .extern_func,
.float => unreachable, .func,
.ptr => unreachable, .int,
.opt => unreachable, .err,
.enum_tag => unreachable, .error_union,
.aggregate => unreachable, .enum_literal,
.enum_tag,
.float,
.ptr,
.opt,
.aggregate,
.un,
=> unreachable,
} }
} }
@ -675,16 +689,23 @@ pub const Type = struct {
.enum_type => |enum_type| enum_type.tag_ty.toType().hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat), .enum_type => |enum_type| enum_type.tag_ty.toType().hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat),
// values, not types // values, not types
.undef => unreachable, .undef,
.un => unreachable, .runtime_value,
.simple_value => unreachable, .simple_value,
.extern_func => unreachable, .variable,
.int => unreachable, .extern_func,
.float => unreachable, .func,
.ptr => unreachable, .int,
.opt => unreachable, .err,
.enum_tag => unreachable, .error_union,
.aggregate => unreachable, .enum_literal,
.enum_tag,
.float,
.ptr,
.opt,
.aggregate,
.un,
=> unreachable,
}, },
}; };
} }
@ -777,16 +798,23 @@ pub const Type = struct {
}, },
// values, not types // values, not types
.undef => unreachable, .undef,
.un => unreachable, .runtime_value,
.simple_value => unreachable, .simple_value,
.extern_func => unreachable, .variable,
.int => unreachable, .extern_func,
.float => unreachable, .func,
.ptr => unreachable, .int,
.opt => unreachable, .err,
.enum_tag => unreachable, .error_union,
.aggregate => unreachable, .enum_literal,
.enum_tag,
.float,
.ptr,
.opt,
.aggregate,
.un,
=> unreachable,
}; };
} }
@ -866,8 +894,8 @@ pub const Type = struct {
/// May capture a reference to `ty`. /// May capture a reference to `ty`.
/// Returned value has type `comptime_int`. /// Returned value has type `comptime_int`.
pub fn lazyAbiAlignment(ty: Type, mod: *Module, arena: Allocator) !Value { pub fn lazyAbiAlignment(ty: Type, mod: *Module) !Value {
switch (try ty.abiAlignmentAdvanced(mod, .{ .lazy = arena })) { switch (try ty.abiAlignmentAdvanced(mod, .lazy)) {
.val => |val| return val, .val => |val| return val,
.scalar => |x| return mod.intValue(Type.comptime_int, x), .scalar => |x| return mod.intValue(Type.comptime_int, x),
} }
@ -880,7 +908,7 @@ pub const Type = struct {
pub const AbiAlignmentAdvancedStrat = union(enum) { pub const AbiAlignmentAdvancedStrat = union(enum) {
eager, eager,
lazy: Allocator, lazy,
sema: *Sema, sema: *Sema,
}; };
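With lazy alignment and size now interned as integers, the `.lazy` strategy no longer needs to carry an arena allocator. A sketch of that signature change with placeholder types (`Sema` here is a stub, not the compiler's real semantic analyzer):

```zig
const std = @import("std");

const Sema = struct {};

// Before: `.lazy` carried an arena for building a lazy Value.
const StratOld = union(enum) { eager, lazy: std.mem.Allocator, sema: *Sema };
// After: lazy ints live in the intern pool, so no payload is needed.
const StratNew = union(enum) { eager, lazy, sema: *Sema };

test "lazy strategy without an allocator payload" {
    const strat: StratNew = .lazy;
    try std.testing.expect(strat == .lazy);
}
```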
@ -1019,16 +1047,18 @@ pub const Type = struct {
if (!struct_obj.haveFieldTypes()) switch (strat) { if (!struct_obj.haveFieldTypes()) switch (strat) {
.eager => unreachable, // struct layout not resolved .eager => unreachable, // struct layout not resolved
.sema => unreachable, // handled above .sema => unreachable, // handled above
.lazy => |arena| return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }, .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
}; };
if (struct_obj.layout == .Packed) { if (struct_obj.layout == .Packed) {
switch (strat) { switch (strat) {
.sema => |sema| try sema.resolveTypeLayout(ty), .sema => |sema| try sema.resolveTypeLayout(ty),
.lazy => |arena| { .lazy => if (!struct_obj.haveLayout()) return .{ .val = (try mod.intern(.{ .int = .{
if (!struct_obj.haveLayout()) { .ty = .comptime_int_type,
return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }; .storage = .{ .lazy_align = ty.ip_index },
} } })).toValue() },
},
.eager => {}, .eager => {},
} }
assert(struct_obj.haveLayout()); assert(struct_obj.haveLayout());
@ -1039,7 +1069,10 @@ pub const Type = struct {
var big_align: u32 = 0; var big_align: u32 = 0;
for (fields.values()) |field| { for (fields.values()) |field| {
if (!(field.ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) { if (!(field.ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
error.NeedLazy => return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(strat.lazy, ty) }, error.NeedLazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
else => |e| return e, else => |e| return e,
})) continue; })) continue;
@ -1050,7 +1083,10 @@ pub const Type = struct {
.val => switch (strat) { .val => switch (strat) {
.eager => unreachable, // struct layout not resolved .eager => unreachable, // struct layout not resolved
.sema => unreachable, // handled above .sema => unreachable, // handled above
.lazy => |arena| return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }, .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
}, },
}; };
big_align = @max(big_align, field_align); big_align = @max(big_align, field_align);
@ -1077,7 +1113,10 @@ pub const Type = struct {
.val => switch (strat) { .val => switch (strat) {
.eager => unreachable, // field type alignment not resolved .eager => unreachable, // field type alignment not resolved
.sema => unreachable, // passed to abiAlignmentAdvanced above .sema => unreachable, // passed to abiAlignmentAdvanced above
.lazy => |arena| return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }, .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
}, },
} }
} }
@ -1092,16 +1131,23 @@ pub const Type = struct {
.enum_type => |enum_type| return AbiAlignmentAdvanced{ .scalar = enum_type.tag_ty.toType().abiAlignment(mod) }, .enum_type => |enum_type| return AbiAlignmentAdvanced{ .scalar = enum_type.tag_ty.toType().abiAlignment(mod) },
// values, not types // values, not types
.undef => unreachable, .undef,
.un => unreachable, .runtime_value,
.simple_value => unreachable, .simple_value,
.extern_func => unreachable, .variable,
.int => unreachable, .extern_func,
.float => unreachable, .func,
.ptr => unreachable, .int,
.opt => unreachable, .err,
.enum_tag => unreachable, .error_union,
.aggregate => unreachable, .enum_literal,
.enum_tag,
.float,
.ptr,
.opt,
.aggregate,
.un,
=> unreachable,
}, },
} }
} }
@ -1118,7 +1164,10 @@ pub const Type = struct {
switch (strat) { switch (strat) {
.eager, .sema => { .eager, .sema => {
if (!(payload_ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) { if (!(payload_ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
error.NeedLazy => return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(strat.lazy, ty) }, error.NeedLazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
else => |e| return e, else => |e| return e,
})) { })) {
return AbiAlignmentAdvanced{ .scalar = code_align }; return AbiAlignmentAdvanced{ .scalar = code_align };
@ -1128,7 +1177,7 @@ pub const Type = struct {
(try payload_ty.abiAlignmentAdvanced(mod, strat)).scalar, (try payload_ty.abiAlignmentAdvanced(mod, strat)).scalar,
) }; ) };
}, },
.lazy => |arena| { .lazy => {
switch (try payload_ty.abiAlignmentAdvanced(mod, strat)) { switch (try payload_ty.abiAlignmentAdvanced(mod, strat)) {
.scalar => |payload_align| { .scalar => |payload_align| {
return AbiAlignmentAdvanced{ return AbiAlignmentAdvanced{
@ -1137,7 +1186,10 @@ pub const Type = struct {
}, },
.val => {}, .val => {},
} }
return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }; return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() };
}, },
} }
} }
@ -1160,16 +1212,22 @@ pub const Type = struct {
switch (strat) { switch (strat) {
.eager, .sema => { .eager, .sema => {
if (!(child_type.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) { if (!(child_type.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
error.NeedLazy => return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(strat.lazy, ty) }, error.NeedLazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
else => |e| return e, else => |e| return e,
})) { })) {
return AbiAlignmentAdvanced{ .scalar = 1 }; return AbiAlignmentAdvanced{ .scalar = 1 };
} }
return child_type.abiAlignmentAdvanced(mod, strat); return child_type.abiAlignmentAdvanced(mod, strat);
}, },
.lazy => |arena| switch (try child_type.abiAlignmentAdvanced(mod, strat)) { .lazy => switch (try child_type.abiAlignmentAdvanced(mod, strat)) {
.scalar => |x| return AbiAlignmentAdvanced{ .scalar = @max(x, 1) }, .scalar => |x| return AbiAlignmentAdvanced{ .scalar = @max(x, 1) },
.val => return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }, .val => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
}, },
} }
} }
@ -1198,7 +1256,10 @@ pub const Type = struct {
if (!union_obj.haveFieldTypes()) switch (strat) { if (!union_obj.haveFieldTypes()) switch (strat) {
.eager => unreachable, // union layout not resolved .eager => unreachable, // union layout not resolved
.sema => unreachable, // handled above .sema => unreachable, // handled above
.lazy => |arena| return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }, .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
}; };
if (union_obj.fields.count() == 0) { if (union_obj.fields.count() == 0) {
if (have_tag) { if (have_tag) {
@ -1212,7 +1273,10 @@ pub const Type = struct {
if (have_tag) max_align = union_obj.tag_ty.abiAlignment(mod); if (have_tag) max_align = union_obj.tag_ty.abiAlignment(mod);
for (union_obj.fields.values()) |field| { for (union_obj.fields.values()) |field| {
if (!(field.ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) { if (!(field.ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
error.NeedLazy => return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(strat.lazy, ty) }, error.NeedLazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
else => |e| return e, else => |e| return e,
})) continue; })) continue;
@ -1223,7 +1287,10 @@ pub const Type = struct {
.val => switch (strat) { .val => switch (strat) {
.eager => unreachable, // struct layout not resolved .eager => unreachable, // struct layout not resolved
.sema => unreachable, // handled above .sema => unreachable, // handled above
.lazy => |arena| return AbiAlignmentAdvanced{ .val = try Value.Tag.lazy_align.create(arena, ty) }, .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_align = ty.ip_index },
} })).toValue() },
}, },
}; };
max_align = @max(max_align, field_align); max_align = @max(max_align, field_align);
@ -1232,8 +1299,8 @@ pub const Type = struct {
} }
/// May capture a reference to `ty`. /// May capture a reference to `ty`.
pub fn lazyAbiSize(ty: Type, mod: *Module, arena: Allocator) !Value { pub fn lazyAbiSize(ty: Type, mod: *Module) !Value {
switch (try ty.abiSizeAdvanced(mod, .{ .lazy = arena })) { switch (try ty.abiSizeAdvanced(mod, .lazy)) {
.val => |val| return val, .val => |val| return val,
.scalar => |x| return mod.intValue(Type.comptime_int, x), .scalar => |x| return mod.intValue(Type.comptime_int, x),
} }
@ -1283,7 +1350,10 @@ pub const Type = struct {
.scalar => |elem_size| return .{ .scalar = len * elem_size }, .scalar => |elem_size| return .{ .scalar = len * elem_size },
.val => switch (strat) { .val => switch (strat) {
.sema, .eager => unreachable, .sema, .eager => unreachable,
.lazy => |arena| return .{ .val = try Value.Tag.lazy_size.create(arena, ty) }, .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_size = ty.ip_index },
} })).toValue() },
}, },
} }
}, },
@ -1291,9 +1361,10 @@ pub const Type = struct {
const opt_sema = switch (strat) { const opt_sema = switch (strat) {
.sema => |sema| sema, .sema => |sema| sema,
.eager => null, .eager => null,
.lazy => |arena| return AbiSizeAdvanced{ .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.val = try Value.Tag.lazy_size.create(arena, ty), .ty = .comptime_int_type,
}, .storage = .{ .lazy_size = ty.ip_index },
} })).toValue() },
}; };
const elem_bits_u64 = try vector_type.child.toType().bitSizeAdvanced(mod, opt_sema); const elem_bits_u64 = try vector_type.child.toType().bitSizeAdvanced(mod, opt_sema);
const elem_bits = @intCast(u32, elem_bits_u64); const elem_bits = @intCast(u32, elem_bits_u64);
@ -1301,9 +1372,10 @@ pub const Type = struct {
const total_bytes = (total_bits + 7) / 8; const total_bytes = (total_bits + 7) / 8;
const alignment = switch (try ty.abiAlignmentAdvanced(mod, strat)) { const alignment = switch (try ty.abiAlignmentAdvanced(mod, strat)) {
.scalar => |x| x, .scalar => |x| x,
.val => return AbiSizeAdvanced{ .val => return .{ .val = (try mod.intern(.{ .int = .{
.val = try Value.Tag.lazy_size.create(strat.lazy, ty), .ty = .comptime_int_type,
}, .storage = .{ .lazy_size = ty.ip_index },
} })).toValue() },
}; };
const result = std.mem.alignForwardGeneric(u32, total_bytes, alignment); const result = std.mem.alignForwardGeneric(u32, total_bytes, alignment);
return AbiSizeAdvanced{ .scalar = result }; return AbiSizeAdvanced{ .scalar = result };
@ -1320,7 +1392,10 @@ pub const Type = struct {
// in abiAlignmentAdvanced. // in abiAlignmentAdvanced.
const code_size = abiSize(Type.anyerror, mod); const code_size = abiSize(Type.anyerror, mod);
if (!(payload_ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) { if (!(payload_ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
error.NeedLazy => return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(strat.lazy, ty) }, error.NeedLazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_size = ty.ip_index },
} })).toValue() },
else => |e| return e, else => |e| return e,
})) { })) {
// Same as anyerror. // Same as anyerror.
@ -1333,7 +1408,10 @@ pub const Type = struct {
.val => switch (strat) { .val => switch (strat) {
.sema => unreachable, .sema => unreachable,
.eager => unreachable, .eager => unreachable,
.lazy => |arena| return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(arena, ty) }, .lazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_size = ty.ip_index },
} })).toValue() },
}, },
}; };
@ -1420,11 +1498,10 @@ pub const Type = struct {
switch (strat) { switch (strat) {
.sema => |sema| try sema.resolveTypeLayout(ty), .sema => |sema| try sema.resolveTypeLayout(ty),
.lazy => |arena| { .lazy => if (!struct_obj.haveLayout()) return .{ .val = (try mod.intern(.{ .int = .{
if (!struct_obj.haveLayout()) { .ty = .comptime_int_type,
return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(arena, ty) }; .storage = .{ .lazy_size = ty.ip_index },
} } })).toValue() },
},
.eager => {}, .eager => {},
} }
assert(struct_obj.haveLayout()); assert(struct_obj.haveLayout());
@ -1433,12 +1510,13 @@ pub const Type = struct {
else => { else => {
switch (strat) { switch (strat) {
.sema => |sema| try sema.resolveTypeLayout(ty), .sema => |sema| try sema.resolveTypeLayout(ty),
.lazy => |arena| { .lazy => {
const struct_obj = mod.structPtrUnwrap(struct_type.index) orelse const struct_obj = mod.structPtrUnwrap(struct_type.index) orelse
return AbiSizeAdvanced{ .scalar = 0 }; return AbiSizeAdvanced{ .scalar = 0 };
if (!struct_obj.haveLayout()) { if (!struct_obj.haveLayout()) return .{ .val = (try mod.intern(.{ .int = .{
return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(arena, ty) }; .ty = .comptime_int_type,
} .storage = .{ .lazy_size = ty.ip_index },
} })).toValue() };
}, },
.eager => {}, .eager => {},
} }
@@ -1469,16 +1547,23 @@ pub const Type = struct {
             .enum_type => |enum_type| return AbiSizeAdvanced{ .scalar = enum_type.tag_ty.toType().abiSize(mod) },
             // values, not types
-            .undef => unreachable,
-            .un => unreachable,
-            .simple_value => unreachable,
-            .extern_func => unreachable,
-            .int => unreachable,
-            .float => unreachable,
-            .ptr => unreachable,
-            .opt => unreachable,
-            .enum_tag => unreachable,
-            .aggregate => unreachable,
+            .undef,
+            .runtime_value,
+            .simple_value,
+            .variable,
+            .extern_func,
+            .func,
+            .int,
+            .err,
+            .error_union,
+            .enum_literal,
+            .enum_tag,
+            .float,
+            .ptr,
+            .opt,
+            .aggregate,
+            .un,
+            => unreachable,
         },
     }
 }
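The rewritten arms above (and the matching hunks further down) fold the per-tag `=> unreachable` lines into one grouped prong while adding the newly ported value tags (`runtime_value`, `variable`, `func`, `err`, `error_union`, `enum_literal`). A tiny standalone example of that grouped-prong syntax, with hypothetical tag names:

    // Illustration only: several switch prongs can share a single `=> unreachable`.
    const Key = enum { int_type, ptr_type, int, float, ptr };
    fn assertIsType(key: Key) void {
        switch (key) {
            .int_type, .ptr_type => {},
            // values, not types
            .int,
            .float,
            .ptr,
            => unreachable,
        }
    }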
@@ -1492,11 +1577,10 @@ pub const Type = struct {
     ) Module.CompileError!AbiSizeAdvanced {
         switch (strat) {
             .sema => |sema| try sema.resolveTypeLayout(ty),
-            .lazy => |arena| {
-                if (!union_obj.haveLayout()) {
-                    return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(arena, ty) };
-                }
-            },
+            .lazy => if (!union_obj.haveLayout()) return .{ .val = (try mod.intern(.{ .int = .{
+                .ty = .comptime_int_type,
+                .storage = .{ .lazy_size = ty.ip_index },
+            } })).toValue() },
             .eager => {},
         }
         return AbiSizeAdvanced{ .scalar = union_obj.abiSize(mod, have_tag) };
@ -1514,7 +1598,10 @@ pub const Type = struct {
} }
if (!(child_ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) { if (!(child_ty.hasRuntimeBitsAdvanced(mod, false, strat) catch |err| switch (err) {
error.NeedLazy => return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(strat.lazy, ty) }, error.NeedLazy => return .{ .val = (try mod.intern(.{ .int = .{
.ty = .comptime_int_type,
.storage = .{ .lazy_size = ty.ip_index },
} })).toValue() },
else => |e| return e, else => |e| return e,
})) return AbiSizeAdvanced{ .scalar = 1 }; })) return AbiSizeAdvanced{ .scalar = 1 };
@@ -1527,7 +1614,10 @@ pub const Type = struct {
             .val => switch (strat) {
                 .sema => unreachable,
                 .eager => unreachable,
-                .lazy => |arena| return AbiSizeAdvanced{ .val = try Value.Tag.lazy_size.create(arena, ty) },
+                .lazy => return .{ .val = (try mod.intern(.{ .int = .{
+                    .ty = .comptime_int_type,
+                    .storage = .{ .lazy_size = ty.ip_index },
+                } })).toValue() },
             },
         };
@@ -1690,16 +1780,23 @@ pub const Type = struct {
             .enum_type => |enum_type| return bitSizeAdvanced(enum_type.tag_ty.toType(), mod, opt_sema),
             // values, not types
-            .undef => unreachable,
-            .un => unreachable,
-            .simple_value => unreachable,
-            .extern_func => unreachable,
-            .int => unreachable,
-            .float => unreachable,
-            .ptr => unreachable,
-            .opt => unreachable,
-            .enum_tag => unreachable,
-            .aggregate => unreachable,
+            .undef,
+            .runtime_value,
+            .simple_value,
+            .variable,
+            .extern_func,
+            .func,
+            .int,
+            .err,
+            .error_union,
+            .enum_literal,
+            .enum_tag,
+            .float,
+            .ptr,
+            .opt,
+            .aggregate,
+            .un,
+            => unreachable,
         }
     }
@@ -2270,16 +2367,23 @@ pub const Type = struct {
             .opaque_type => unreachable,
             // values, not types
-            .undef => unreachable,
-            .un => unreachable,
-            .simple_value => unreachable,
-            .extern_func => unreachable,
-            .int => unreachable,
-            .float => unreachable,
-            .ptr => unreachable,
-            .opt => unreachable,
-            .enum_tag => unreachable,
-            .aggregate => unreachable,
+            .undef,
+            .runtime_value,
+            .simple_value,
+            .variable,
+            .extern_func,
+            .func,
+            .int,
+            .err,
+            .error_union,
+            .enum_literal,
+            .enum_tag,
+            .float,
+            .ptr,
+            .opt,
+            .aggregate,
+            .un,
+            => unreachable,
         },
     };
 }
@@ -2443,16 +2547,17 @@ pub const Type = struct {
             .inferred_error_set_type,
             => return null,
-            .array_type => |array_type| {
-                if (array_type.len == 0)
-                    return Value.initTag(.empty_array);
-                if ((try array_type.child.toType().onePossibleValue(mod)) != null)
-                    return Value.initTag(.the_only_possible_value);
-                return null;
-            },
-            .vector_type => |vector_type| {
-                if (vector_type.len == 0) return Value.initTag(.empty_array);
-                if (try vector_type.child.toType().onePossibleValue(mod)) |v| return v;
+            inline .array_type, .vector_type => |seq_type| {
+                if (seq_type.len == 0) return (try mod.intern(.{ .aggregate = .{
+                    .ty = ty.ip_index,
+                    .storage = .{ .elems = &.{} },
+                } })).toValue();
+                if (try seq_type.child.toType().onePossibleValue(mod)) |opv| {
+                    return (try mod.intern(.{ .aggregate = .{
+                        .ty = ty.ip_index,
+                        .storage = .{ .repeated_elem = opv.ip_index },
+                    } })).toValue();
+                }
                 return null;
             },
             .opt_type => |child| {
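In the `onePossibleValue` hunk just above, the legacy `empty_array` and `the_only_possible_value` tags are gone: an empty array or vector interns an aggregate with no elements, and a sequence whose child has a single possible value interns a repeated-element aggregate. A hedged sketch of the two constructions, assuming `mod: *Module`, a sequence type `ty`, and an element value `opv` are in scope:

    // Empty sequence: aggregate whose storage is an empty element list.
    const empty = (try mod.intern(.{ .aggregate = .{
        .ty = ty.ip_index,
        .storage = .{ .elems = &.{} },
    } })).toValue();
    // Sequence whose only possible element is `opv`: repeated-element aggregate.
    const repeated = (try mod.intern(.{ .aggregate = .{
        .ty = ty.ip_index,
        .storage = .{ .repeated_elem = opv.ip_index },
    } })).toValue();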
@@ -2595,16 +2700,23 @@ pub const Type = struct {
             },
             // values, not types
-            .undef => unreachable,
-            .un => unreachable,
-            .simple_value => unreachable,
-            .extern_func => unreachable,
-            .int => unreachable,
-            .float => unreachable,
-            .ptr => unreachable,
-            .opt => unreachable,
-            .enum_tag => unreachable,
-            .aggregate => unreachable,
+            .undef,
+            .runtime_value,
+            .simple_value,
+            .variable,
+            .extern_func,
+            .func,
+            .int,
+            .err,
+            .error_union,
+            .enum_literal,
+            .enum_tag,
+            .float,
+            .ptr,
+            .opt,
+            .aggregate,
+            .un,
+            => unreachable,
         },
     };
 }
@@ -2733,16 +2845,23 @@ pub const Type = struct {
             .enum_type => |enum_type| enum_type.tag_ty.toType().comptimeOnly(mod),
             // values, not types
-            .undef => unreachable,
-            .un => unreachable,
-            .simple_value => unreachable,
-            .extern_func => unreachable,
-            .int => unreachable,
-            .float => unreachable,
-            .ptr => unreachable,
-            .opt => unreachable,
-            .enum_tag => unreachable,
-            .aggregate => unreachable,
+            .undef,
+            .runtime_value,
+            .simple_value,
+            .variable,
+            .extern_func,
+            .func,
+            .int,
+            .err,
+            .error_union,
+            .enum_literal,
+            .enum_tag,
+            .float,
+            .ptr,
+            .opt,
+            .aggregate,
+            .un,
+            => unreachable,
         },
     };
 }
@ -2802,13 +2921,12 @@ pub const Type = struct {
} }
// Works for vectors and vectors of integers. // Works for vectors and vectors of integers.
pub fn minInt(ty: Type, arena: Allocator, mod: *Module) !Value { pub fn minInt(ty: Type, mod: *Module) !Value {
const scalar = try minIntScalar(ty.scalarType(mod), mod); const scalar = try minIntScalar(ty.scalarType(mod), mod);
if (ty.zigTypeTag(mod) == .Vector and scalar.tag() != .the_only_possible_value) { return if (ty.zigTypeTag(mod) == .Vector) (try mod.intern(.{ .aggregate = .{
return Value.Tag.repeated.create(arena, scalar); .ty = ty.ip_index,
} else { .storage = .{ .repeated_elem = scalar.ip_index },
return scalar; } })).toValue() else scalar;
}
} }
/// Asserts that the type is an integer. /// Asserts that the type is an integer.
@@ -2832,13 +2950,12 @@ pub const Type = struct {
     // Works for vectors and vectors of integers.
     /// The returned Value will have type dest_ty.
-    pub fn maxInt(ty: Type, arena: Allocator, mod: *Module, dest_ty: Type) !Value {
+    pub fn maxInt(ty: Type, mod: *Module, dest_ty: Type) !Value {
         const scalar = try maxIntScalar(ty.scalarType(mod), mod, dest_ty);
-        if (ty.zigTypeTag(mod) == .Vector and scalar.tag() != .the_only_possible_value) {
-            return Value.Tag.repeated.create(arena, scalar);
-        } else {
-            return scalar;
-        }
+        return if (ty.zigTypeTag(mod) == .Vector) (try mod.intern(.{ .aggregate = .{
+            .ty = ty.ip_index,
+            .storage = .{ .repeated_elem = scalar.ip_index },
+        } })).toValue() else scalar;
     }
     /// The returned Value will have type dest_ty.
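Because the vector results of `minInt` and `maxInt` are now interned rather than arena-allocated, both functions drop their `arena: Allocator` parameter. A hedged call-site sketch of the updated signatures (`ty` and `dest_ty` stand in for whatever types the caller has):

    // Before this commit a caller passed an arena:
    //     const min = try ty.minInt(arena, mod);
    // After it, only the module is needed:
    const min = try ty.minInt(mod);
    const max = try ty.maxInt(mod, dest_ty);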
@@ -3386,12 +3503,12 @@ pub const Type = struct {
     pub const @"c_ulonglong": Type = .{ .ip_index = .c_ulonglong_type };
     pub const @"c_longdouble": Type = .{ .ip_index = .c_longdouble_type };
-    pub const const_slice_u8: Type = .{ .ip_index = .const_slice_u8_type };
+    pub const slice_const_u8: Type = .{ .ip_index = .slice_const_u8_type };
     pub const manyptr_u8: Type = .{ .ip_index = .manyptr_u8_type };
     pub const single_const_pointer_to_comptime_int: Type = .{
         .ip_index = .single_const_pointer_to_comptime_int_type,
     };
-    pub const const_slice_u8_sentinel_0: Type = .{ .ip_index = .const_slice_u8_sentinel_0_type };
+    pub const slice_const_u8_sentinel_0: Type = .{ .ip_index = .slice_const_u8_sentinel_0_type };
     pub const empty_struct_literal: Type = .{ .ip_index = .empty_struct_type };
     pub const generic_poison: Type = .{ .ip_index = .generic_poison_type };
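The remaining hunks are the mechanical rename from `const_slice*` to `slice_const*`, applied to the `Type` shorthands above and to the Python pretty-printer tables below. A usage sketch of the renamed shorthands (illustration only):

    // The shorthand declarations keep their meaning; only the names change.
    const str_ty: Type = Type.slice_const_u8; // []const u8
    const c_str_ty: Type = Type.slice_const_u8_sentinel_0; // [:0]const u8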

File diff suppressed because it is too large

View file

@@ -533,8 +533,8 @@ type_tag_handlers = {
     'empty_struct_literal': lambda payload: '@TypeOf(.{})',
     'anyerror_void_error_union': lambda payload: 'anyerror!void',
-    'const_slice_u8': lambda payload: '[]const u8',
-    'const_slice_u8_sentinel_0': lambda payload: '[:0]const u8',
+    'slice_const_u8': lambda payload: '[]const u8',
+    'slice_const_u8_sentinel_0': lambda payload: '[:0]const u8',
     'fn_noreturn_no_args': lambda payload: 'fn() noreturn',
     'fn_void_no_args': lambda payload: 'fn() void',
     'fn_naked_noreturn_no_args': lambda payload: 'fn() callconv(.Naked) noreturn',
@@ -560,7 +560,7 @@ type_tag_handlers = {
     'many_mut_pointer': lambda payload: '[*]%s' % type_Type_SummaryProvider(payload),
     'c_const_pointer': lambda payload: '[*c]const %s' % type_Type_SummaryProvider(payload),
     'c_mut_pointer': lambda payload: '[*c]%s' % type_Type_SummaryProvider(payload),
-    'const_slice': lambda payload: '[]const %s' % type_Type_SummaryProvider(payload),
+    'slice_const': lambda payload: '[]const %s' % type_Type_SummaryProvider(payload),
     'mut_slice': lambda payload: '[]%s' % type_Type_SummaryProvider(payload),
     'int_signed': lambda payload: 'i%d' % payload.unsigned,
     'int_unsigned': lambda payload: 'u%d' % payload.unsigned,

View file

@@ -18,7 +18,7 @@ class TypePrinter:
         'many_mut_pointer': 'Type.Payload.ElemType',
         'c_const_pointer': 'Type.Payload.ElemType',
         'c_mut_pointer': 'Type.Payload.ElemType',
-        'const_slice': 'Type.Payload.ElemType',
+        'slice_const': 'Type.Payload.ElemType',
         'mut_slice': 'Type.Payload.ElemType',
         'optional': 'Type.Payload.ElemType',
         'optional_single_mut_pointer': 'Type.Payload.ElemType',