Zir: implement explicit block_comptime instruction

Resolves: #7056
Authored by mlugg on 2023-03-05 12:39:32 +00:00; committed by Andrew Kelley
parent 602029bb2f
commit ccf670c2b0
18 changed files with 234 additions and 304 deletions
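
For orientation, a hedged sketch (not from this diff) of the source construct the commit is about: a `comptime` block inside a runtime function. Before this commit, AstGen tracked such regions with a `force_comptime` flag on the enclosing scope; after it, the body is wrapped in an explicit `block_comptime` ZIR instruction that Sema analyzes with `is_comptime` set.

const std = @import("std");

fn scaled(runtime_extra: usize) usize {
    // This block now lowers to `block_comptime`; its result is computed
    // during semantic analysis and only then handed to runtime code.
    const base = comptime blk: {
        var n: usize = 1;
        while (n < 1000) n *= 2;
        break :blk n;
    };
    return base + runtime_extra;
}

test "comptime block feeding runtime code" {
    var extra: usize = 4;
    extra += 0; // keep it runtime-known
    try std.testing.expect(scaled(extra) == 1024 + 4);
}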


@ -374,7 +374,7 @@ pub const Manifest = struct {
self.failed_file_index = null;
const ext = ".txt";
var manifest_file_path: [self.hex_digest.len + ext.len]u8 = undefined;
var manifest_file_path: [hex_digest_len + ext.len]u8 = undefined;
var bin_digest: BinDigest = undefined;
self.hash.hasher.final(&bin_digest);
@ -389,7 +389,7 @@ pub const Manifest = struct {
self.hash.hasher.update(&bin_digest);
mem.copy(u8, &manifest_file_path, &self.hex_digest);
manifest_file_path[self.hex_digest.len..][0..ext.len].* = ext.*;
manifest_file_path[hex_digest_len..][0..ext.len].* = ext.*;
if (self.files.items.len == 0) {
// If there are no file inputs, we check if the manifest file exists instead of

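Several of the std changes in this commit — the Manifest change above, and the ed25519, UNIX-address, and ArmCpuinfoImpl changes below — follow the same pattern: type annotations that named runtime values. Array lengths are type-level expressions, so they are now lowered through `comptimeExpr`, and the apparent motivation for these edits is that a length may no longer refer to a runtime binding such as `self` or `entry`. A minimal sketch with hypothetical names:

const std = @import("std");

const hex_digest_len = 16; // comptime-known stand-in for the real constant

const Thing = struct {
    hex_digest: [hex_digest_len]u8,

    fn path(self: *const Thing) [hex_digest_len + 4]u8 {
        const ext = ".txt";
        // Presumably rejected under the new lowering: the array length
        // is evaluated in an explicit comptime block, where the runtime
        // `self` is not available.
        //var buf: [self.hex_digest.len + ext.len]u8 = undefined;

        // Fix: spell the length with comptime-known constants.
        var buf: [hex_digest_len + ext.len]u8 = undefined;
        std.mem.copy(u8, buf[0..hex_digest_len], &self.hex_digest);
        buf[hex_digest_len..][0..ext.len].* = ext.*;
        return buf;
    }
};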

@ -622,7 +622,7 @@ test "ed25519 test vectors" {
},
};
for (entries) |entry| {
var msg: [entry.msg_hex.len / 2]u8 = undefined;
var msg: [64 / 2]u8 = undefined;
_ = try fmt.hexToBytes(&msg, entry.msg_hex);
var public_key_bytes: [32]u8 = undefined;
_ = try fmt.hexToBytes(&public_key_bytes, entry.public_key_hex);


@ -177,9 +177,9 @@ test "std.enums.directEnumArrayDefault slice" {
/// Cast an enum literal, value, or string to the enum value of type E
/// with the same name.
pub fn nameCast(comptime E: type, comptime value: anytype) E {
comptime {
return comptime blk: {
const V = @TypeOf(value);
if (V == E) return value;
if (V == E) break :blk value;
var name: ?[]const u8 = switch (@typeInfo(V)) {
.EnumLiteral, .Enum => @tagName(value),
.Pointer => if (std.meta.trait.isZigString(V)) value else null,
@ -187,12 +187,12 @@ pub fn nameCast(comptime E: type, comptime value: anytype) E {
};
if (name) |n| {
if (@hasField(E, n)) {
return @field(E, n);
break :blk @field(E, n);
}
@compileError("Enum " ++ @typeName(E) ++ " has no field named " ++ n);
}
@compileError("Cannot cast from " ++ @typeName(@TypeOf(value)) ++ " to " ++ @typeName(E));
}
};
}
test "std.enums.nameCast" {

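Another recurring std fix, seen in `nameCast` above and in `fieldNames`, `tags`, `isZigString`, `utf8ToUtf16LeStringLiteral`, and `TestManifestConfigDefaults` below: `return` from inside a `comptime { ... }` block. Because the block is now a real comptime scope, returning from the (possibly runtime) enclosing function inside it trips the new Sema error "function called at runtime cannot return value at comptime"; the mechanical fix is a labeled block that breaks out its value. A hedged before/after sketch:

// Before: `return` escapes the comptime block into the function frame,
// which now errors when the function is called at runtime.
fn before() u32 {
    comptime {
        return 1 + 2;
    }
}

// After: the comptime block yields its value with a labeled break.
fn after() u32 {
    return comptime blk: {
        break :blk 1 + 2;
    };
}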

@ -877,7 +877,7 @@ fn testDivFloor() !void {
/// zero.
pub fn divCeil(comptime T: type, numerator: T, denominator: T) !T {
@setRuntimeSafety(false);
if (comptime std.meta.trait.isNumber(T) and denominator == 0) return error.DivisionByZero;
if ((comptime std.meta.trait.isNumber(T)) and denominator == 0) return error.DivisionByZero;
const info = @typeInfo(T);
switch (info) {
.ComptimeFloat, .Float => return @ceil(numerator / denominator),

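The parenthesization in `divCeil` above is load-bearing: prefix `comptime` binds to the whole following expression, so without parens the runtime `denominator == 0` comparison would itself be forced into the comptime block. A small sketch of the distinction (hypothetical helper name):

const std = @import("std");

fn divByZero(comptime T: type, denominator: T) bool {
    // `comptime std.meta.trait.isNumber(T) and denominator == 0` would
    // comptime-evaluate the entire `and`, failing for runtime operands.
    return (comptime std.meta.trait.isNumber(T)) and denominator == 0;
}

test "runtime denominator" {
    var d: u32 = 0;
    d += 0; // keep it runtime-known
    try std.testing.expect(divByZero(u32, d));
}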

@ -549,14 +549,14 @@ test "std.meta.FieldType" {
}
pub fn fieldNames(comptime T: type) *const [fields(T).len][]const u8 {
comptime {
return comptime blk: {
const fieldInfos = fields(T);
var names: [fieldInfos.len][]const u8 = undefined;
for (fieldInfos, 0..) |field, i| {
names[i] = field.name;
}
return &names;
}
break :blk &names;
};
}
test "std.meta.fieldNames" {
@ -590,14 +590,14 @@ test "std.meta.fieldNames" {
/// Given an enum or error set type, returns a pointer to an array containing all tags for that
/// enum or error set.
pub fn tags(comptime T: type) *const [fields(T).len]T {
comptime {
return comptime blk: {
const fieldInfos = fields(T);
var res: [fieldInfos.len]T = undefined;
for (fieldInfos, 0..) |field, i| {
res[i] = @field(T, field.name);
}
return &res;
}
break :blk &res;
};
}
test "std.meta.tags" {


@ -400,18 +400,18 @@ test "isTuple" {
/// *const u8, ?[]const u8, ?*const [N]u8.
/// ```
pub fn isZigString(comptime T: type) bool {
comptime {
return comptime blk: {
// Only pointer types can be strings, no optionals
const info = @typeInfo(T);
if (info != .Pointer) return false;
if (info != .Pointer) break :blk false;
const ptr = &info.Pointer;
// Check for CV qualifiers that would prevent coercion to []const u8
if (ptr.is_volatile or ptr.is_allowzero) return false;
if (ptr.is_volatile or ptr.is_allowzero) break :blk false;
// If it's already a slice, simple check.
if (ptr.size == .Slice) {
return ptr.child == u8;
break :blk ptr.child == u8;
}
// Otherwise check if it's an array type that coerces to slice.
@ -419,12 +419,12 @@ pub fn isZigString(comptime T: type) bool {
const child = @typeInfo(ptr.child);
if (child == .Array) {
const arr = &child.Array;
return arr.child == u8;
break :blk arr.child == u8;
}
}
return false;
}
break :blk false;
};
}
test "isZigString" {


@ -99,7 +99,7 @@ test "parse and render UNIX addresses" {
const fmt_addr = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable;
try std.testing.expectEqualSlices(u8, "/tmp/testpath", fmt_addr);
const too_long = [_]u8{'a'} ** (addr.un.path.len + 1);
const too_long = [_]u8{'a'} ** 200;
try testing.expectError(error.NameTooLong, net.Address.initUnix(too_long[0..]));
}


@ -774,13 +774,13 @@ test "utf8ToUtf16LeWithNull" {
/// Converts a UTF-8 string literal into a UTF-16LE string literal.
pub fn utf8ToUtf16LeStringLiteral(comptime utf8: []const u8) *const [calcUtf16LeLen(utf8) catch unreachable:0]u16 {
comptime {
return comptime blk: {
const len: usize = calcUtf16LeLen(utf8) catch |err| @compileError(err);
var utf16le: [len:0]u16 = [_:0]u16{0} ** len;
const utf16le_len = utf8ToUtf16Le(&utf16le, utf8[0..]) catch |err| @compileError(err);
assert(len == utf16le_len);
return &utf16le;
}
break :blk &utf16le;
};
}
const CalcUtf16LeLenError = Utf8DecodeError || error{Utf8InvalidStartByte};


@ -147,7 +147,9 @@ test "cpuinfo: PowerPC" {
}
const ArmCpuinfoImpl = struct {
cores: [4]CoreInfo = undefined,
const num_cores = 4;
cores: [num_cores]CoreInfo = undefined,
core_no: usize = 0,
have_fields: usize = 0,
@ -162,7 +164,7 @@ const ArmCpuinfoImpl = struct {
const cpu_models = @import("arm.zig").cpu_models;
fn addOne(self: *ArmCpuinfoImpl) void {
if (self.have_fields == 4 and self.core_no < self.cores.len) {
if (self.have_fields == 4 and self.core_no < num_cores) {
if (self.core_no > 0) {
// Deduplicate the core info.
for (self.cores[0..self.core_no]) |it| {
@ -222,7 +224,7 @@ const ArmCpuinfoImpl = struct {
else => false,
};
var known_models: [self.cores.len]?*const Target.Cpu.Model = undefined;
var known_models: [num_cores]?*const Target.Cpu.Model = undefined;
for (self.cores[0..self.core_no], 0..) |core, i| {
known_models[i] = cpu_models.isKnown(.{
.architecture = core.architecture,


@ -137,7 +137,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
var gz_instructions: std.ArrayListUnmanaged(Zir.Inst.Index) = .{};
var gen_scope: GenZir = .{
.force_comptime = true,
.is_comptime = true,
.parent = &top_scope.base,
.anon_name_strategy = .parent,
.decl_node_index = 0,
@ -362,11 +362,7 @@ const type_ri: ResultInfo = .{ .rl = .{ .ty = .type_type } };
const coerced_type_ri: ResultInfo = .{ .rl = .{ .coerced_ty = .type_type } };
fn typeExpr(gz: *GenZir, scope: *Scope, type_node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const prev_force_comptime = gz.force_comptime;
gz.force_comptime = true;
defer gz.force_comptime = prev_force_comptime;
return expr(gz, scope, coerced_type_ri, type_node);
return comptimeExpr(gz, scope, coerced_type_ri, type_node);
}
fn reachableTypeExpr(
@ -375,11 +371,7 @@ fn reachableTypeExpr(
type_node: Ast.Node.Index,
reachable_node: Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const prev_force_comptime = gz.force_comptime;
gz.force_comptime = true;
defer gz.force_comptime = prev_force_comptime;
return reachableExpr(gz, scope, coerced_type_ri, type_node, reachable_node);
return reachableExprComptime(gz, scope, coerced_type_ri, type_node, reachable_node, true);
}
/// Same as `expr` but fails with a compile error if the result type is `noreturn`.
@ -401,11 +393,11 @@ fn reachableExprComptime(
reachable_node: Ast.Node.Index,
force_comptime: bool,
) InnerError!Zir.Inst.Ref {
const prev_force_comptime = gz.force_comptime;
gz.force_comptime = prev_force_comptime or force_comptime;
defer gz.force_comptime = prev_force_comptime;
const result_inst = if (force_comptime)
try comptimeExpr(gz, scope, ri, node)
else
try expr(gz, scope, ri, node);
const result_inst = try expr(gz, scope, ri, node);
if (gz.refIsNoReturn(result_inst)) {
try gz.astgen.appendErrorNodeNotes(reachable_node, "unreachable code", .{}, &[_]u32{
try gz.astgen.errNoteNode(node, "control flow is diverted here", .{}),
@ -825,7 +817,6 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
_ = try gz.addAsIndex(.{
.tag = .@"unreachable",
.data = .{ .@"unreachable" = .{
.force_comptime = gz.force_comptime,
.src_node = gz.nodeIndexToRelative(node),
} },
});
@ -1578,12 +1569,7 @@ fn arrayInitExprRlPtrInner(
_ = try expr(gz, scope, .{ .rl = .{ .ptr = .{ .inst = elem_ptr } } }, elem_init);
}
const tag: Zir.Inst.Tag = if (gz.force_comptime)
.validate_array_init_comptime
else
.validate_array_init;
_ = try gz.addPlNodePayloadIndex(tag, node, payload_index);
_ = try gz.addPlNodePayloadIndex(.validate_array_init, node, payload_index);
return .void_value;
}
@ -1800,12 +1786,7 @@ fn structInitExprRlPtrInner(
_ = try expr(gz, scope, .{ .rl = .{ .ptr = .{ .inst = field_ptr } } }, field_init);
}
const tag: Zir.Inst.Tag = if (gz.force_comptime)
.validate_struct_init_comptime
else
.validate_struct_init;
_ = try gz.addPlNodePayloadIndex(tag, node, payload_index);
_ = try gz.addPlNodePayloadIndex(.validate_struct_init, node, payload_index);
return Zir.Inst.Ref.void_value;
}
@ -1843,23 +1824,105 @@ fn structInitExprRlTy(
return try gz.addPlNodePayloadIndex(tag, node, payload_index);
}
/// This calls expr in a comptime scope, and is intended to be called as a helper function.
/// The one that corresponds to `comptime` expression syntax is `comptimeExprAst`.
/// This explicitly calls expr in a comptime scope by wrapping it in a `block_comptime` if
/// necessary. It should be used whenever we need to force compile-time evaluation of something,
/// such as a type.
/// The function corresponding to `comptime` expression syntax is `comptimeExprAst`.
fn comptimeExpr(
gz: *GenZir,
scope: *Scope,
ri: ResultInfo,
node: Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const prev_force_comptime = gz.force_comptime;
gz.force_comptime = true;
defer gz.force_comptime = prev_force_comptime;
if (gz.is_comptime) {
// No need to change anything!
return expr(gz, scope, ri, node);
}
// There's an optimization here: if the body will be evaluated at comptime regardless, there's
// no need to wrap it in a block. This is hard to determine in general, but we can identify a
// common subset of trivially comptime expressions to take down the size of the ZIR a bit.
const tree = gz.astgen.tree;
const main_tokens = tree.nodes.items(.main_token);
const node_tags = tree.nodes.items(.tag);
switch (node_tags[node]) {
// Any identifier in `primitive_instrs` is trivially comptime. In particular, this includes
// some common types, so we can elide `block_comptime` for a few common type annotations.
.identifier => {
const ident_token = main_tokens[node];
const ident_name_raw = tree.tokenSlice(ident_token);
if (primitive_instrs.get(ident_name_raw)) |zir_const_ref| {
// No need to worry about result location here, we're not creating a comptime block!
return rvalue(gz, ri, zir_const_ref, node);
}
},
// We can also avoid the block for a few trivial AST tags which are always comptime-known.
.number_literal, .string_literal, .multiline_string_literal, .enum_literal, .error_value => {
// No need to worry about result location here, we're not creating a comptime block!
return expr(gz, scope, ri, node);
},
// Lastly, for labelled blocks, avoid emitting a labelled block directly inside this
// comptime block, because that would be silly! Note that we don't bother doing this for
// unlabelled blocks, since they don't generate blocks at comptime anyway (see `blockExpr`).
.block_two, .block_two_semicolon, .block, .block_semicolon => {
const token_tags = tree.tokens.items(.tag);
const lbrace = main_tokens[node];
if (token_tags[lbrace - 1] == .colon and
token_tags[lbrace - 2] == .identifier)
{
const node_datas = tree.nodes.items(.data);
switch (node_tags[node]) {
.block_two, .block_two_semicolon => {
const stmts: [2]Ast.Node.Index = .{ node_datas[node].lhs, node_datas[node].rhs };
const stmt_slice = if (stmts[0] == 0)
stmts[0..0]
else if (stmts[1] == 0)
stmts[0..1]
else
stmts[0..2];
// Careful! We can't pass in the real result location here, since it may
// refer to runtime memory. A runtime-to-comptime boundary has to remove
// result location information, compute the result, and copy it to the true
// result location at runtime. We do this below as well.
const block_ref = try labeledBlockExpr(gz, scope, .{ .rl = .none }, node, stmt_slice, true);
return rvalue(gz, ri, block_ref, node);
},
.block, .block_semicolon => {
const stmts = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs];
// Replace result location and copy back later - see above.
const block_ref = try labeledBlockExpr(gz, scope, .{ .rl = .none }, node, stmts, true);
return rvalue(gz, ri, block_ref, node);
},
else => unreachable,
}
}
},
// In other cases, we don't optimize anything - we need a wrapper comptime block.
else => {},
}
var block_scope = gz.makeSubBlock(scope);
block_scope.is_comptime = true;
defer block_scope.unstack();
const block_inst = try gz.makeBlockInst(.block_comptime, node);
// Replace result location and copy back later - see above.
const block_result = try expr(&block_scope, scope, .{ .rl = .none }, node);
if (!gz.refIsNoReturn(block_result)) {
_ = try block_scope.addBreak(.@"break", block_inst, block_result);
}
try block_scope.setBlockBody(block_inst);
try gz.instructions.append(gz.astgen.gpa, block_inst);
return rvalue(gz, ri, indexToRef(block_inst), node);
}
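Two details of the new `comptimeExpr` are worth spelling out. First, the fast paths mean trivially comptime expressions emit no wrapper block. Second, when a wrapper is emitted, the caller's result location is deliberately replaced with `.rl = .none` and the value is copied over via `rvalue` afterwards, since a comptime body must not write through a possibly-runtime result pointer. A hedged source-level sketch of the two paths:

const std = @import("std");

fn sample() u32 {
    // Fast path: `u32` hits `primitive_instrs`, and integer literals are
    // trivially comptime, so no block_comptime wrapper is emitted here.
    const a: u32 = 123;

    // Wrapped path: this computed type expression is not trivially
    // comptime, so it is evaluated inside a block_comptime, with the
    // result location stripped to `.none` and copied back via `rvalue`.
    const b: std.meta.Int(.unsigned, 32) = a;

    return b;
}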
/// This one is for an actual `comptime` syntax, and will emit a compile error if
/// the scope already has `force_comptime=true`.
/// the scope is already known to be comptime-evaluated.
/// See `comptimeExpr` for the helper function for calling expr in a comptime scope.
fn comptimeExprAst(
gz: *GenZir,
@ -1868,16 +1931,13 @@ fn comptimeExprAst(
node: Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
if (gz.force_comptime) {
if (gz.is_comptime) {
return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
}
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
const body_node = node_datas[node].lhs;
gz.force_comptime = true;
const result = try expr(gz, scope, ri, body_node);
gz.force_comptime = false;
return result;
return comptimeExpr(gz, scope, ri, body_node);
}
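For completeness, the error path in `comptimeExprAst` above fires on source like the following hedged example, where the keyword adds nothing because the surrounding scope is already comptime:

comptime {
    // error: redundant comptime keyword in already comptime scope
    //const x = comptime 1 + 2;

    const x = 1 + 2; // already comptime here; no keyword needed
    _ = x;
}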
/// Restore the error return trace index. Performs the restore only if the result is a non-error or
@ -1961,7 +2021,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
};
// If we made it here, this block is the target of the break expr
const break_tag: Zir.Inst.Tag = if (block_gz.is_inline or block_gz.force_comptime)
const break_tag: Zir.Inst.Tag = if (block_gz.is_inline)
.break_inline
else
.@"break";
@ -1973,7 +2033,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
try genDefers(parent_gz, scope, parent_scope, .normal_only);
// As our last action before the break, "pop" the error trace if needed
if (!block_gz.force_comptime)
if (!block_gz.is_comptime)
_ = try parent_gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always);
_ = try parent_gz.addBreak(break_tag, block_inst, .void_value);
@ -1986,7 +2046,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
try genDefers(parent_gz, scope, parent_scope, .normal_only);
// As our last action before the break, "pop" the error trace if needed
if (!block_gz.force_comptime)
if (!block_gz.is_comptime)
try restoreErrRetIndex(parent_gz, .{ .block = block_inst }, block_gz.break_result_info, rhs, operand);
switch (block_gz.break_result_info.rl) {
@ -2062,7 +2122,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
continue;
}
const break_tag: Zir.Inst.Tag = if (gen_zir.is_inline or gen_zir.force_comptime)
const break_tag: Zir.Inst.Tag = if (gen_zir.is_inline)
.break_inline
else
.@"break";
@ -2071,7 +2131,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
}
// As our last action before the continue, "pop" the error trace if needed
if (!gen_zir.force_comptime)
if (!gen_zir.is_comptime)
_ = try parent_gz.addRestoreErrRetIndex(.{ .block = continue_block }, .always);
_ = try parent_gz.addBreak(break_tag, continue_block, .void_value);
@ -2116,10 +2176,10 @@ fn blockExpr(
if (token_tags[lbrace - 1] == .colon and
token_tags[lbrace - 2] == .identifier)
{
return labeledBlockExpr(gz, scope, ri, block_node, statements);
return labeledBlockExpr(gz, scope, ri, block_node, statements, false);
}
if (!gz.force_comptime) {
if (!gz.is_comptime) {
// Since this block is unlabeled, its control flow is effectively linear and we
// can *almost* get away with inlining the block here. However, we actually need
// to preserve the .block for Sema, to properly pop the error return trace.
@ -2136,9 +2196,7 @@ fn blockExpr(
if (!block_scope.endsWithNoReturn()) {
// As our last action before the break, "pop" the error trace if needed
_ = try gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always);
const break_tag: Zir.Inst.Tag = if (block_scope.force_comptime) .break_inline else .@"break";
_ = try block_scope.addBreak(break_tag, block_inst, .void_value);
_ = try block_scope.addBreak(.@"break", block_inst, .void_value);
}
try block_scope.setBlockBody(block_inst);
@ -2188,6 +2246,7 @@ fn labeledBlockExpr(
ri: ResultInfo,
block_node: Ast.Node.Index,
statements: []const Ast.Node.Index,
force_comptime: bool,
) InnerError!Zir.Inst.Ref {
const tracy = trace(@src());
defer tracy.end();
@ -2205,16 +2264,16 @@ fn labeledBlockExpr(
// Reserve the Block ZIR instruction index so that we can put it into the GenZir struct
// so that break statements can reference it.
const block_tag: Zir.Inst.Tag = if (gz.force_comptime) .block_inline else .block;
const block_tag: Zir.Inst.Tag = if (force_comptime) .block_comptime else .block;
const block_inst = try gz.makeBlockInst(block_tag, block_node);
try gz.instructions.append(astgen.gpa, block_inst);
var block_scope = gz.makeSubBlock(parent_scope);
block_scope.label = GenZir.Label{
.token = label_token,
.block_inst = block_inst,
};
block_scope.setBreakResultInfo(ri);
if (force_comptime) block_scope.is_comptime = true;
defer block_scope.unstack();
defer block_scope.labeled_breaks.deinit(astgen.gpa);
@ -2222,9 +2281,7 @@ fn labeledBlockExpr(
if (!block_scope.endsWithNoReturn()) {
// As our last action before the return, "pop" the error trace if needed
_ = try gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always);
const break_tag: Zir.Inst.Tag = if (block_scope.force_comptime) .break_inline else .@"break";
_ = try block_scope.addBreak(break_tag, block_inst, .void_value);
_ = try block_scope.addBreak(.@"break", block_inst, .void_value);
}
if (!block_scope.label.?.used) {
@ -2436,6 +2493,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.bitcast,
.bit_or,
.block,
.block_comptime,
.block_inline,
.suspend_block,
.loop,
@ -2610,8 +2668,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.for_len,
.@"try",
.try_ptr,
//.try_inline,
//.try_ptr_inline,
=> break :b false,
.extended => switch (gz.astgen.instructions.items(.data)[inst].extended.opcode) {
@ -2638,7 +2694,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.repeat,
.repeat_inline,
.panic,
.panic_comptime,
.trap,
.check_comptime_control_flow,
=> {
@ -2665,9 +2720,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.store_to_inferred_ptr,
.resolve_inferred_alloc,
.validate_struct_init,
.validate_struct_init_comptime,
.validate_array_init,
.validate_array_init_comptime,
.set_runtime_safety,
.closure_capture,
.memcpy,
@ -2988,7 +3041,7 @@ fn varDecl(
return &sub_scope.base;
}
const is_comptime = gz.force_comptime or
const is_comptime = gz.is_comptime or
tree.nodes.items(.tag)[var_decl.ast.init_node] == .@"comptime";
// Detect whether the initialization expression actually uses the
@ -3133,7 +3186,7 @@ fn varDecl(
const old_rl_ty_inst = gz.rl_ty_inst;
defer gz.rl_ty_inst = old_rl_ty_inst;
const is_comptime = var_decl.comptime_token != null or gz.force_comptime;
const is_comptime = var_decl.comptime_token != null or gz.is_comptime;
var resolve_inferred_alloc: Zir.Inst.Ref = .none;
const var_data: struct {
result_info: ResultInfo,
@ -3211,7 +3264,7 @@ fn emitDbgNode(gz: *GenZir, node: Ast.Node.Index) !void {
// The instruction emitted here is for debugging runtime code.
// If the current block will be evaluated only during semantic analysis
// then no dbg_stmt ZIR instruction is needed.
if (gz.force_comptime) return;
if (gz.is_comptime) return;
const astgen = gz.astgen;
astgen.advanceSourceCursorToNode(node);
@ -3631,7 +3684,7 @@ fn fnDecl(
astgen.advanceSourceCursorToNode(decl_node);
var decl_gz: GenZir = .{
.force_comptime = true,
.is_comptime = true,
.decl_node_index = fn_proto.ast.proto_node,
.decl_line = astgen.source_line,
.parent = scope,
@ -3642,7 +3695,7 @@ fn fnDecl(
defer decl_gz.unstack();
var fn_gz: GenZir = .{
.force_comptime = false,
.is_comptime = false,
.decl_node_index = fn_proto.ast.proto_node,
.decl_line = decl_gz.decl_line,
.parent = &decl_gz.base,
@ -4005,7 +4058,7 @@ fn globalVarDecl(
.decl_node_index = node,
.decl_line = astgen.source_line,
.astgen = astgen,
.force_comptime = true,
.is_comptime = true,
.anon_name_strategy = .parent,
.instructions = gz.instructions,
.instructions_top = gz.instructions.items.len,
@ -4156,7 +4209,7 @@ fn comptimeDecl(
astgen.advanceSourceCursorToNode(node);
var decl_block: GenZir = .{
.force_comptime = true,
.is_comptime = true,
.decl_node_index = node,
.decl_line = astgen.source_line,
.parent = scope,
@ -4210,7 +4263,7 @@ fn usingnamespaceDecl(
astgen.advanceSourceCursorToNode(node);
var decl_block: GenZir = .{
.force_comptime = true,
.is_comptime = true,
.decl_node_index = node,
.decl_line = astgen.source_line,
.parent = scope,
@ -4257,7 +4310,7 @@ fn testDecl(
astgen.advanceSourceCursorToNode(node);
var decl_block: GenZir = .{
.force_comptime = true,
.is_comptime = true,
.decl_node_index = node,
.decl_line = astgen.source_line,
.parent = scope,
@ -4353,7 +4406,7 @@ fn testDecl(
};
var fn_block: GenZir = .{
.force_comptime = false,
.is_comptime = false,
.decl_node_index = node,
.decl_line = decl_block.decl_line,
.parent = &decl_block.base,
@ -4477,7 +4530,7 @@ fn structDeclInner(
.decl_node_index = node,
.decl_line = gz.decl_line,
.astgen = astgen,
.force_comptime = true,
.is_comptime = true,
.instructions = gz.instructions,
.instructions_top = gz.instructions.items.len,
};
@ -4720,7 +4773,7 @@ fn unionDeclInner(
.decl_node_index = node,
.decl_line = gz.decl_line,
.astgen = astgen,
.force_comptime = true,
.is_comptime = true,
.instructions = gz.instructions,
.instructions_top = gz.instructions.items.len,
};
@ -5006,7 +5059,7 @@ fn containerDecl(
.decl_node_index = node,
.decl_line = gz.decl_line,
.astgen = astgen,
.force_comptime = true,
.is_comptime = true,
.instructions = gz.instructions,
.instructions_top = gz.instructions.items.len,
};
@ -5115,7 +5168,7 @@ fn containerDecl(
.decl_node_index = node,
.decl_line = gz.decl_line,
.astgen = astgen,
.force_comptime = true,
.is_comptime = true,
.instructions = gz.instructions,
.instructions_top = gz.instructions.items.len,
};
@ -5304,7 +5357,7 @@ fn tryExpr(
// Then we will save the line/column so that we can emit another one that goes
// "backwards" because we want to evaluate the operand, but then put the debug
// info back at the try keyword for error return tracing.
if (!parent_gz.force_comptime) {
if (!parent_gz.is_comptime) {
try emitDbgNode(parent_gz, node);
}
const try_line = astgen.source_line - parent_gz.decl_line;
@ -5316,17 +5369,7 @@ fn tryExpr(
};
// This could be a pointer or value depending on the `ri` parameter.
const operand = try reachableExpr(parent_gz, scope, operand_ri, operand_node, node);
const is_inline = parent_gz.force_comptime;
const is_inline_bit = @as(u2, @boolToInt(is_inline));
const is_ptr_bit = @as(u2, @boolToInt(operand_ri.rl == .ref)) << 1;
const block_tag: Zir.Inst.Tag = switch (is_inline_bit | is_ptr_bit) {
0b00 => .@"try",
0b01 => .@"try",
//0b01 => .try_inline,
0b10 => .try_ptr,
0b11 => .try_ptr,
//0b11 => .try_ptr_inline,
};
const block_tag: Zir.Inst.Tag = if (operand_ri.rl == .ref) .try_ptr else .@"try";
const try_inst = try parent_gz.makeBlockInst(block_tag, node);
try parent_gz.instructions.append(astgen.gpa, try_inst);
@ -5382,11 +5425,9 @@ fn orelseCatchExpr(
// up for this fact by calling rvalue on the else branch.
const operand = try reachableExpr(&block_scope, &block_scope.base, operand_ri, lhs, rhs);
const cond = try block_scope.addUnNode(cond_op, operand, node);
const condbr_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .condbr_inline else .condbr;
const condbr = try block_scope.addCondBr(condbr_tag, node);
const condbr = try block_scope.addCondBr(.condbr, node);
const block_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .block_inline else .block;
const block = try parent_gz.makeBlockInst(block_tag, node);
const block = try parent_gz.makeBlockInst(.block, node);
try block_scope.setBlockBody(block);
// block_scope unstacked now, can add new instructions to parent_gz
try parent_gz.instructions.append(astgen.gpa, block);
@ -5445,7 +5486,6 @@ fn orelseCatchExpr(
// instructions into place until we know whether to keep store_to_block_ptr
// instructions or not.
const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break";
const result = try finishThenElseBlock(
parent_gz,
ri,
@ -5461,7 +5501,7 @@ fn orelseCatchExpr(
rhs,
block,
block,
break_tag,
.@"break",
);
return result;
}
@ -5747,11 +5787,9 @@ fn ifExpr(
}
};
const condbr_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .condbr_inline else .condbr;
const condbr = try block_scope.addCondBr(condbr_tag, node);
const condbr = try block_scope.addCondBr(.condbr, node);
const block_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .block_inline else .block;
const block = try parent_gz.makeBlockInst(block_tag, node);
const block = try parent_gz.makeBlockInst(.block, node);
try block_scope.setBlockBody(block);
// block_scope unstacked now, can add new instructions to parent_gz
try parent_gz.instructions.append(astgen.gpa, block);
@ -5891,7 +5929,6 @@ fn ifExpr(
},
};
const break_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .break_inline else .@"break";
const result = try finishThenElseBlock(
parent_gz,
ri,
@ -5907,7 +5944,7 @@ fn ifExpr(
else_info.src,
block,
block,
break_tag,
.@"break",
);
return result;
}
@ -6043,7 +6080,7 @@ fn whileExpr(
try astgen.checkLabelRedefinition(scope, label_token);
}
const is_inline = parent_gz.force_comptime or while_full.inline_token != null;
const is_inline = while_full.inline_token != null;
const loop_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .loop;
const loop_block = try parent_gz.makeBlockInst(loop_tag, node);
try parent_gz.instructions.append(astgen.gpa, loop_block);
@ -6315,7 +6352,7 @@ fn forExpr(
try astgen.checkLabelRedefinition(scope, label_token);
}
const is_inline = parent_gz.force_comptime or for_full.inline_token != null;
const is_inline = for_full.inline_token != null;
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
const node_tags = tree.nodes.items(.tag);
@ -7114,7 +7151,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
// Then we will save the line/column so that we can emit another one that goes
// "backwards" because we want to evaluate the operand, but then put the debug
// info back at the return keyword for error return tracing.
if (!gz.force_comptime) {
if (!gz.is_comptime) {
try emitDbgNode(gz, node);
}
const ret_line = astgen.source_line - gz.decl_line;
@ -7859,7 +7896,7 @@ fn typeOf(
const typeof_inst = try gz.makeBlockInst(.typeof_builtin, node);
var typeof_scope = gz.makeSubBlock(scope);
typeof_scope.force_comptime = false;
typeof_scope.is_comptime = false;
typeof_scope.c_import = false;
defer typeof_scope.unstack();
@ -7880,7 +7917,7 @@ fn typeOf(
const typeof_inst = try gz.addExtendedMultiOpPayloadIndex(.typeof_peer, payload_index, args.len);
var typeof_scope = gz.makeSubBlock(scope);
typeof_scope.force_comptime = false;
typeof_scope.is_comptime = false;
for (args, 0..) |arg, i| {
const param_ref = try reachableExpr(&typeof_scope, &typeof_scope.base, .{ .rl = .none }, arg, node);
@ -8207,7 +8244,7 @@ fn builtinCall(
},
.panic => {
try emitDbgNode(gz, node);
return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], if (gz.force_comptime) .panic_comptime else .panic);
return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[0], .panic);
},
.trap => {
try emitDbgNode(gz, node);
@ -8431,7 +8468,6 @@ fn builtinCall(
.args = args,
.flags = .{
.is_nosuspend = gz.nosuspend_node != 0,
.is_comptime = gz.force_comptime,
.ensure_result_used = false,
},
});
@ -8644,15 +8680,14 @@ fn simpleUnOp(
operand_node: Ast.Node.Index,
tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
const prev_force_comptime = gz.force_comptime;
defer gz.force_comptime = prev_force_comptime;
switch (tag) {
.tag_name, .error_name, .ptr_to_int => try emitDbgNode(gz, node),
.compile_error => gz.force_comptime = true,
else => {},
}
const operand = try expr(gz, scope, operand_ri, operand_node);
const operand = if (tag == .compile_error)
try comptimeExpr(gz, scope, operand_ri, operand_node)
else
try expr(gz, scope, operand_ri, operand_node);
const result = try gz.addUnNode(tag, operand, node);
return rvalue(gz, ri, result, node);
}
@ -8814,7 +8849,7 @@ fn cImport(
if (gz.c_import) return gz.astgen.failNode(node, "cannot nest @cImport", .{});
var block_scope = gz.makeSubBlock(scope);
block_scope.force_comptime = true;
block_scope.is_comptime = true;
block_scope.c_import = true;
defer block_scope.unstack();
@ -8860,7 +8895,7 @@ fn callExpr(
const callee = try calleeExpr(gz, scope, call.ast.fn_expr);
const modifier: std.builtin.CallModifier = blk: {
if (gz.force_comptime) {
if (gz.is_comptime) {
break :blk .compile_time;
}
if (call.async_token != null) {
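The call lowering above pairs with the Sema change later in this commit: the ZIR `@call` flag `is_comptime` is dropped, and Sema instead consults `block.is_comptime`. User-visible behavior stays the same; any call made in a comptime scope is a comptime call. A small sketch:

const std = @import("std");

fn add(x: u32, y: u32) u32 {
    return x + y;
}

comptime {
    // In a comptime scope, `.auto` is upgraded to a compile-time call;
    // no per-call flag is recorded in ZIR anymore.
    const sum = @call(.auto, add, .{ 1, 2 });
    std.debug.assert(sum == 3);
}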
@ -10875,7 +10910,10 @@ const Scope = struct {
const GenZir = struct {
const base_tag: Scope.Tag = .gen_zir;
base: Scope = Scope{ .tag = base_tag },
force_comptime: bool,
/// Whether we're already in a scope known to be comptime. This is set
/// whenever we know Sema will analyze the current block with `is_comptime`,
/// for instance when we're within a `struct_decl` or a `block_comptime`.
is_comptime: bool,
/// This is set to true for inline loops; false otherwise.
is_inline: bool = false,
c_import: bool = false,
@ -10962,7 +11000,7 @@ const GenZir = struct {
fn makeSubBlock(gz: *GenZir, scope: *Scope) GenZir {
return .{
.force_comptime = gz.force_comptime,
.is_comptime = gz.is_comptime,
.c_import = gz.c_import,
.decl_node_index = gz.decl_node_index,
.decl_line = gz.decl_line,
@ -12405,7 +12443,7 @@ const GenZir = struct {
}
fn addDbgVar(gz: *GenZir, tag: Zir.Inst.Tag, name: u32, inst: Zir.Inst.Ref) !void {
if (gz.force_comptime) return;
if (gz.is_comptime) return;
_ = try gz.add(.{ .tag = tag, .data = .{
.str_op = .{
@ -12416,13 +12454,13 @@ const GenZir = struct {
}
fn addDbgBlockBegin(gz: *GenZir) !void {
if (gz.force_comptime) return;
if (gz.is_comptime) return;
_ = try gz.add(.{ .tag = .dbg_block_begin, .data = undefined });
}
fn addDbgBlockEnd(gz: *GenZir) !void {
if (gz.force_comptime) return;
if (gz.is_comptime) return;
const gpa = gz.astgen.gpa;
const tags = gz.astgen.instructions.items(.tag);
@ -12554,7 +12592,7 @@ fn detectLocalShadowing(
/// Advances the source cursor to the main token of `node` if not in comptime scope.
/// Usually paired with `emitDbgStmt`.
fn maybeAdvanceSourceCursorToMainToken(gz: *GenZir, node: Ast.Node.Index) void {
if (gz.force_comptime) return;
if (gz.is_comptime) return;
const tree = gz.astgen.tree;
const token_starts = tree.tokens.items(.start);
@ -12765,7 +12803,7 @@ fn countBodyLenAfterFixups(astgen: *AstGen, body: []const Zir.Inst.Index) u32 {
}
fn emitDbgStmt(gz: *GenZir, line: u32, column: u32) !void {
if (gz.force_comptime) return;
if (gz.is_comptime) return;
_ = try gz.add(.{ .tag = .dbg_stmt, .data = .{
.dbg_stmt = .{


@ -1112,8 +1112,7 @@ fn analyzeBodyInner(
.ret_load => break sema.zirRetLoad(block, inst),
.ret_err_value => break sema.zirRetErrValue(block, inst),
.@"unreachable" => break sema.zirUnreachable(block, inst),
.panic => break sema.zirPanic(block, inst, false),
.panic_comptime => break sema.zirPanic(block, inst, true),
.panic => break sema.zirPanic(block, inst),
.trap => break sema.zirTrap(block, inst),
// zig fmt: on
@ -1292,22 +1291,12 @@ fn analyzeBodyInner(
continue;
},
.validate_struct_init => {
try sema.zirValidateStructInit(block, inst, false);
i += 1;
continue;
},
.validate_struct_init_comptime => {
try sema.zirValidateStructInit(block, inst, true);
try sema.zirValidateStructInit(block, inst);
i += 1;
continue;
},
.validate_array_init => {
try sema.zirValidateArrayInit(block, inst, false);
i += 1;
continue;
},
.validate_array_init_comptime => {
try sema.zirValidateArrayInit(block, inst, true);
try sema.zirValidateArrayInit(block, inst);
i += 1;
continue;
},
@ -1464,8 +1453,10 @@ fn analyzeBodyInner(
break break_data.inst;
}
},
.block => blk: {
if (!block.is_comptime) break :blk try sema.zirBlock(block, inst);
.block, .block_comptime => blk: {
if (!block.is_comptime) {
break :blk try sema.zirBlock(block, inst, tags[inst] == .block_comptime);
}
// Same as `block_inline`. TODO https://github.com/ziglang/zig/issues/8220
const inst_data = datas[inst].pl_node;
const extra = sema.code.extraData(Zir.Inst.Block, inst_data.payload_index);
@ -1649,38 +1640,6 @@ fn analyzeBodyInner(
break break_data.inst;
}
},
//.try_inline => blk: {
// const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
// const src = inst_data.src();
// const operand_src: LazySrcLoc = .{ .node_offset_bin_lhs = inst_data.src_node };
// const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
// const inline_body = sema.code.extra[extra.end..][0..extra.data.body_len];
// const operand = try sema.resolveInst(extra.data.operand);
// const operand_ty = sema.typeOf(operand);
// const is_ptr = operand_ty.zigTypeTag() == .Pointer;
// const err_union = if (is_ptr)
// try sema.analyzeLoad(block, src, operand, operand_src)
// else
// operand;
// const is_non_err = try sema.analyzeIsNonErrComptimeOnly(block, operand_src, err_union);
// assert(is_non_err != .none);
// const is_non_err_tv = try sema.resolveInstConst(block, operand_src, is_non_err);
// if (is_non_err_tv.val.toBool()) {
// if (is_ptr) {
// break :blk try sema.analyzeErrUnionPayloadPtr(block, src, operand, false, false);
// } else {
// const err_union_ty = sema.typeOf(err_union);
// break :blk try sema.analyzeErrUnionPayload(block, src, err_union_ty, operand, operand_src, false);
// }
// }
// const break_data = (try sema.analyzeBodyBreak(block, inline_body)) orelse
// break always_noreturn;
// if (inst == break_data.block_inst) {
// break :blk try sema.resolveInst(break_data.operand);
// } else {
// break break_data.inst;
// }
//},
.try_ptr => blk: {
if (!block.is_comptime) break :blk try sema.zirTryPtr(block, inst);
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
@ -1707,28 +1666,6 @@ fn analyzeBodyInner(
break break_data.inst;
}
},
//.try_ptr_inline => blk: {
// const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
// const src = inst_data.src();
// const operand_src: LazySrcLoc = .{ .node_offset_bin_lhs = inst_data.src_node };
// const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
// const inline_body = sema.code.extra[extra.end..][0..extra.data.body_len];
// const operand = try sema.resolveInst(extra.data.operand);
// const err_union = try sema.analyzeLoad(block, src, operand, operand_src);
// const is_non_err = try sema.analyzeIsNonErrComptimeOnly(block, operand_src, err_union);
// assert(is_non_err != .none);
// const is_non_err_tv = try sema.resolveInstConst(block, operand_src, is_non_err);
// if (is_non_err_tv.val.toBool()) {
// break :blk try sema.analyzeErrUnionPayloadPtr(block, src, operand, false, false);
// }
// const break_data = (try sema.analyzeBodyBreak(block, inline_body)) orelse
// break always_noreturn;
// if (inst == break_data.block_inst) {
// break :blk try sema.resolveInst(break_data.operand);
// } else {
// break break_data.inst;
// }
//},
.@"defer" => blk: {
const inst_data = sema.code.instructions.items(.data)[inst].@"defer";
const defer_body = sema.code.extra[inst_data.index..][0..inst_data.len];
@ -4175,7 +4112,6 @@ fn zirValidateStructInit(
sema: *Sema,
block: *Block,
inst: Zir.Inst.Index,
is_comptime: bool,
) CompileError!void {
const tracy = trace(@src());
defer tracy.end();
@ -4194,7 +4130,6 @@ fn zirValidateStructInit(
agg_ty,
init_src,
instrs,
is_comptime,
),
.Union => return sema.validateUnionInit(
block,
@ -4202,7 +4137,6 @@ fn zirValidateStructInit(
init_src,
instrs,
object_ptr,
is_comptime,
),
else => unreachable,
}
@ -4215,7 +4149,6 @@ fn validateUnionInit(
init_src: LazySrcLoc,
instrs: []const Zir.Inst.Index,
union_ptr: Air.Inst.Ref,
is_comptime: bool,
) CompileError!void {
if (instrs.len != 1) {
const msg = msg: {
@ -4238,7 +4171,7 @@ fn validateUnionInit(
return sema.failWithOwnedErrorMsg(msg);
}
if ((is_comptime or block.is_comptime) and
if (block.is_comptime and
(try sema.resolveDefinedValue(block, init_src, union_ptr)) != null)
{
// In this case, comptime machinery already did everything. No work to do here.
@ -4342,7 +4275,6 @@ fn validateStructInit(
struct_ty: Type,
init_src: LazySrcLoc,
instrs: []const Zir.Inst.Index,
is_comptime: bool,
) CompileError!void {
const gpa = sema.gpa;
@ -4382,7 +4314,7 @@ fn validateStructInit(
errdefer if (root_msg) |msg| msg.destroy(sema.gpa);
const struct_ptr = try sema.resolveInst(struct_ptr_zir_ref);
if ((is_comptime or block.is_comptime) and
if (block.is_comptime and
(try sema.resolveDefinedValue(block, init_src, struct_ptr)) != null)
{
try sema.resolveStructLayout(struct_ty);
@ -4606,7 +4538,6 @@ fn zirValidateArrayInit(
sema: *Sema,
block: *Block,
inst: Zir.Inst.Index,
is_comptime: bool,
) CompileError!void {
const validate_inst = sema.code.instructions.items(.data)[inst].pl_node;
const init_src = validate_inst.src();
@ -4654,7 +4585,7 @@ fn zirValidateArrayInit(
else => unreachable,
};
if ((is_comptime or block.is_comptime) and
if (block.is_comptime and
(try sema.resolveDefinedValue(block, init_src, array_ptr)) != null)
{
// In this case the comptime machinery will have evaluated the store instructions
@ -5222,12 +5153,12 @@ fn zirCompileLog(
return Air.Inst.Ref.void_value;
}
fn zirPanic(sema: *Sema, block: *Block, inst: Zir.Inst.Index, force_comptime: bool) CompileError!Zir.Inst.Index {
fn zirPanic(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Zir.Inst.Index {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();
const msg_inst = try sema.resolveInst(inst_data.operand);
if (block.is_comptime or force_comptime) {
if (block.is_comptime) {
return sema.fail(block, src, "encountered @panic at comptime", .{});
}
try sema.panicWithMsg(block, src, msg_inst);
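With the dedicated `panic_comptime` tag gone, comptime detection for `@panic` rests entirely on `block.is_comptime`, which the new `block_comptime` instruction sets. A hedged sketch of the diagnostic:

fn boom() void {
    comptime {
        // error: encountered @panic at comptime
        @panic("unreachable configuration");
    }
}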
@ -5441,7 +5372,7 @@ fn zirSuspendBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) Comp
return sema.failWithUseOfAsync(parent_block, src);
}
fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index, force_comptime: bool) CompileError!Air.Inst.Ref {
const tracy = trace(@src());
defer tracy.end();
@ -5479,7 +5410,7 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErro
.instructions = .{},
.label = &label,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.is_comptime = parent_block.is_comptime or force_comptime,
.comptime_reason = parent_block.comptime_reason,
.is_typeof = parent_block.is_typeof,
.want_safety = parent_block.want_safety,
@ -17140,7 +17071,7 @@ fn zirUnreachable(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
const inst_data = sema.code.instructions.items(.data)[inst].@"unreachable";
const src = inst_data.src();
if (block.is_comptime or inst_data.force_comptime) {
if (block.is_comptime) {
return sema.fail(block, src, "reached unreachable code", .{});
}
// TODO Add compile error for @optimizeFor occurring too late in a scope.
@ -17410,6 +17341,8 @@ fn analyzeRet(
try inlining.merges.results.append(sema.gpa, operand);
_ = try block.addBr(inlining.merges.block_inst, operand);
return always_noreturn;
} else if (block.is_comptime) {
return sema.fail(block, src, "function called at runtime cannot return value at comptime", .{});
}
try sema.resolveTypeLayout(sema.fn_ret_ty);
@ -17422,6 +17355,7 @@ fn analyzeRet(
}
_ = try block.addUnOp(.ret, operand);
return always_noreturn;
}
@ -21560,7 +21494,7 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
switch (modifier) {
// These can be upgraded to comptime or nosuspend calls.
.auto, .never_tail, .no_async => {
if (extra.flags.is_comptime) {
if (block.is_comptime) {
if (modifier == .never_tail) {
return sema.fail(block, modifier_src, "unable to perform 'never_tail' call at compile-time", .{});
}
@ -21575,12 +21509,12 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
return sema.fail(block, func_src, "modifier '{s}' requires a comptime-known function", .{@tagName(modifier)});
};
if (extra.flags.is_comptime) {
if (block.is_comptime) {
modifier = .compile_time;
}
},
.always_tail => {
if (extra.flags.is_comptime) {
if (block.is_comptime) {
modifier = .compile_time;
}
},
@ -21588,12 +21522,12 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
if (extra.flags.is_nosuspend) {
return sema.fail(block, modifier_src, "modifier 'async_kw' cannot be used inside nosuspend block", .{});
}
if (extra.flags.is_comptime) {
if (block.is_comptime) {
return sema.fail(block, modifier_src, "modifier 'async_kw' cannot be used in combination with comptime function call", .{});
}
},
.never_inline => {
if (extra.flags.is_comptime) {
if (block.is_comptime) {
return sema.fail(block, modifier_src, "unable to perform 'never_inline' call at compile-time", .{});
}
},


@ -258,6 +258,9 @@ pub const Inst = struct {
/// A labeled block of code, which can return a value.
/// Uses the `pl_node` union field. Payload is `Block`.
block,
/// Like `block`, but forces full evaluation of its contents at compile-time.
/// Uses the `pl_node` union field. Payload is `Block`.
block_comptime,
/// A list of instructions which are analyzed in the parent context, without
/// generating a runtime block. Must terminate with an "inline" variant of
/// a noreturn instruction.
@ -338,14 +341,8 @@ pub const Inst = struct {
/// payload value, as if `err_union_payload_unsafe` was executed on the operand.
/// Uses the `pl_node` union field. Payload is `Try`.
@"try",
///// Same as `try` except the operand is coerced to a comptime value, and
///// only the taken branch is analyzed. The block must terminate with an "inline"
///// variant of a noreturn instruction.
//try_inline,
/// Same as `try` except the operand is a pointer and the result is a pointer.
try_ptr,
///// Same as `try_inline` except the operand is a pointer and the result is a pointer.
//try_ptr_inline,
/// An error set type definition. Contains a list of field names.
/// Uses the `pl_node` union field. Payload is `ErrorSetDecl`.
error_set_decl,
@ -723,9 +720,6 @@ pub const Inst = struct {
/// because it must use one of them to find out the struct type.
/// Uses the `pl_node` field. Payload is `Block`.
validate_struct_init,
/// Same as `validate_struct_init` but additionally communicates that the
/// resulting struct initialization value is within a comptime scope.
validate_struct_init_comptime,
/// Given a set of `elem_ptr_imm` instructions, assumes they are all part of an
/// array initialization expression, and emits a compile error if the number of
/// elements does not match the array type.
@ -733,9 +727,6 @@ pub const Inst = struct {
/// because it must use one of them to find out the array type.
/// Uses the `pl_node` field. Payload is `Block`.
validate_array_init,
/// Same as `validate_array_init` but additionally communicates that the
/// resulting array initialization value is within a comptime scope.
validate_array_init_comptime,
/// Check that operand type supports the dereference operand (.*).
/// Uses the `un_node` field.
validate_deref,
@ -806,8 +797,6 @@ pub const Inst = struct {
error_name,
/// Implement builtin `@panic`. Uses `un_node`.
panic,
/// Same as `panic` but forces comptime.
panic_comptime,
/// Implements `@trap`.
/// Uses the `node` field.
trap,
@ -1050,6 +1039,7 @@ pub const Inst = struct {
.bitcast,
.bit_or,
.block,
.block_comptime,
.block_inline,
.suspend_block,
.loop,
@ -1162,9 +1152,7 @@ pub const Inst = struct {
.validate_array_init_ty,
.validate_struct_init_ty,
.validate_struct_init,
.validate_struct_init_comptime,
.validate_array_init,
.validate_array_init_comptime,
.validate_deref,
.struct_init_empty,
.struct_init,
@ -1254,8 +1242,6 @@ pub const Inst = struct {
.ret_type,
.@"try",
.try_ptr,
//.try_inline,
//.try_ptr_inline,
.@"defer",
.defer_err_code,
.save_err_ret_index,
@ -1276,7 +1262,6 @@ pub const Inst = struct {
.repeat,
.repeat_inline,
.panic,
.panic_comptime,
.trap,
.check_comptime_control_flow,
=> true,
@ -1318,9 +1303,7 @@ pub const Inst = struct {
.validate_array_init_ty,
.validate_struct_init_ty,
.validate_struct_init,
.validate_struct_init_comptime,
.validate_array_init,
.validate_array_init_comptime,
.validate_deref,
.@"export",
.export_value,
@ -1365,6 +1348,7 @@ pub const Inst = struct {
.bitcast,
.bit_or,
.block,
.block_comptime,
.block_inline,
.suspend_block,
.loop,
@ -1552,13 +1536,10 @@ pub const Inst = struct {
.repeat,
.repeat_inline,
.panic,
.panic_comptime,
.trap,
.for_len,
.@"try",
.try_ptr,
//.try_inline,
//.try_ptr_inline,
=> false,
.extended => switch (data.extended.opcode) {
@ -1603,6 +1584,7 @@ pub const Inst = struct {
.bit_not = .un_node,
.bit_or = .pl_node,
.block = .pl_node,
.block_comptime = .pl_node,
.block_inline = .pl_node,
.suspend_block = .pl_node,
.bool_not = .un_node,
@ -1624,8 +1606,6 @@ pub const Inst = struct {
.condbr_inline = .pl_node,
.@"try" = .pl_node,
.try_ptr = .pl_node,
//.try_inline = .pl_node,
//.try_ptr_inline = .pl_node,
.error_set_decl = .pl_node,
.error_set_decl_anon = .pl_node,
.error_set_decl_func = .pl_node,
@ -1721,9 +1701,7 @@ pub const Inst = struct {
.validate_array_init_ty = .pl_node,
.validate_struct_init_ty = .un_node,
.validate_struct_init = .pl_node,
.validate_struct_init_comptime = .pl_node,
.validate_array_init = .pl_node,
.validate_array_init_comptime = .pl_node,
.validate_deref = .un_node,
.struct_init_empty = .un_node,
.field_type = .pl_node,
@ -1750,7 +1728,6 @@ pub const Inst = struct {
.embed_file = .un_node,
.error_name = .un_node,
.panic = .un_node,
.panic_comptime = .un_node,
.trap = .node,
.set_runtime_safety = .un_node,
.sqrt = .un_node,
@ -2605,7 +2582,6 @@ pub const Inst = struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
src_node: i32,
force_comptime: bool,
pub fn src(self: @This()) LazySrcLoc {
return LazySrcLoc.nodeOffset(self.src_node);
@ -2920,9 +2896,8 @@ pub const Inst = struct {
pub const Flags = packed struct {
is_nosuspend: bool,
is_comptime: bool,
ensure_result_used: bool,
_: u29 = undefined,
_: u30 = undefined,
comptime {
if (@sizeOf(Flags) != 4 or @bitSizeOf(Flags) != 32)
@ -3912,7 +3887,7 @@ fn findDeclsInner(
// Block instructions, recurse over the bodies.
.block, .block_inline => {
.block, .block_comptime, .block_inline => {
const inst_data = datas[inst].pl_node;
const extra = zir.extraData(Inst.Block, inst_data.payload_index);
const body = zir.extra[extra.end..][0..extra.data.body_len];
@ -4139,7 +4114,9 @@ pub fn getFnInfo(zir: Zir, fn_inst: Inst.Index) FnInfo {
},
else => unreachable,
};
assert(tags[info.param_block] == .block or tags[info.param_block] == .block_inline);
assert(tags[info.param_block] == .block or
tags[info.param_block] == .block_comptime or
tags[info.param_block] == .block_inline);
const param_block = zir.extraData(Inst.Block, datas[info.param_block].pl_node.payload_index);
const param_body = zir.extra[param_block.end..][0..param_block.data.body_len];
var total_params_len: u32 = 0;


@ -195,7 +195,6 @@ const Writer = struct {
.embed_file,
.error_name,
.panic,
.panic_comptime,
.set_runtime_safety,
.sqrt,
.sin,
@ -365,13 +364,12 @@ const Writer = struct {
.call => try self.writeCall(stream, inst),
.block,
.block_comptime,
.block_inline,
.suspend_block,
.loop,
.validate_struct_init,
.validate_struct_init_comptime,
.validate_array_init,
.validate_array_init_comptime,
.c_import,
.typeof_builtin,
=> try self.writeBlock(stream, inst),
@ -811,7 +809,6 @@ const Writer = struct {
try self.writeFlag(stream, "nodiscard ", extra.flags.ensure_result_used);
try self.writeFlag(stream, "nosuspend ", extra.flags.is_nosuspend);
try self.writeFlag(stream, "comptime ", extra.flags.is_comptime);
try self.writeInstRef(stream, extra.modifier);
try stream.writeAll(", ");


@ -1455,7 +1455,7 @@ test "floatToInt to zero-bit int" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
var a: f32 = 0.0;
const a: f32 = 0.0;
comptime try std.testing.expect(@floatToInt(u0, a) == 0);
}
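The `var` → `const` change above reflects the stricter boundary: the operand of a `comptime` expression is now evaluated inside a real comptime scope, which presumably can no longer read a runtime `var` even when its value looks statically known. A hedged sketch:

const std = @import("std");

test "comptime expression operands" {
    var a: f32 = 0.0;
    a += 0.0; // `a` is runtime; using it in the comptime expr would error
    const b: f32 = 0.0; // comptime-known: fine
    comptime std.debug.assert(@floatToInt(u0, b) == 0);
}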
@ -1507,15 +1507,6 @@ test "optional pointer coerced to optional allowzero pointer" {
try expect(@ptrToInt(q.?) == 4);
}
test "ptrToInt on const inside comptime block" {
var a = comptime blk: {
const b: u8 = 1;
const c = @ptrToInt(&b);
break :blk c;
};
try expect(@intToPtr(*const u8, a).* == 1);
}
test "single item pointer to pointer to array to slice" {
var x: i32 = 1234;
try expect(@as([]const i32, @as(*[1]i32, &x))[0] == 1234);


@ -181,9 +181,7 @@ fn testTryToTrickEvalWithRuntimeIf(b: bool) usize {
const result = if (b) false else true;
_ = result;
}
comptime {
return i;
}
return comptime i;
}
test "@setEvalBranchQuota" {


@ -16,7 +16,6 @@ pub export fn entry2() void {
// backend=stage2
// target=native
//
// :4:15: error: unable to resolve comptime value
// :4:15: note: condition in comptime branch must be comptime-known
// :11:11: error: unable to resolve comptime value
// :11:11: note: condition in comptime branch must be comptime-known
// :4:15: error: unable to evaluate comptime expression
// :4:13: note: operation is runtime due to this operand
// :11:11: error: unable to evaluate comptime expression
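The expected messages above change because the condition is no longer special-cased as a "comptime branch" condition; it is just an operand inside a `block_comptime` that turns out to be runtime. The test's source is not shown in this hunk; a hypothetical reproducer of the new diagnostic shape:

export fn entry() void {
    var cond = true;
    // error: unable to evaluate comptime expression
    // note: operation is runtime due to this operand
    const x = comptime if (cond) @as(u32, 1) else 2;
    _ = x;
}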


@ -5,13 +5,7 @@ export fn b() void {
comptime bar();
}
fn bar() u8 {
const u32_max = @import("std").math.maxInt(u32);
@setEvalBranchQuota(u32_max);
var x: u32 = 0;
while (x != u32_max) : (x +%= 1) {}
return 0;
return 2;
}
// error
@ -21,6 +15,6 @@ fn bar() u8 {
// :2:5: error: value of type 'comptime_int' ignored
// :2:5: note: all non-void values must be used
// :2:5: note: this error can be suppressed by assigning the value to '_'
// :5:17: error: value of type 'u8' ignored
// :5:17: note: all non-void values must be used
// :5:17: note: this error can be suppressed by assigning the value to '_'
// :5:5: error: value of type 'u8' ignored
// :5:5: note: all non-void values must be used
// :5:5: note: this error can be suppressed by assigning the value to '_'


@ -718,7 +718,7 @@ const TestManifestConfigDefaults = struct {
if (@"type" == .@"error") {
return "native";
}
comptime {
return comptime blk: {
var defaults: []const u8 = "";
// TODO should we only return "mainstream" targets by default here?
// TODO we should also specify ABIs explicitly as the backends are
@ -735,8 +735,8 @@ const TestManifestConfigDefaults = struct {
defaults = defaults ++ "x86_64-windows" ++ ",";
// Wasm
defaults = defaults ++ "wasm32-wasi";
return defaults;
}
break :blk defaults;
};
} else if (std.mem.eql(u8, key, "output_mode")) {
return switch (@"type") {
.@"error" => "Obj",