Merge pull request #13497 from Vexu/stage2-fixes

Stage2 bug fixes
Authored by Veikka Tuominen on 2022-11-13 17:35:57 +02:00, committed by Andrew Kelley
parent 7748d2bb44
commit b494f6a9dc
17 changed files with 329 additions and 65 deletions


@@ -339,6 +339,8 @@ pub const ResultInfo = struct {
         fn_arg,
         /// The expression is the right-hand side of an initializer for a `const` variable
         const_init,
+        /// The expression is the right-hand side of an assignment expression.
+        assignment,
         /// No specific operator in particular.
         none,
     };
@@ -826,7 +828,13 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
        .slice_open => {
            const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
+           maybeAdvanceSourceCursorToMainToken(gz, node);
+           const line = gz.astgen.source_line - gz.decl_line;
+           const column = gz.astgen.source_column;
            const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[node].rhs);
+           try emitDbgStmt(gz, line, column);
            const result = try gz.addPlNode(.slice_start, node, Zir.Inst.SliceStart{
                .lhs = lhs,
                .start = start,
@@ -835,9 +843,15 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
        },
        .slice => {
            const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
+           maybeAdvanceSourceCursorToMainToken(gz, node);
+           const line = gz.astgen.source_line - gz.decl_line;
+           const column = gz.astgen.source_column;
            const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice);
            const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.start);
            const end = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end);
+           try emitDbgStmt(gz, line, column);
            const result = try gz.addPlNode(.slice_end, node, Zir.Inst.SliceEnd{
                .lhs = lhs,
                .start = start,
@@ -847,10 +861,16 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
        },
        .slice_sentinel => {
            const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
+           maybeAdvanceSourceCursorToMainToken(gz, node);
+           const line = gz.astgen.source_line - gz.decl_line;
+           const column = gz.astgen.source_column;
            const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel);
            const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.start);
            const end = if (extra.end != 0) try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, extra.end) else .none;
            const sentinel = try expr(gz, scope, .{ .rl = .none }, extra.sentinel);
+           try emitDbgStmt(gz, line, column);
            const result = try gz.addPlNode(.slice_sentinel, node, Zir.Inst.SliceSentinel{
                .lhs = lhs,
                .start = start,
@@ -881,16 +901,26 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
            return rvalue(gz, ri, result, node);
        },
        .unwrap_optional => switch (ri.rl) {
-           .ref => return gz.addUnNode(
-               .optional_payload_safe_ptr,
-               try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs),
-               node,
-           ),
-           else => return rvalue(gz, ri, try gz.addUnNode(
-               .optional_payload_safe,
-               try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs),
-               node,
-           ), node),
+           .ref => {
+               const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
+               maybeAdvanceSourceCursorToMainToken(gz, node);
+               const line = gz.astgen.source_line - gz.decl_line;
+               const column = gz.astgen.source_column;
+               try emitDbgStmt(gz, line, column);
+               return gz.addUnNode(.optional_payload_safe_ptr, lhs, node);
+           },
+           else => {
+               const lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs);
+               maybeAdvanceSourceCursorToMainToken(gz, node);
+               const line = gz.astgen.source_line - gz.decl_line;
+               const column = gz.astgen.source_column;
+               try emitDbgStmt(gz, line, column);
+               return rvalue(gz, ri, try gz.addUnNode(.optional_payload_safe, lhs, node), node);
+           },
        },
        .block_two, .block_two_semicolon => {
            const statements = [2]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
@@ -3216,7 +3246,7 @@ fn assign(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerError!voi
        // This intentionally does not support `@"_"` syntax.
        const ident_name = tree.tokenSlice(main_tokens[lhs]);
        if (mem.eql(u8, ident_name, "_")) {
-           _ = try expr(gz, scope, .{ .rl = .discard }, rhs);
+           _ = try expr(gz, scope, .{ .rl = .discard, .ctx = .assignment }, rhs);
            return;
        }
    }
@@ -3239,10 +3269,27 @@ fn assignOp(
    const node_datas = tree.nodes.items(.data);
    const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs);
+   var line: u32 = undefined;
+   var column: u32 = undefined;
+   switch (op_inst_tag) {
+       .add, .sub, .mul, .div, .mod_rem => {
+           maybeAdvanceSourceCursorToMainToken(gz, infix_node);
+           line = gz.astgen.source_line - gz.decl_line;
+           column = gz.astgen.source_column;
+       },
+       else => {},
+   }
    const lhs = try gz.addUnNode(.load, lhs_ptr, infix_node);
    const lhs_type = try gz.addUnNode(.typeof, lhs, infix_node);
    const rhs = try expr(gz, scope, .{ .rl = .{ .coerced_ty = lhs_type } }, node_datas[infix_node].rhs);
+   switch (op_inst_tag) {
+       .add, .sub, .mul, .div, .mod_rem => {
+           try emitDbgStmt(gz, line, column);
+       },
+       else => {},
+   }
    const result = try gz.addPlNode(op_inst_tag, infix_node, Zir.Inst.Bin{
        .lhs = lhs,
        .rhs = rhs,
@@ -5294,9 +5341,11 @@ fn orelseCatchExpr(
    // up for this fact by calling rvalue on the else branch.
    const operand = try reachableExpr(&block_scope, &block_scope.base, operand_ri, lhs, rhs);
    const cond = try block_scope.addUnNode(cond_op, operand, node);
-   const condbr = try block_scope.addCondBr(.condbr, node);
+   const condbr_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .condbr_inline else .condbr;
+   const condbr = try block_scope.addCondBr(condbr_tag, node);
-   const block = try parent_gz.makeBlockInst(.block, node);
+   const block_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .block_inline else .block;
+   const block = try parent_gz.makeBlockInst(block_tag, node);
    try block_scope.setBlockBody(block);
    // block_scope unstacked now, can add new instructions to parent_gz
    try parent_gz.instructions.append(astgen.gpa, block);
@@ -5471,9 +5520,15 @@ fn addFieldAccess(
    const dot_token = main_tokens[node];
    const field_ident = dot_token + 1;
    const str_index = try astgen.identAsString(field_ident);
+   const lhs = try expr(gz, scope, lhs_ri, object_node);
+   maybeAdvanceSourceCursorToMainToken(gz, node);
+   const line = gz.astgen.source_line - gz.decl_line;
+   const column = gz.astgen.source_column;
+   try emitDbgStmt(gz, line, column);
    return gz.addPlNode(tag, node, Zir.Inst.Field{
-       .lhs = try expr(gz, scope, lhs_ri, object_node),
+       .lhs = lhs,
        .field_name_start = str_index,
    });
}
@@ -5484,18 +5539,33 @@ fn arrayAccess(
    ri: ResultInfo,
    node: Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
-   const astgen = gz.astgen;
-   const tree = astgen.tree;
+   const tree = gz.astgen.tree;
    const node_datas = tree.nodes.items(.data);
    switch (ri.rl) {
-       .ref => return gz.addPlNode(.elem_ptr_node, node, Zir.Inst.Bin{
-           .lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs),
-           .rhs = try expr(gz, scope, .{ .rl = .{ .ty = .usize_type } }, node_datas[node].rhs),
-       }),
-       else => return rvalue(gz, ri, try gz.addPlNode(.elem_val_node, node, Zir.Inst.Bin{
-           .lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs),
-           .rhs = try expr(gz, scope, .{ .rl = .{ .ty = .usize_type } }, node_datas[node].rhs),
-       }), node),
+       .ref => {
+           const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
+           maybeAdvanceSourceCursorToMainToken(gz, node);
+           const line = gz.astgen.source_line - gz.decl_line;
+           const column = gz.astgen.source_column;
+           const rhs = try expr(gz, scope, .{ .rl = .{ .ty = .usize_type } }, node_datas[node].rhs);
+           try emitDbgStmt(gz, line, column);
+           return gz.addPlNode(.elem_ptr_node, node, Zir.Inst.Bin{ .lhs = lhs, .rhs = rhs });
+       },
+       else => {
+           const lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs);
+           maybeAdvanceSourceCursorToMainToken(gz, node);
+           const line = gz.astgen.source_line - gz.decl_line;
+           const column = gz.astgen.source_column;
+           const rhs = try expr(gz, scope, .{ .rl = .{ .ty = .usize_type } }, node_datas[node].rhs);
+           try emitDbgStmt(gz, line, column);
+           return rvalue(gz, ri, try gz.addPlNode(.elem_val_node, node, Zir.Inst.Bin{ .lhs = lhs, .rhs = rhs }), node);
+       },
    }
}
@@ -5510,10 +5580,26 @@ fn simpleBinOp(
    const tree = astgen.tree;
    const node_datas = tree.nodes.items(.data);
-   const result = try gz.addPlNode(op_inst_tag, node, Zir.Inst.Bin{
-       .lhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].lhs, node),
-       .rhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].rhs, node),
-   });
+   const lhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].lhs, node);
+   var line: u32 = undefined;
+   var column: u32 = undefined;
+   switch (op_inst_tag) {
+       .add, .sub, .mul, .div, .mod_rem => {
+           maybeAdvanceSourceCursorToMainToken(gz, node);
+           line = gz.astgen.source_line - gz.decl_line;
+           column = gz.astgen.source_column;
+       },
+       else => {},
+   }
+   const rhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].rhs, node);
+   switch (op_inst_tag) {
+       .add, .sub, .mul, .div, .mod_rem => {
+           try emitDbgStmt(gz, line, column);
+       },
+       else => {},
+   }
+   const result = try gz.addPlNode(op_inst_tag, node, Zir.Inst.Bin{ .lhs = lhs, .rhs = rhs });
    return rvalue(gz, ri, result, node);
}
@@ -5608,9 +5694,11 @@ fn ifExpr(
        }
    };
-   const condbr = try block_scope.addCondBr(.condbr, node);
+   const condbr_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .condbr_inline else .condbr;
+   const condbr = try block_scope.addCondBr(condbr_tag, node);
-   const block = try parent_gz.makeBlockInst(.block, node);
+   const block_tag: Zir.Inst.Tag = if (parent_gz.force_comptime) .block_inline else .block;
+   const block = try parent_gz.makeBlockInst(block_tag, node);
    try block_scope.setBlockBody(block);
    // block_scope unstacked now, can add new instructions to parent_gz
    try parent_gz.instructions.append(astgen.gpa, block);
@@ -7084,7 +7172,7 @@ fn localVarRef(
            if (local_val.name == name_str_index) {
                // Locals cannot shadow anything, so we do not need to look for ambiguous
                // references in this case.
-               if (ri.rl == .discard) {
+               if (ri.rl == .discard and ri.ctx == .assignment) {
                    local_val.discarded = ident_token;
                } else {
                    local_val.used = ident_token;
@@ -7107,7 +7195,7 @@ fn localVarRef(
        .local_ptr => {
            const local_ptr = s.cast(Scope.LocalPtr).?;
            if (local_ptr.name == name_str_index) {
-               if (ri.rl == .discard) {
+               if (ri.rl == .discard and ri.ctx == .assignment) {
                    local_ptr.discarded = ident_token;
                } else {
                    local_ptr.used = ident_token;
@@ -7969,6 +8057,8 @@ fn builtinCall(
            return rvalue(gz, ri, result, node);
        },
        .err_set_cast => {
+           try emitDbgNode(gz, node);
            const result = try gz.addExtendedPayload(.err_set_cast, Zir.Inst.BinNode{
                .lhs = try typeExpr(gz, scope, params[0]),
                .rhs = try expr(gz, scope, .{ .rl = .none }, params[1]),
@@ -8274,6 +8364,8 @@ fn typeCast(
    rhs_node: Ast.Node.Index,
    tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
+   try emitDbgNode(gz, node);
    const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
        .lhs = try typeExpr(gz, scope, lhs_node),
        .rhs = try expr(gz, scope, .{ .rl = .none }, rhs_node),
@@ -8303,6 +8395,10 @@ fn simpleUnOp(
    operand_node: Ast.Node.Index,
    tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
+   switch (tag) {
+       .tag_name, .error_name, .ptr_to_int => try emitDbgNode(gz, node),
+       else => {},
+   }
    const operand = try expr(gz, scope, operand_ri, operand_node);
    const result = try gz.addUnNode(tag, operand, node);
    return rvalue(gz, ri, result, node);
@@ -8375,6 +8471,8 @@ fn divBuiltin(
    rhs_node: Ast.Node.Index,
    tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
+   try emitDbgNode(gz, node);
    const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
        .lhs = try expr(gz, scope, .{ .rl = .none }, lhs_node),
        .rhs = try expr(gz, scope, .{ .rl = .none }, rhs_node),
@@ -8428,8 +8526,15 @@ fn shiftOp(
    tag: Zir.Inst.Tag,
) InnerError!Zir.Inst.Ref {
    const lhs = try expr(gz, scope, .{ .rl = .none }, lhs_node);
+   maybeAdvanceSourceCursorToMainToken(gz, node);
+   const line = gz.astgen.source_line - gz.decl_line;
+   const column = gz.astgen.source_column;
    const log2_int_type = try gz.addUnNode(.typeof_log2_int_type, lhs, lhs_node);
    const rhs = try expr(gz, scope, .{ .rl = .{ .ty = log2_int_type }, .ctx = .shift_op }, rhs_node);
+   try emitDbgStmt(gz, line, column);
    const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
        .lhs = lhs,
        .rhs = rhs,
@@ -10668,14 +10773,19 @@ const GenZir = struct {
                gz.break_result_info = parent_ri;
            },
-           .discard, .none, .ref => {
+           .none, .ref => {
                gz.rl_ty_inst = .none;
                gz.break_result_info = parent_ri;
            },
+           .discard => {
+               gz.rl_ty_inst = .none;
+               gz.break_result_info = .{ .rl = .discard };
+           },
            .ptr => |ptr_res| {
                gz.rl_ty_inst = .none;
-               gz.break_result_info = .{ .rl = .{ .ptr = .{ .inst = ptr_res.inst } } };
+               gz.break_result_info = .{ .rl = .{ .ptr = .{ .inst = ptr_res.inst } }, .ctx = parent_ri.ctx };
            },
            .inferred_ptr => |ptr| {
@@ -12054,6 +12164,18 @@ fn detectLocalShadowing(
    };
}
+
+/// Advances the source cursor to the main token of `node` if not in comptime scope.
+/// Usually paired with `emitDbgStmt`.
+fn maybeAdvanceSourceCursorToMainToken(gz: *GenZir, node: Ast.Node.Index) void {
+   if (gz.force_comptime) return;
+   const tree = gz.astgen.tree;
+   const token_starts = tree.tokens.items(.start);
+   const main_tokens = tree.nodes.items(.main_token);
+   const node_start = token_starts[main_tokens[node]];
+   gz.astgen.advanceSourceCursor(node_start);
+}
+
/// Advances the source cursor to the beginning of `node`.
fn advanceSourceCursorToNode(astgen: *AstGen, node: Ast.Node.Index) void {
    const tree = astgen.tree;


@@ -2129,7 +2129,15 @@ fn walkInstruction(
                file,
                parent_scope,
                parent_src,
-               getBlockInlineBreak(file.zir, inst_index),
+               getBlockInlineBreak(file.zir, inst_index) orelse {
+                   const res = DocData.WalkResult{ .expr = .{
+                       .comptimeExpr = self.comptime_exprs.items.len,
+                   } };
+                   try self.comptime_exprs.append(self.arena, .{
+                       .code = "if (...) { ... }",
+                   });
+                   return res;
+               },
                need_type,
            );
        },
@@ -3155,7 +3163,7 @@ fn walkDecls(
        2 => {
            // decl test
            const decl_being_tested = scope.resolveDeclName(doc_comment_index);
-           const func_index = getBlockInlineBreak(file.zir, value_index);
+           const func_index = getBlockInlineBreak(file.zir, value_index).?;
            const pl_node = data[Zir.refToIndex(func_index).?].pl_node;
            const fn_src = try self.srcLocInfo(file, pl_node.src_node, decl_src);
@@ -4301,12 +4309,13 @@ fn walkRef(
    }
}
-fn getBlockInlineBreak(zir: Zir, inst_index: usize) Zir.Inst.Ref {
+fn getBlockInlineBreak(zir: Zir, inst_index: usize) ?Zir.Inst.Ref {
    const tags = zir.instructions.items(.tag);
    const data = zir.instructions.items(.data);
    const pl_node = data[inst_index].pl_node;
    const extra = zir.extraData(Zir.Inst.Block, pl_node.payload_index);
    const break_index = zir.extra[extra.end..][extra.data.body_len - 1];
+   if (tags[break_index] == .condbr_inline) return null;
    std.debug.assert(tags[break_index] == .break_inline);
    return data[break_index].@"break".operand;
}


@@ -1491,6 +1491,8 @@ fn analyzeBodyInner(
                    return err;
                };
                const inline_body = if (cond.val.toBool()) then_body else else_body;
+               try sema.maybeErrorUnwrapCondbr(block, inline_body, extra.data.condition, cond_src);
                const old_runtime_index = block.runtime_index;
                defer block.runtime_index = old_runtime_index;
                const break_data = (try sema.analyzeBodyBreak(block, inline_body)) orelse
@@ -5658,14 +5660,14 @@ fn lookupInNamespace(
    const src_file = block.namespace.file_scope;
    const gpa = sema.gpa;
-   var checked_namespaces: std.AutoArrayHashMapUnmanaged(*Namespace, void) = .{};
+   var checked_namespaces: std.AutoArrayHashMapUnmanaged(*Namespace, bool) = .{};
    defer checked_namespaces.deinit(gpa);
    // Keep track of name conflicts for error notes.
    var candidates: std.ArrayListUnmanaged(Decl.Index) = .{};
    defer candidates.deinit(gpa);
-   try checked_namespaces.put(gpa, namespace, {});
+   try checked_namespaces.put(gpa, namespace, namespace.file_scope == src_file);
    var check_i: usize = 0;
    while (check_i < checked_namespaces.count()) : (check_i += 1) {
@@ -5674,7 +5676,7 @@ fn lookupInNamespace(
            // Skip decls which are not marked pub, which are in a different
            // file than the `a.b`/`@hasDecl` syntax.
            const decl = mod.declPtr(decl_index);
-           if (decl.is_pub or src_file == decl.getFileScope()) {
+           if (decl.is_pub or (src_file == decl.getFileScope() and checked_namespaces.values()[check_i])) {
                try candidates.append(gpa, decl_index);
            }
        }
@@ -5693,7 +5695,7 @@ fn lookupInNamespace(
                try sema.ensureDeclAnalyzed(sub_usingnamespace_decl_index);
                const ns_ty = sub_usingnamespace_decl.val.castTag(.ty).?.data;
                const sub_ns = ns_ty.getNamespace().?;
-               try checked_namespaces.put(gpa, sub_ns, {});
+               try checked_namespaces.put(gpa, sub_ns, src_file == sub_usingnamespace_decl.getFileScope());
            }
        }
@@ -6331,6 +6333,7 @@ fn analyzeCall(
        .instructions = .{},
        .label = null,
        .inlining = &inlining,
+       .is_typeof = block.is_typeof,
        .is_comptime = is_comptime_call,
        .comptime_reason = comptime_reason,
        .error_return_trace_index = block.error_return_trace_index,
@@ -16530,9 +16533,6 @@ fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE
    // This is only relevant at runtime.
    if (block.is_comptime or block.is_typeof) return;
-   // This is only relevant within functions.
-   if (sema.func == null) return;
    const save_index = inst_data.operand == .none or b: {
        const operand = try sema.resolveInst(inst_data.operand);
        const operand_ty = sema.typeOf(operand);
@@ -20179,8 +20179,8 @@ fn analyzeShuffle(
        .elem_type = elem_ty,
    });
-   if (maybe_a_len == null) a = try sema.addConstUndef(a_ty);
-   if (maybe_b_len == null) b = try sema.addConstUndef(b_ty);
+   if (maybe_a_len == null) a = try sema.addConstUndef(a_ty) else a = try sema.coerce(block, a_ty, a, a_src);
+   if (maybe_b_len == null) b = try sema.addConstUndef(b_ty) else b = try sema.coerce(block, b_ty, b, b_src);
    const operand_info = [2]std.meta.Tuple(&.{ u64, LazySrcLoc, Type }){
        .{ a_len, a_src, a_ty },
@@ -27503,9 +27503,6 @@ fn analyzeLoad(
        if (try sema.pointerDeref(block, src, ptr_val, ptr_ty)) |elem_val| {
            return sema.addConstant(elem_ty, elem_val);
        }
-       if (block.is_typeof) {
-           return sema.addConstUndef(elem_ty);
-       }
    }
    return block.addTyOp(.load, elem_ty, ptr);


@@ -1966,7 +1966,7 @@ pub const Object = struct {
            for (tuple.types) |field_ty, i| {
                const field_val = tuple.values[i];
-               if (field_val.tag() != .unreachable_value) continue;
+               if (field_val.tag() != .unreachable_value or !field_ty.hasRuntimeBits()) continue;
                const field_size = field_ty.abiSize(target);
                const field_align = field_ty.abiAlignment(target);
@@ -2901,7 +2901,7 @@ pub const DeclGen = struct {
            for (tuple.types) |field_ty, i| {
                const field_val = tuple.values[i];
-               if (field_val.tag() != .unreachable_value) continue;
+               if (field_val.tag() != .unreachable_value or !field_ty.hasRuntimeBits()) continue;
                const field_align = field_ty.abiAlignment(target);
                big_align = @max(big_align, field_align);
@@ -3198,7 +3198,8 @@ pub const DeclGen = struct {
    /// There are other similar cases handled here as well.
    fn lowerPtrElemTy(dg: *DeclGen, elem_ty: Type) Allocator.Error!*llvm.Type {
        const lower_elem_ty = switch (elem_ty.zigTypeTag()) {
-           .Opaque, .Fn => true,
+           .Opaque => true,
+           .Fn => !elem_ty.fnInfo().is_generic,
            .Array => elem_ty.childType().hasRuntimeBitsIgnoreComptime(),
            else => elem_ty.hasRuntimeBitsIgnoreComptime(),
        };
@@ -4145,7 +4146,9 @@ pub const DeclGen = struct {
        }
        const is_fn_body = decl.ty.zigTypeTag() == .Fn;
-       if (!is_fn_body and !decl.ty.hasRuntimeBits()) {
+       if ((!is_fn_body and !decl.ty.hasRuntimeBits()) or
+           (is_fn_body and decl.ty.fnInfo().is_generic))
+       {
            return self.lowerPtrToVoid(tv.ty);
        }
@@ -8671,9 +8674,9 @@ pub const FuncGen = struct {
        const arena = arena_allocator.allocator();
        const mod = self.dg.module;
-       const llvm_fn_name = try std.fmt.allocPrintZ(arena, "__zig_is_named_enum_value_{s}", .{
-           try mod.declPtr(enum_decl).getFullyQualifiedName(mod),
-       });
+       const fqn = try mod.declPtr(enum_decl).getFullyQualifiedName(mod);
+       defer self.gpa.free(fqn);
+       const llvm_fn_name = try std.fmt.allocPrintZ(arena, "__zig_is_named_enum_value_{s}", .{fqn});
        var int_tag_type_buffer: Type.Payload.Bits = undefined;
        const int_tag_ty = enum_ty.intTagType(&int_tag_type_buffer);
@@ -8752,9 +8755,9 @@ pub const FuncGen = struct {
        const arena = arena_allocator.allocator();
        const mod = self.dg.module;
-       const llvm_fn_name = try std.fmt.allocPrintZ(arena, "__zig_tag_name_{s}", .{
-           try mod.declPtr(enum_decl).getFullyQualifiedName(mod),
-       });
+       const fqn = try mod.declPtr(enum_decl).getFullyQualifiedName(mod);
+       defer self.gpa.free(fqn);
+       const llvm_fn_name = try std.fmt.allocPrintZ(arena, "__zig_tag_name_{s}", .{fqn});
        const slice_ty = Type.initTag(.const_slice_u8_sentinel_0);
        const llvm_ret_ty = try self.dg.lowerType(slice_ty);
@@ -10204,7 +10207,7 @@ fn llvmFieldIndex(
        const tuple = ty.tupleFields();
        var llvm_field_index: c_uint = 0;
        for (tuple.types) |field_ty, i| {
-           if (tuple.values[i].tag() != .unreachable_value) continue;
+           if (tuple.values[i].tag() != .unreachable_value or !field_ty.hasRuntimeBits()) continue;
            const field_align = field_ty.abiAlignment(target);
            big_align = @max(big_align, field_align);
@@ -10216,7 +10219,7 @@ fn llvmFieldIndex(
                llvm_field_index += 1;
            }
-           if (field_index == i) {
+           if (field_index <= i) {
                ptr_pl_buf.* = .{
                    .data = .{
                        .pointee_type = field_ty,
@@ -10249,7 +10252,7 @@ fn llvmFieldIndex(
            llvm_field_index += 1;
        }
-       if (field_index == i) {
+       if (field_index <= i) {
            ptr_pl_buf.* = .{
                .data = .{
                    .pointee_type = field.ty,
@@ -10768,7 +10771,7 @@ fn isByRef(ty: Type) bool {
            const tuple = ty.tupleFields();
            var count: usize = 0;
            for (tuple.values) |field_val, i| {
-               if (field_val.tag() != .unreachable_value) continue;
+               if (field_val.tag() != .unreachable_value or !tuple.types[i].hasRuntimeBits()) continue;
                count += 1;
                if (count > max_fields_byval) return true;


@@ -640,7 +640,9 @@ pub const Type = extern union {
            const a_info = a.fnInfo();
            const b_info = b.fnInfo();
-           if (!eql(a_info.return_type, b_info.return_type, mod))
+           if (a_info.return_type.tag() != .generic_poison and
+               b_info.return_type.tag() != .generic_poison and
+               !eql(a_info.return_type, b_info.return_type, mod))
                return false;
            if (a_info.is_var_args != b_info.is_var_args)
@@ -5757,7 +5759,7 @@ pub const Type = extern union {
        for (tuple.types) |field_ty, i| {
            const field_val = tuple.values[i];
-           if (field_val.tag() != .unreachable_value) {
+           if (field_val.tag() != .unreachable_value or !field_ty.hasRuntimeBits()) {
                // comptime field
                if (i == index) return offset;
                continue;


@@ -1499,3 +1499,11 @@ test "non-optional and optional array elements concatenated" {
    var index: usize = 0;
    try expect(array[index].? == 'A');
}
+
+test "inline call in @TypeOf inherits is_inline property" {
+   const S = struct {
+       inline fn doNothing() void {}
+       const T = @TypeOf(doNothing());
+   };
+   try expect(S.T == void);
+}


@@ -405,3 +405,15 @@ test "null sentinel pointer passed as generic argument" {
    };
    try S.doTheTest((@intToPtr([*:null]const [*c]const u8, 8)));
}
+
+test "generic function passed as comptime argument" {
+   if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
+
+   const S = struct {
+       fn doMath(comptime f: fn (type, i32, i32) error{Overflow}!i32, a: i32, b: i32) !void {
+           const result = try f(i32, a, b);
+           try expect(result == 11);
+       }
+   };
+   try S.doMath(std.math.add, 5, 6);
+}


@@ -489,3 +489,20 @@ test "ptrCast comptime known slice to C pointer" {
    var p = @ptrCast([*c]const u8, s);
    try std.testing.expectEqualStrings(s, std.mem.sliceTo(p, 0));
}
+
+test "ptrToInt on a generic function" {
+   if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_aarch64 and builtin.os.tag != .linux) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag != .linux) return error.SkipZigTest; // TODO
+
+   const S = struct {
+       fn generic(i: anytype) @TypeOf(i) {
+           return i;
+       }
+       fn doTheTest(a: anytype) !void {
+           try expect(@ptrToInt(a) != 0);
+       }
+   };
+   try S.doTheTest(&S.generic);
+}


@@ -1398,3 +1398,23 @@ test "under-aligned struct field" {
    const result = std.mem.readIntNative(u64, array[4..12]);
    try expect(result == 1234);
}
+
+test "address of zero-bit field is equal to address of only field" {
+   if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
+
+   {
+       const A = struct { b: void = {}, u: u8 };
+       var a = A{ .u = 0 };
+       const a_ptr = @fieldParentPtr(A, "b", &a.b);
+       try std.testing.expectEqual(&a, a_ptr);
+   }
+   {
+       const A = struct { u: u8, b: void = {} };
+       var a = A{ .u = 0 };
+       const a_ptr = @fieldParentPtr(A, "b", &a.b);
+       try std.testing.expectEqual(&a, a_ptr);
+   }
+}


@@ -323,3 +323,13 @@ test "zero sized struct in tuple handled correctly" {
    var s: State = undefined;
    try expect(s.do() == 0);
}
+
+test "tuple type with void field and a runtime field" {
+   if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
+
+   const T = std.meta.Tuple(&[_]type{ usize, void });
+   var t: T = .{ 5, {} };
+   try expect(t[0] == 5);
+}


@@ -75,3 +75,7 @@ test {
    const a = AA.b(42);
    try expect(a.x == AA.c().expected);
}
+
+comptime {
+   _ = @import("usingnamespace/file_1.zig");
+}


@@ -0,0 +1 @@
+pub const A = 123;


@@ -0,0 +1,9 @@
+const std = @import("std");
+const expect = std.testing.expect;
+const imports = @import("imports.zig");
+
+const A = 456;
+
+test {
+   try expect(imports.A == 123);
+}


@@ -0,0 +1,5 @@
+const file_0 = @import("file_0.zig");
+const file_1 = @import("file_1.zig");
+
+pub usingnamespace file_0;
+pub usingnamespace file_1;


@@ -3,6 +3,7 @@ const builtin = @import("builtin");
const mem = std.mem;
const math = std.math;
const expect = std.testing.expect;
+const expectEqual = std.testing.expectEqual;

test "implicit cast vector to array - bool" {
    if (builtin.zig_backend == .stage1) {
@@ -1231,3 +1232,17 @@ test "modRem with zero divisor" {
        _ = zeros[0];
    }
}
+
+test "array operands to shuffle are coerced to vectors" {
+   if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
+   if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
+
+   const mask = [5]i32{ -1, 0, 1, 2, 3 };
+   var a = [5]u32{ 3, 5, 7, 9, 0 };
+   var b = @shuffle(u32, a, @splat(5, @as(u24, 0)), mask);
+   try expectEqual([_]u32{ 0, 3, 5, 7, 9 }, b);
+}


@@ -0,0 +1,22 @@
+pub export fn entry1() void {
+   var x: u32 = 3;
+   _ = @shuffle(u32, [_]u32{0}, @splat(1, @as(u32, 0)), [_]i8{
+       if (x > 1) 1 else -1,
+   });
+}
+
+pub export fn entry2() void {
+   var y: ?i8 = -1;
+   _ = @shuffle(u32, [_]u32{0}, @splat(1, @as(u32, 0)), [_]i8{
+       y orelse 1,
+   });
+}
+
+// error
+// backend=stage2
+// target=native
+//
+// :4:15: error: unable to resolve comptime value
+// :4:15: note: condition in comptime branch must be comptime-known
+// :11:11: error: unable to resolve comptime value
+// :11:11: note: condition in comptime branch must be comptime-known


@@ -3,6 +3,14 @@ export fn foo() void {
    x += 1;
    _ = x;
}
+
+export fn bar() void {
+   var b: u32 = 1;
+   _ = blk: {
+       const a = 1;
+       b = a;
+       break :blk a;
+   };
+}

// error
// backend=stage2