Merge pull request #22397 from Techatrix/type-safe-ast

improve type safety of std.zig.Ast
Matthew Lugg 2025-03-12 02:22:41 +00:00 committed by GitHub
commit d0911786c9
26 changed files with 5407 additions and 5707 deletions
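The change replaces std.zig.Ast's raw u32 node handles, where every node reused generic lhs/rhs fields and 0 doubled as "absent", with distinct index types, shape-named data payloads, and explicit optional indices. A minimal before/after sketch of the pattern applied throughout the diff below (walkExpression and w stand in for the walker code shown later):

// Before: raw slices out of the MultiArrayList; what lhs/rhs mean depends
// on the node's tag, and 0 silently means "no operand".
const node_tags = ast.nodes.items(.tag);
const datas = ast.nodes.items(.data);
if (node_tags[node] == .@"return") {
    if (datas[node].lhs != 0) try walkExpression(w, datas[node].lhs);
}

// After: typed accessors; the payload is named for its shape, and absence
// must be unwrapped before the index can be used at all.
if (ast.nodeTag(node) == .@"return") {
    if (ast.nodeData(node).opt_node.unwrap()) |operand| {
        try walkExpression(w, operand);
    }
}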

File diff suppressed because it is too large.


@ -220,7 +220,7 @@ pub fn main() !void {
mem.eql(u8, msg, "unused function parameter") or
mem.eql(u8, msg, "unused capture"))
{
const ident_token = item.data.token;
const ident_token = item.data.token.unwrap().?;
try more_fixups.unused_var_decls.put(gpa, ident_token, {});
} else {
std.debug.print("found other ZIR error: '{s}'\n", .{msg});
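item.data.token is now an optional token index rather than a bare u32. unwrap() converts it to an ordinary Zig optional, so presence is either asserted or branched on explicitly; a sketch of both consumption styles for the line above:

// assert presence: this diagnostic is known to carry a token
const ident_token = item.data.token.unwrap().?;
// or branch, where absence is legitimate:
if (item.data.token.unwrap()) |tok| {
    try more_fixups.unused_var_decls.put(gpa, tok, {});
}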


@ -98,29 +98,26 @@ const ScanDeclsAction = enum { add, remove };
fn scanDecls(w: *Walk, members: []const Ast.Node.Index, action: ScanDeclsAction) Error!void {
const ast = w.ast;
const gpa = w.gpa;
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
const token_tags = ast.tokens.items(.tag);
for (members) |member_node| {
const name_token = switch (node_tags[member_node]) {
const name_token = switch (ast.nodeTag(member_node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> main_tokens[member_node] + 1,
=> ast.nodeMainToken(member_node) + 1,
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
.fn_decl,
=> main_tokens[member_node] + 1,
=> ast.nodeMainToken(member_node) + 1,
else => continue,
};
assert(token_tags[name_token] == .identifier);
assert(ast.tokenTag(name_token) == .identifier);
const name_bytes = ast.tokenSlice(name_token);
switch (action) {
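The deleted items(...) slices give way to per-element accessors. The reads are equivalent, but node and token indices are now distinct types, so one can no longer be passed where the other belongs. Roughly:

switch (ast.nodeTag(member_node)) { // was: ast.nodes.items(.tag)[member_node]
    .simple_var_decl => {
        // was: main_tokens[member_node] + 1
        const name_token = ast.nodeMainToken(member_node) + 1;
        // was: token_tags[name_token] == .identifier
        assert(ast.tokenTag(name_token) == .identifier);
    },
    else => {},
}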
@ -145,12 +142,10 @@ fn scanDecls(w: *Walk, members: []const Ast.Node.Index, action: ScanDeclsAction)
fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
const ast = w.ast;
const datas = ast.nodes.items(.data);
switch (ast.nodes.items(.tag)[decl]) {
switch (ast.nodeTag(decl)) {
.fn_decl => {
const fn_proto = datas[decl].lhs;
const fn_proto, const body_node = ast.nodeData(decl).node_and_node;
try walkExpression(w, fn_proto);
const body_node = datas[decl].rhs;
if (!isFnBodyGutted(ast, body_node)) {
w.replace_names.clearRetainingCapacity();
try w.transformations.append(.{ .gut_function = decl });
@ -167,7 +162,7 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
.@"usingnamespace" => {
try w.transformations.append(.{ .delete_node = decl });
const expr = datas[decl].lhs;
const expr = ast.nodeData(decl).node;
try walkExpression(w, expr);
},
@ -179,7 +174,7 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
.test_decl => {
try w.transformations.append(.{ .delete_node = decl });
try walkExpression(w, datas[decl].rhs);
try walkExpression(w, ast.nodeData(decl).opt_token_and_node[1]);
},
.container_field_init,
@ -202,14 +197,10 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
const ast = w.ast;
const token_tags = ast.tokens.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
const node_tags = ast.nodes.items(.tag);
const datas = ast.nodes.items(.data);
switch (node_tags[node]) {
switch (ast.nodeTag(node)) {
.identifier => {
const name_ident = main_tokens[node];
assert(token_tags[name_ident] == .identifier);
const name_ident = ast.nodeMainToken(node);
assert(ast.tokenTag(name_ident) == .identifier);
const name_bytes = ast.tokenSlice(name_ident);
_ = w.unreferenced_globals.swapRemove(name_bytes);
if (w.replace_names.get(name_bytes)) |index| {
@ -230,64 +221,36 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
.block_two,
.block_two_semicolon,
=> {
const statements = [2]Ast.Node.Index{ datas[node].lhs, datas[node].rhs };
if (datas[node].lhs == 0) {
return walkBlock(w, node, statements[0..0]);
} else if (datas[node].rhs == 0) {
return walkBlock(w, node, statements[0..1]);
} else {
return walkBlock(w, node, statements[0..2]);
}
},
.block,
.block_semicolon,
=> {
const statements = ast.extra_data[datas[node].lhs..datas[node].rhs];
var buf: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(&buf, node).?;
return walkBlock(w, node, statements);
},
.@"errdefer" => {
const expr = datas[node].rhs;
const expr = ast.nodeData(node).opt_token_and_node[1];
return walkExpression(w, expr);
},
.@"defer" => {
const expr = datas[node].rhs;
return walkExpression(w, expr);
},
.@"comptime", .@"nosuspend" => {
const block = datas[node].lhs;
return walkExpression(w, block);
},
.@"suspend" => {
const body = datas[node].lhs;
return walkExpression(w, body);
},
.@"catch" => {
try walkExpression(w, datas[node].lhs); // target
try walkExpression(w, datas[node].rhs); // fallback
.@"defer",
.@"comptime",
.@"nosuspend",
.@"suspend",
=> {
return walkExpression(w, ast.nodeData(node).node);
},
.field_access => {
const field_access = datas[node];
try walkExpression(w, field_access.lhs);
try walkExpression(w, ast.nodeData(node).node_and_token[0]);
},
.error_union,
.switch_range,
=> {
const infix = datas[node];
try walkExpression(w, infix.lhs);
return walkExpression(w, infix.rhs);
},
.for_range => {
const infix = datas[node];
try walkExpression(w, infix.lhs);
if (infix.rhs != 0) {
return walkExpression(w, infix.rhs);
const start, const opt_end = ast.nodeData(node).node_and_opt_node;
try walkExpression(w, start);
if (opt_end.unwrap()) |end| {
return walkExpression(w, end);
}
},
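blockStatements collapses the four block tags behind one helper: the caller supplies a two-slot buffer that backs the short block_two/block_two_semicolon forms, while block/block_semicolon return a slice into the extra data array; a non-block node yields null. The same buffer convention reappears below as builtinCallParams. Calling pattern, as used in this file:

var buf: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(&buf, node) orelse return; // null: not a block
for (statements) |stmt| {
    try walkExpression(w, stmt);
}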
@ -337,17 +300,21 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
.sub,
.sub_wrap,
.sub_sat,
.@"catch",
.error_union,
.switch_range,
.@"orelse",
.array_access,
=> {
const infix = datas[node];
try walkExpression(w, infix.lhs);
try walkExpression(w, infix.rhs);
const lhs, const rhs = ast.nodeData(node).node_and_node;
try walkExpression(w, lhs);
try walkExpression(w, rhs);
},
.assign_destructure => {
const full = ast.assignDestructure(node);
for (full.ast.variables) |variable_node| {
switch (node_tags[variable_node]) {
switch (ast.nodeTag(variable_node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
@ -366,15 +333,12 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
.negation_wrap,
.optional_type,
.address_of,
=> {
return walkExpression(w, datas[node].lhs);
},
.@"try",
.@"resume",
.@"await",
.deref,
=> {
return walkExpression(w, datas[node].lhs);
return walkExpression(w, ast.nodeData(node).node);
},
.array_type,
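nodeData(node) now returns a payload whose field name states the operand shape, which is why .@"catch", .error_union, .switch_range, .@"orelse" and .array_access could fold into the generic two-operand prong above. A few of the shapes this file exercises, one prong per tag family (illustrative only):

switch (ast.nodeTag(node)) {
    .@"catch" => { // any two-child infix node
        const lhs, const rhs = ast.nodeData(node).node_and_node;
        _ = .{ lhs, rhs };
    },
    .field_access => { // child node plus the field identifier token
        const object, const field_ident = ast.nodeData(node).node_and_token;
        _ = .{ object, field_ident };
    },
    .for_range => { // mandatory start, optional end
        const start, const opt_end = ast.nodeData(node).node_and_opt_node;
        _ = .{ start, opt_end };
    },
    else => {},
}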
@ -426,51 +390,40 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
return walkCall(w, ast.fullCall(&buf, node).?);
},
.array_access => {
const suffix = datas[node];
try walkExpression(w, suffix.lhs);
try walkExpression(w, suffix.rhs);
},
.slice_open, .slice, .slice_sentinel => return walkSlice(w, node, ast.fullSlice(node).?),
.deref => {
try walkExpression(w, datas[node].lhs);
},
.unwrap_optional => {
try walkExpression(w, datas[node].lhs);
try walkExpression(w, ast.nodeData(node).node_and_token[0]);
},
.@"break" => {
const label_token = datas[node].lhs;
const target = datas[node].rhs;
if (label_token == 0 and target == 0) {
const label_token, const target = ast.nodeData(node).opt_token_and_opt_node;
if (label_token == .none and target == .none) {
// no expressions
} else if (label_token == 0 and target != 0) {
try walkExpression(w, target);
} else if (label_token != 0 and target == 0) {
try walkIdentifier(w, label_token);
} else if (label_token != 0 and target != 0) {
try walkExpression(w, target);
} else if (label_token == .none and target != .none) {
try walkExpression(w, target.unwrap().?);
} else if (label_token != .none and target == .none) {
try walkIdentifier(w, label_token.unwrap().?);
} else if (label_token != .none and target != .none) {
try walkExpression(w, target.unwrap().?);
}
},
.@"continue" => {
const label = datas[node].lhs;
if (label != 0) {
return walkIdentifier(w, label); // label
const opt_label = ast.nodeData(node).opt_token_and_opt_node[0];
if (opt_label.unwrap()) |label| {
return walkIdentifier(w, label);
}
},
.@"return" => {
if (datas[node].lhs != 0) {
try walkExpression(w, datas[node].lhs);
if (ast.nodeData(node).opt_node.unwrap()) |lhs| {
try walkExpression(w, lhs);
}
},
.grouped_expression => {
try walkExpression(w, datas[node].lhs);
try walkExpression(w, ast.nodeData(node).node_and_token[0]);
},
.container_decl,
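A break may carry a label token, a target expression, both, or neither; the old encoding was two zero-able u32s, and the four-way chain above keeps that shape. Since walking the target subsumes the "both" case, the optional pair also admits an equivalent shorter form (a sketch, not what the diff lands):

const label_token, const target = ast.nodeData(node).opt_token_and_opt_node;
if (target.unwrap()) |t| {
    try walkExpression(w, t); // "target only" and "label and target"
} else if (label_token.unwrap()) |l| {
    try walkIdentifier(w, l); // "label only"; when neither is present, nothing to walk
}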
@ -491,13 +444,11 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
},
.error_set_decl => {
const error_token = main_tokens[node];
const lbrace = error_token + 1;
const rbrace = datas[node].rhs;
const lbrace, const rbrace = ast.nodeData(node).token_and_token;
var i = lbrace + 1;
while (i < rbrace) : (i += 1) {
switch (token_tags[i]) {
switch (ast.tokenTag(i)) {
.doc_comment => unreachable, // TODO
.identifier => try walkIdentifier(w, i),
.comma => {},
@ -506,17 +457,13 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
}
},
.builtin_call_two, .builtin_call_two_comma => {
if (datas[node].lhs == 0) {
return walkBuiltinCall(w, node, &.{});
} else if (datas[node].rhs == 0) {
return walkBuiltinCall(w, node, &.{datas[node].lhs});
} else {
return walkBuiltinCall(w, node, &.{ datas[node].lhs, datas[node].rhs });
}
},
.builtin_call, .builtin_call_comma => {
const params = ast.extra_data[datas[node].lhs..datas[node].rhs];
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(&buf, node).?;
return walkBuiltinCall(w, node, params);
},
@ -530,20 +477,16 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
},
.anyframe_type => {
if (datas[node].rhs != 0) {
return walkExpression(w, datas[node].rhs);
}
_, const child_type = ast.nodeData(node).token_and_node;
return walkExpression(w, child_type);
},
.@"switch",
.switch_comma,
=> {
const condition = datas[node].lhs;
const extra = ast.extraData(datas[node].rhs, Ast.Node.SubRange);
const cases = ast.extra_data[extra.start..extra.end];
try walkExpression(w, condition); // condition expression
try walkExpressions(w, cases);
const full = ast.fullSwitch(node).?;
try walkExpression(w, full.ast.condition); // condition expression
try walkExpressions(w, full.ast.cases);
},
.switch_case_one,
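fullSwitch joins the existing full* family (fullVarDecl, fullIf, fullCall, ...), replacing the manual extraData(..., Ast.Node.SubRange) decode with a typed view of the condition and cases. Typical consumption, matching the hunk above plus the fullSwitchCase usage later in this diff:

const full = ast.fullSwitch(node).?; // tag already known to be .@"switch"/.switch_comma
try walkExpression(w, full.ast.condition);
for (full.ast.cases) |case_node| {
    const case = ast.fullSwitchCase(case_node).?;
    try walkExpression(w, case.ast.target_expr);
}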
@ -570,7 +513,7 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
=> return walkAsm(w, ast.fullAsm(node).?),
.enum_literal => {
return walkIdentifier(w, main_tokens[node]); // name
return walkIdentifier(w, ast.nodeMainToken(node)); // name
},
.fn_decl => unreachable,
@ -592,66 +535,66 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
fn walkGlobalVarDecl(w: *Walk, decl_node: Ast.Node.Index, var_decl: Ast.full.VarDecl) Error!void {
_ = decl_node;
if (var_decl.ast.type_node != 0) {
try walkExpression(w, var_decl.ast.type_node);
if (var_decl.ast.type_node.unwrap()) |type_node| {
try walkExpression(w, type_node);
}
if (var_decl.ast.align_node != 0) {
try walkExpression(w, var_decl.ast.align_node);
if (var_decl.ast.align_node.unwrap()) |align_node| {
try walkExpression(w, align_node);
}
if (var_decl.ast.addrspace_node != 0) {
try walkExpression(w, var_decl.ast.addrspace_node);
if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
try walkExpression(w, addrspace_node);
}
if (var_decl.ast.section_node != 0) {
try walkExpression(w, var_decl.ast.section_node);
if (var_decl.ast.section_node.unwrap()) |section_node| {
try walkExpression(w, section_node);
}
if (var_decl.ast.init_node != 0) {
if (!isUndefinedIdent(w.ast, var_decl.ast.init_node)) {
try w.transformations.append(.{ .replace_with_undef = var_decl.ast.init_node });
if (var_decl.ast.init_node.unwrap()) |init_node| {
if (!isUndefinedIdent(w.ast, init_node)) {
try w.transformations.append(.{ .replace_with_undef = init_node });
}
try walkExpression(w, var_decl.ast.init_node);
try walkExpression(w, init_node);
}
}
fn walkLocalVarDecl(w: *Walk, var_decl: Ast.full.VarDecl) Error!void {
try walkIdentifierNew(w, var_decl.ast.mut_token + 1); // name
if (var_decl.ast.type_node != 0) {
try walkExpression(w, var_decl.ast.type_node);
if (var_decl.ast.type_node.unwrap()) |type_node| {
try walkExpression(w, type_node);
}
if (var_decl.ast.align_node != 0) {
try walkExpression(w, var_decl.ast.align_node);
if (var_decl.ast.align_node.unwrap()) |align_node| {
try walkExpression(w, align_node);
}
if (var_decl.ast.addrspace_node != 0) {
try walkExpression(w, var_decl.ast.addrspace_node);
if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
try walkExpression(w, addrspace_node);
}
if (var_decl.ast.section_node != 0) {
try walkExpression(w, var_decl.ast.section_node);
if (var_decl.ast.section_node.unwrap()) |section_node| {
try walkExpression(w, section_node);
}
if (var_decl.ast.init_node != 0) {
if (!isUndefinedIdent(w.ast, var_decl.ast.init_node)) {
try w.transformations.append(.{ .replace_with_undef = var_decl.ast.init_node });
if (var_decl.ast.init_node.unwrap()) |init_node| {
if (!isUndefinedIdent(w.ast, init_node)) {
try w.transformations.append(.{ .replace_with_undef = init_node });
}
try walkExpression(w, var_decl.ast.init_node);
try walkExpression(w, init_node);
}
}
fn walkContainerField(w: *Walk, field: Ast.full.ContainerField) Error!void {
if (field.ast.type_expr != 0) {
try walkExpression(w, field.ast.type_expr); // type
if (field.ast.type_expr.unwrap()) |type_expr| {
try walkExpression(w, type_expr); // type
}
if (field.ast.align_expr != 0) {
try walkExpression(w, field.ast.align_expr); // alignment
if (field.ast.align_expr.unwrap()) |align_expr| {
try walkExpression(w, align_expr); // alignment
}
if (field.ast.value_expr != 0) {
try walkExpression(w, field.ast.value_expr); // value
if (field.ast.value_expr.unwrap()) |value_expr| {
try walkExpression(w, value_expr); // value
}
}
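Collected in one place, the operations on Ast.Node.OptionalIndex that these hunks exercise (some_node is a placeholder for any Ast.Node.Index; the enum's internal layout is not relied on). Ast.OptionalTokenIndex gets the same surface, plus .fromToken(tok):

const opt: Ast.Node.OptionalIndex = some_node.toOptional(); // Index -> OptionalIndex
const maybe: ?Ast.Node.Index = opt.unwrap(); // OptionalIndex -> ?Index
const required: Ast.Node.Index = opt.unwrap().?; // assert presence
const absent = opt == .none; // test absence without unwrapping
_ = .{ maybe, required, absent };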
@ -662,18 +605,17 @@ fn walkBlock(
) Error!void {
_ = block_node;
const ast = w.ast;
const node_tags = ast.nodes.items(.tag);
for (statements) |stmt| {
switch (node_tags[stmt]) {
switch (ast.nodeTag(stmt)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(stmt).?;
if (var_decl.ast.init_node != 0 and
isUndefinedIdent(w.ast, var_decl.ast.init_node))
if (var_decl.ast.init_node != .none and
isUndefinedIdent(w.ast, var_decl.ast.init_node.unwrap().?))
{
try w.transformations.append(.{ .delete_var_decl = .{
.var_decl_node = stmt,
@ -704,15 +646,15 @@ fn walkBlock(
fn walkArrayType(w: *Walk, array_type: Ast.full.ArrayType) Error!void {
try walkExpression(w, array_type.ast.elem_count);
if (array_type.ast.sentinel != 0) {
try walkExpression(w, array_type.ast.sentinel);
if (array_type.ast.sentinel.unwrap()) |sentinel| {
try walkExpression(w, sentinel);
}
return walkExpression(w, array_type.ast.elem_type);
}
fn walkArrayInit(w: *Walk, array_init: Ast.full.ArrayInit) Error!void {
if (array_init.ast.type_expr != 0) {
try walkExpression(w, array_init.ast.type_expr); // T
if (array_init.ast.type_expr.unwrap()) |type_expr| {
try walkExpression(w, type_expr); // T
}
for (array_init.ast.elements) |elem_init| {
try walkExpression(w, elem_init);
@ -725,8 +667,8 @@ fn walkStructInit(
struct_init: Ast.full.StructInit,
) Error!void {
_ = struct_node;
if (struct_init.ast.type_expr != 0) {
try walkExpression(w, struct_init.ast.type_expr); // T
if (struct_init.ast.type_expr.unwrap()) |type_expr| {
try walkExpression(w, type_expr); // T
}
for (struct_init.ast.fields) |field_init| {
try walkExpression(w, field_init);
@ -746,18 +688,17 @@ fn walkSlice(
_ = slice_node;
try walkExpression(w, slice.ast.sliced);
try walkExpression(w, slice.ast.start);
if (slice.ast.end != 0) {
try walkExpression(w, slice.ast.end);
if (slice.ast.end.unwrap()) |end| {
try walkExpression(w, end);
}
if (slice.ast.sentinel != 0) {
try walkExpression(w, slice.ast.sentinel);
if (slice.ast.sentinel.unwrap()) |sentinel| {
try walkExpression(w, sentinel);
}
}
fn walkIdentifier(w: *Walk, name_ident: Ast.TokenIndex) Error!void {
const ast = w.ast;
const token_tags = ast.tokens.items(.tag);
assert(token_tags[name_ident] == .identifier);
assert(ast.tokenTag(name_ident) == .identifier);
const name_bytes = ast.tokenSlice(name_ident);
_ = w.unreferenced_globals.swapRemove(name_bytes);
}
@ -773,8 +714,8 @@ fn walkContainerDecl(
container_decl: Ast.full.ContainerDecl,
) Error!void {
_ = container_decl_node;
if (container_decl.ast.arg != 0) {
try walkExpression(w, container_decl.ast.arg);
if (container_decl.ast.arg.unwrap()) |arg| {
try walkExpression(w, arg);
}
try walkMembers(w, container_decl.ast.members);
}
@ -785,14 +726,13 @@ fn walkBuiltinCall(
params: []const Ast.Node.Index,
) Error!void {
const ast = w.ast;
const main_tokens = ast.nodes.items(.main_token);
const builtin_token = main_tokens[call_node];
const builtin_token = ast.nodeMainToken(call_node);
const builtin_name = ast.tokenSlice(builtin_token);
const info = BuiltinFn.list.get(builtin_name).?;
switch (info.tag) {
.import => {
const operand_node = params[0];
const str_lit_token = main_tokens[operand_node];
const str_lit_token = ast.nodeMainToken(operand_node);
const token_bytes = ast.tokenSlice(str_lit_token);
if (std.mem.endsWith(u8, token_bytes, ".zig\"")) {
const imported_string = std.zig.string_literal.parseAlloc(w.arena, token_bytes) catch
@ -821,29 +761,30 @@ fn walkFnProto(w: *Walk, fn_proto: Ast.full.FnProto) Error!void {
{
var it = fn_proto.iterate(ast);
while (it.next()) |param| {
if (param.type_expr != 0) {
try walkExpression(w, param.type_expr);
if (param.type_expr) |type_expr| {
try walkExpression(w, type_expr);
}
}
}
if (fn_proto.ast.align_expr != 0) {
try walkExpression(w, fn_proto.ast.align_expr);
if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
try walkExpression(w, align_expr);
}
if (fn_proto.ast.addrspace_expr != 0) {
try walkExpression(w, fn_proto.ast.addrspace_expr);
if (fn_proto.ast.addrspace_expr.unwrap()) |addrspace_expr| {
try walkExpression(w, addrspace_expr);
}
if (fn_proto.ast.section_expr != 0) {
try walkExpression(w, fn_proto.ast.section_expr);
if (fn_proto.ast.section_expr.unwrap()) |section_expr| {
try walkExpression(w, section_expr);
}
if (fn_proto.ast.callconv_expr != 0) {
try walkExpression(w, fn_proto.ast.callconv_expr);
if (fn_proto.ast.callconv_expr.unwrap()) |callconv_expr| {
try walkExpression(w, callconv_expr);
}
try walkExpression(w, fn_proto.ast.return_type);
const return_type = fn_proto.ast.return_type.unwrap().?;
try walkExpression(w, return_type);
}
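Note the two optional styles inside FnProto: the iterator's Param.type_expr comes back as a plain Zig optional and is captured directly, while the full.ast.* fields are OptionalIndex and need .unwrap() first:

var it = fn_proto.iterate(ast);
while (it.next()) |param| {
    if (param.type_expr) |type_expr| try walkExpression(w, type_expr); // ?Index
}
if (fn_proto.ast.align_expr.unwrap()) |align_expr| { // OptionalIndex
    try walkExpression(w, align_expr);
}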
fn walkExpressions(w: *Walk, expressions: []const Ast.Node.Index) Error!void {
@ -860,16 +801,13 @@ fn walkSwitchCase(w: *Walk, switch_case: Ast.full.SwitchCase) Error!void {
}
fn walkWhile(w: *Walk, node_index: Ast.Node.Index, while_node: Ast.full.While) Error!void {
assert(while_node.ast.cond_expr != 0);
assert(while_node.ast.then_expr != 0);
// Perform these transformations in this priority order:
// 1. If the `else` expression is missing or an empty block, replace the condition with `if (true)` if it is not already.
// 2. If the `then` block is empty, replace the condition with `if (false)` if it is not already.
// 3. If the condition is `if (true)`, replace the `if` expression with the contents of the `then` expression.
// 4. If the condition is `if (false)`, replace the `if` expression with the contents of the `else` expression.
if (!isTrueIdent(w.ast, while_node.ast.cond_expr) and
(while_node.ast.else_expr == 0 or isEmptyBlock(w.ast, while_node.ast.else_expr)))
(while_node.ast.else_expr == .none or isEmptyBlock(w.ast, while_node.ast.else_expr.unwrap().?)))
{
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_with_true = while_node.ast.cond_expr });
@ -886,45 +824,39 @@ fn walkWhile(w: *Walk, node_index: Ast.Node.Index, while_node: Ast.full.While) E
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
.to_replace = node_index,
.replacement = while_node.ast.else_expr,
.replacement = while_node.ast.else_expr.unwrap().?,
} });
}
try walkExpression(w, while_node.ast.cond_expr); // condition
if (while_node.ast.cont_expr != 0) {
try walkExpression(w, while_node.ast.cont_expr);
if (while_node.ast.cont_expr.unwrap()) |cont_expr| {
try walkExpression(w, cont_expr);
}
if (while_node.ast.then_expr != 0) {
try walkExpression(w, while_node.ast.then_expr);
}
if (while_node.ast.else_expr != 0) {
try walkExpression(w, while_node.ast.else_expr);
if (while_node.ast.else_expr.unwrap()) |else_expr| {
try walkExpression(w, else_expr);
}
}
fn walkFor(w: *Walk, for_node: Ast.full.For) Error!void {
try walkParamList(w, for_node.ast.inputs);
if (for_node.ast.then_expr != 0) {
try walkExpression(w, for_node.ast.then_expr);
}
if (for_node.ast.else_expr != 0) {
try walkExpression(w, for_node.ast.else_expr);
if (for_node.ast.else_expr.unwrap()) |else_expr| {
try walkExpression(w, else_expr);
}
}
fn walkIf(w: *Walk, node_index: Ast.Node.Index, if_node: Ast.full.If) Error!void {
assert(if_node.ast.cond_expr != 0);
assert(if_node.ast.then_expr != 0);
// Perform these transformations in this priority order:
// 1. If the `else` expression is missing or an empty block, replace the condition with `if (true)` if it is not already.
// 2. If the `then` block is empty, replace the condition with `if (false)` if it is not already.
// 3. If the condition is `if (true)`, replace the `if` expression with the contents of the `then` expression.
// 4. If the condition is `if (false)`, replace the `if` expression with the contents of the `else` expression.
if (!isTrueIdent(w.ast, if_node.ast.cond_expr) and
(if_node.ast.else_expr == 0 or isEmptyBlock(w.ast, if_node.ast.else_expr)))
(if_node.ast.else_expr == .none or isEmptyBlock(w.ast, if_node.ast.else_expr.unwrap().?)))
{
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_with_true = if_node.ast.cond_expr });
@ -941,17 +873,14 @@ fn walkIf(w: *Walk, node_index: Ast.Node.Index, if_node: Ast.full.If) Error!void
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
.to_replace = node_index,
.replacement = if_node.ast.else_expr,
.replacement = if_node.ast.else_expr.unwrap().?,
} });
}
try walkExpression(w, if_node.ast.cond_expr); // condition
if (if_node.ast.then_expr != 0) {
try walkExpression(w, if_node.ast.then_expr);
}
if (if_node.ast.else_expr != 0) {
try walkExpression(w, if_node.ast.else_expr);
if (if_node.ast.else_expr.unwrap()) |else_expr| {
try walkExpression(w, else_expr);
}
}
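The == .none or ... unwrap().? shape above looks redundant next to a capture, but the .? is safe: or short-circuits, so the unwrap only runs when an else branch exists, and a single capture cannot express the "missing or empty" disjunction directly:

// "no else" and "empty else" must both take the replace-with-true path:
if (if_node.ast.else_expr == .none or
    isEmptyBlock(w.ast, if_node.ast.else_expr.unwrap().?))
{
    // ...
}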
@ -971,25 +900,13 @@ fn walkParamList(w: *Walk, params: []const Ast.Node.Index) Error!void {
/// Check if it is already gutted (i.e. its body replaced with `@trap()`).
fn isFnBodyGutted(ast: *const Ast, body_node: Ast.Node.Index) bool {
// skip over discards
const node_tags = ast.nodes.items(.tag);
const datas = ast.nodes.items(.data);
var statements_buf: [2]Ast.Node.Index = undefined;
const statements = switch (node_tags[body_node]) {
const statements = switch (ast.nodeTag(body_node)) {
.block_two,
.block_two_semicolon,
=> blk: {
statements_buf[0..2].* = .{ datas[body_node].lhs, datas[body_node].rhs };
break :blk if (datas[body_node].lhs == 0)
statements_buf[0..0]
else if (datas[body_node].rhs == 0)
statements_buf[0..1]
else
statements_buf[0..2];
},
.block,
.block_semicolon,
=> ast.extra_data[datas[body_node].lhs..datas[body_node].rhs],
=> ast.blockStatements(&statements_buf, body_node).?,
else => return false,
};
@ -1012,27 +929,20 @@ const StmtCategory = enum {
};
fn categorizeStmt(ast: *const Ast, stmt: Ast.Node.Index) StmtCategory {
const node_tags = ast.nodes.items(.tag);
const datas = ast.nodes.items(.data);
const main_tokens = ast.nodes.items(.main_token);
switch (node_tags[stmt]) {
.builtin_call_two, .builtin_call_two_comma => {
if (datas[stmt].lhs == 0) {
return categorizeBuiltinCall(ast, main_tokens[stmt], &.{});
} else if (datas[stmt].rhs == 0) {
return categorizeBuiltinCall(ast, main_tokens[stmt], &.{datas[stmt].lhs});
} else {
return categorizeBuiltinCall(ast, main_tokens[stmt], &.{ datas[stmt].lhs, datas[stmt].rhs });
}
},
.builtin_call, .builtin_call_comma => {
const params = ast.extra_data[datas[stmt].lhs..datas[stmt].rhs];
return categorizeBuiltinCall(ast, main_tokens[stmt], params);
switch (ast.nodeTag(stmt)) {
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(&buf, stmt).?;
return categorizeBuiltinCall(ast, ast.nodeMainToken(stmt), params);
},
.assign => {
const infix = datas[stmt];
if (isDiscardIdent(ast, infix.lhs) and node_tags[infix.rhs] == .identifier) {
const name_bytes = ast.tokenSlice(main_tokens[infix.rhs]);
const lhs, const rhs = ast.nodeData(stmt).node_and_node;
if (isDiscardIdent(ast, lhs) and ast.nodeTag(rhs) == .identifier) {
const name_bytes = ast.tokenSlice(ast.nodeMainToken(rhs));
if (std.mem.eql(u8, name_bytes, "undefined")) {
return .discard_undefined;
} else {
@ -1074,11 +984,9 @@ fn isFalseIdent(ast: *const Ast, node: Ast.Node.Index) bool {
}
fn isMatchingIdent(ast: *const Ast, node: Ast.Node.Index, string: []const u8) bool {
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
switch (node_tags[node]) {
switch (ast.nodeTag(node)) {
.identifier => {
const token_index = main_tokens[node];
const token_index = ast.nodeMainToken(node);
const name_bytes = ast.tokenSlice(token_index);
return std.mem.eql(u8, name_bytes, string);
},
@ -1087,11 +995,10 @@ fn isMatchingIdent(ast: *const Ast, node: Ast.Node.Index, string: []const u8) bo
}
fn isEmptyBlock(ast: *const Ast, node: Ast.Node.Index) bool {
const node_tags = ast.nodes.items(.tag);
const node_data = ast.nodes.items(.data);
switch (node_tags[node]) {
switch (ast.nodeTag(node)) {
.block_two => {
return node_data[node].lhs == 0 and node_data[node].rhs == 0;
const opt_lhs, const opt_rhs = ast.nodeData(node).opt_node_and_opt_node;
return opt_lhs == .none and opt_rhs == .none;
},
else => return false,
}


@ -15,8 +15,7 @@ parent: Index,
pub const ExtraInfo = struct {
is_pub: bool,
name: []const u8,
/// This might not be a doc_comment token in which case there are no doc comments.
first_doc_comment: Ast.TokenIndex,
first_doc_comment: Ast.OptionalTokenIndex,
};
pub const Index = enum(u32) {
@ -34,16 +33,14 @@ pub fn is_pub(d: *const Decl) bool {
pub fn extra_info(d: *const Decl) ExtraInfo {
const ast = d.file.get_ast();
const token_tags = ast.tokens.items(.tag);
const node_tags = ast.nodes.items(.tag);
switch (node_tags[d.ast_node]) {
switch (ast.nodeTag(d.ast_node)) {
.root => return .{
.name = "",
.is_pub = true,
.first_doc_comment = if (token_tags[0] == .container_doc_comment)
0
.first_doc_comment = if (ast.tokenTag(0) == .container_doc_comment)
.fromToken(0)
else
token_tags.len - 1,
.none,
},
.global_var_decl,
@ -53,7 +50,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
=> {
const var_decl = ast.fullVarDecl(d.ast_node).?;
const name_token = var_decl.ast.mut_token + 1;
assert(token_tags[name_token] == .identifier);
assert(ast.tokenTag(name_token) == .identifier);
const ident_name = ast.tokenSlice(name_token);
return .{
.name = ident_name,
@ -71,7 +68,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = ast.fullFnProto(&buf, d.ast_node).?;
const name_token = fn_proto.name_token.?;
assert(token_tags[name_token] == .identifier);
assert(ast.tokenTag(name_token) == .identifier);
const ident_name = ast.tokenSlice(name_token);
return .{
.name = ident_name,
@ -89,9 +86,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
pub fn value_node(d: *const Decl) ?Ast.Node.Index {
const ast = d.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const token_tags = ast.tokens.items(.tag);
return switch (node_tags[d.ast_node]) {
return switch (ast.nodeTag(d.ast_node)) {
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
@ -106,8 +101,8 @@ pub fn value_node(d: *const Decl) ?Ast.Node.Index {
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(d.ast_node).?;
if (token_tags[var_decl.ast.mut_token] == .keyword_const)
return var_decl.ast.init_node;
if (ast.tokenTag(var_decl.ast.mut_token) == .keyword_const)
return var_decl.ast.init_node.unwrap();
return null;
},
@ -148,20 +143,13 @@ pub fn get_child(decl: *const Decl, name: []const u8) ?Decl.Index {
pub fn get_type_fn_return_type_fn(decl: *const Decl) ?Decl.Index {
if (decl.get_type_fn_return_expr()) |return_expr| {
const ast = decl.file.get_ast();
const node_tags = ast.nodes.items(.tag);
switch (node_tags[return_expr]) {
.call, .call_comma, .call_one, .call_one_comma => {
const node_data = ast.nodes.items(.data);
const function = node_data[return_expr].lhs;
const token = ast.nodes.items(.main_token)[function];
var buffer: [1]Ast.Node.Index = undefined;
const call = ast.fullCall(&buffer, return_expr) orelse return null;
const token = ast.nodeMainToken(call.ast.fn_expr);
const name = ast.tokenSlice(token);
if (decl.lookup(name)) |function_decl| {
return function_decl;
}
},
else => {},
}
}
return null;
}
@ -171,36 +159,19 @@ pub fn get_type_fn_return_expr(decl: *const Decl) ?Ast.Node.Index {
switch (decl.categorize()) {
.type_function => {
const ast = decl.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_data = ast.nodes.items(.data);
const body_node = node_data[decl.ast_node].rhs;
if (body_node == 0) return null;
switch (node_tags[body_node]) {
.block, .block_semicolon => {
const statements = ast.extra_data[node_data[body_node].lhs..node_data[body_node].rhs];
// Look for the return statement
const body_node = ast.nodeData(decl.ast_node).node_and_node[1];
var buf: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(&buf, body_node) orelse return null;
for (statements) |stmt| {
if (node_tags[stmt] == .@"return") {
return node_data[stmt].lhs;
if (ast.nodeTag(stmt) == .@"return") {
return ast.nodeData(stmt).node;
}
}
return null;
},
.block_two, .block_two_semicolon => {
if (node_tags[node_data[body_node].lhs] == .@"return") {
return node_data[node_data[body_node].lhs].lhs;
}
if (node_data[body_node].rhs != 0 and
node_tags[node_data[body_node].rhs] == .@"return")
{
return node_data[node_data[body_node].rhs].lhs;
}
return null;
},
else => return null,
}
},
else => return null,
}
}
@ -269,16 +240,15 @@ pub fn append_parent_ns(list: *std.ArrayListUnmanaged(u8), parent: Decl.Index) O
}
}
pub fn findFirstDocComment(ast: *const Ast, token: Ast.TokenIndex) Ast.TokenIndex {
const token_tags = ast.tokens.items(.tag);
pub fn findFirstDocComment(ast: *const Ast, token: Ast.TokenIndex) Ast.OptionalTokenIndex {
var it = token;
while (it > 0) {
it -= 1;
if (token_tags[it] != .doc_comment) {
return it + 1;
if (ast.tokenTag(it) != .doc_comment) {
return .fromToken(it + 1);
}
}
return it;
return .none;
}
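The new return type encodes the old comment's caveat ("this might not be a doc_comment token") in the type system: .none replaces "some token that is not a doc comment". Callers shrink from a tag re-check to a single capture, as later hunks in this diff show:

// was: const t = findFirstDocComment(...);
//      if (token_tags[t] == .doc_comment) { ... render ... }
if (Decl.findFirstDocComment(ast, first_token).unwrap()) |first_doc_comment| {
    try render_docs(out, decl_index, first_doc_comment, false);
}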
/// Successively looks up each component.


@ -91,12 +91,10 @@ pub const File = struct {
pub fn categorize_decl(file_index: File.Index, node: Ast.Node.Index) Category {
const ast = file_index.get_ast();
const node_tags = ast.nodes.items(.tag);
const token_tags = ast.tokens.items(.tag);
switch (node_tags[node]) {
switch (ast.nodeTag(node)) {
.root => {
for (ast.rootDecls()) |member| {
switch (node_tags[member]) {
switch (ast.nodeTag(member)) {
.container_field_init,
.container_field_align,
.container_field,
@ -113,10 +111,12 @@ pub const File = struct {
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(node).?;
if (token_tags[var_decl.ast.mut_token] == .keyword_var)
if (ast.tokenTag(var_decl.ast.mut_token) == .keyword_var)
return .{ .global_variable = node };
const init_node = var_decl.ast.init_node.unwrap() orelse
return .{ .global_const = node };
return categorize_expr(file_index, var_decl.ast.init_node);
return categorize_expr(file_index, init_node);
},
.fn_proto,
@ -139,7 +139,7 @@ pub const File = struct {
node: Ast.Node.Index,
full: Ast.full.FnProto,
) Category {
return switch (categorize_expr(file_index, full.ast.return_type)) {
return switch (categorize_expr(file_index, full.ast.return_type.unwrap().?)) {
.namespace, .container, .error_set, .type_type => .{ .type_function = node },
else => .{ .function = node },
};
@ -155,12 +155,8 @@ pub const File = struct {
pub fn categorize_expr(file_index: File.Index, node: Ast.Node.Index) Category {
const file = file_index.get();
const ast = file_index.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
const main_tokens = ast.nodes.items(.main_token);
const token_tags = ast.tokens.items(.tag);
//log.debug("categorize_expr tag {s}", .{@tagName(node_tags[node])});
return switch (node_tags[node]) {
//log.debug("categorize_expr tag {s}", .{@tagName(ast.nodeTag(node))});
return switch (ast.nodeTag(node)) {
.container_decl,
.container_decl_trailing,
.container_decl_arg,
@ -176,11 +172,11 @@ pub const File = struct {
=> {
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, node).?;
if (token_tags[container_decl.ast.main_token] != .keyword_struct) {
if (ast.tokenTag(container_decl.ast.main_token) != .keyword_struct) {
return .{ .container = node };
}
for (container_decl.ast.members) |member| {
switch (node_tags[member]) {
switch (ast.nodeTag(member)) {
.container_field_init,
.container_field_align,
.container_field,
@ -196,7 +192,7 @@ pub const File = struct {
=> .{ .error_set = node },
.identifier => {
const name_token = ast.nodes.items(.main_token)[node];
const name_token = ast.nodeMainToken(node);
const ident_name = ast.tokenSlice(name_token);
if (std.mem.eql(u8, ident_name, "type"))
return .type_type;
@ -217,9 +213,7 @@ pub const File = struct {
},
.field_access => {
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
const object_node, const field_ident = ast.nodeData(node).node_and_token;
const field_name = ast.tokenSlice(field_ident);
switch (categorize_expr(file_index, object_node)) {
@ -232,20 +226,13 @@ pub const File = struct {
return .{ .global_const = node };
},
.builtin_call_two, .builtin_call_two_comma => {
if (node_datas[node].lhs == 0) {
const params = [_]Ast.Node.Index{};
return categorize_builtin_call(file_index, node, &params);
} else if (node_datas[node].rhs == 0) {
const params = [_]Ast.Node.Index{node_datas[node].lhs};
return categorize_builtin_call(file_index, node, &params);
} else {
const params = [_]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
return categorize_builtin_call(file_index, node, &params);
}
},
.builtin_call, .builtin_call_comma => {
const params = ast.extra_data[node_datas[node].lhs..node_datas[node].rhs];
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(&buf, node).?;
return categorize_builtin_call(file_index, node, params);
},
@ -266,9 +253,9 @@ pub const File = struct {
.@"if",
=> {
const if_full = ast.fullIf(node).?;
if (if_full.ast.else_expr != 0) {
if (if_full.ast.else_expr.unwrap()) |else_expr| {
const then_cat = categorize_expr_deep(file_index, if_full.ast.then_expr);
const else_cat = categorize_expr_deep(file_index, if_full.ast.else_expr);
const else_cat = categorize_expr_deep(file_index, else_expr);
if (then_cat == .type_type and else_cat == .type_type) {
return .type_type;
} else if (then_cat == .error_set and else_cat == .error_set) {
@ -327,11 +314,10 @@ pub const File = struct {
params: []const Ast.Node.Index,
) Category {
const ast = file_index.get_ast();
const main_tokens = ast.nodes.items(.main_token);
const builtin_token = main_tokens[node];
const builtin_token = ast.nodeMainToken(node);
const builtin_name = ast.tokenSlice(builtin_token);
if (std.mem.eql(u8, builtin_name, "@import")) {
const str_lit_token = main_tokens[params[0]];
const str_lit_token = ast.nodeMainToken(params[0]);
const str_bytes = ast.tokenSlice(str_lit_token);
const file_path = std.zig.string_literal.parseAlloc(gpa, str_bytes) catch @panic("OOM");
defer gpa.free(file_path);
@ -364,14 +350,12 @@ pub const File = struct {
fn categorize_switch(file_index: File.Index, node: Ast.Node.Index) Category {
const ast = file_index.get_ast();
const node_datas = ast.nodes.items(.data);
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
const case_nodes = ast.extra_data[extra.start..extra.end];
const full = ast.fullSwitch(node).?;
var all_type_type = true;
var all_error_set = true;
var any_type = false;
if (case_nodes.len == 0) return .{ .global_const = node };
for (case_nodes) |case_node| {
if (full.ast.cases.len == 0) return .{ .global_const = node };
for (full.ast.cases) |case_node| {
const case = ast.fullSwitchCase(case_node).?;
switch (categorize_expr_deep(file_index, case.ast.target_expr)) {
.type_type => {
@ -417,8 +401,8 @@ pub fn add_file(file_name: []const u8, bytes: []u8) !File.Index {
const scope = try gpa.create(Scope);
scope.* = .{ .tag = .top };
const decl_index = try file_index.add_decl(0, .none);
try struct_decl(&w, scope, decl_index, 0, ast.containerDeclRoot());
const decl_index = try file_index.add_decl(.root, .none);
try struct_decl(&w, scope, decl_index, .root, ast.containerDeclRoot());
const file = file_index.get();
shrinkToFit(&file.ident_decls);
@ -512,13 +496,12 @@ pub const Scope = struct {
}
pub fn lookup(start_scope: *Scope, ast: *const Ast, name: []const u8) ?Ast.Node.Index {
const main_tokens = ast.nodes.items(.main_token);
var it: *Scope = start_scope;
while (true) switch (it.tag) {
.top => break,
.local => {
const local: *Local = @alignCast(@fieldParentPtr("base", it));
const name_token = main_tokens[local.var_node] + 1;
const name_token = ast.nodeMainToken(local.var_node) + 1;
const ident_name = ast.tokenSlice(name_token);
if (std.mem.eql(u8, ident_name, name)) {
return local.var_node;
@ -545,8 +528,6 @@ fn struct_decl(
container_decl: Ast.full.ContainerDecl,
) Oom!void {
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
const namespace = try gpa.create(Scope.Namespace);
namespace.* = .{
@ -556,7 +537,7 @@ fn struct_decl(
try w.file.get().scopes.putNoClobber(gpa, node, &namespace.base);
try w.scanDecls(namespace, container_decl.ast.members);
for (container_decl.ast.members) |member| switch (node_tags[member]) {
for (container_decl.ast.members) |member| switch (ast.nodeTag(member)) {
.container_field_init,
.container_field_align,
.container_field,
@ -576,7 +557,7 @@ fn struct_decl(
try w.file.get().doctests.put(gpa, member, doctest_node);
}
const decl_index = try w.file.add_decl(member, parent_decl);
const body = if (node_tags[member] == .fn_decl) node_datas[member].rhs else 0;
const body = if (ast.nodeTag(member) == .fn_decl) ast.nodeData(member).node_and_node[1].toOptional() else .none;
try w.fn_decl(&namespace.base, decl_index, body, full);
},
@ -591,9 +572,9 @@ fn struct_decl(
.@"comptime",
.@"usingnamespace",
=> try w.expr(&namespace.base, parent_decl, node_datas[member].lhs),
=> try w.expr(&namespace.base, parent_decl, ast.nodeData(member).node),
.test_decl => try w.expr(&namespace.base, parent_decl, node_datas[member].rhs),
.test_decl => try w.expr(&namespace.base, parent_decl, ast.nodeData(member).opt_token_and_node[1]),
else => unreachable,
};
@ -640,13 +621,13 @@ fn fn_decl(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
body: Ast.Node.Index,
body: Ast.Node.OptionalIndex,
full: Ast.full.FnProto,
) Oom!void {
for (full.ast.params) |param| {
try expr(w, scope, parent_decl, param);
}
try expr(w, scope, parent_decl, full.ast.return_type);
try expr(w, scope, parent_decl, full.ast.return_type.unwrap().?);
try maybe_expr(w, scope, parent_decl, full.ast.align_expr);
try maybe_expr(w, scope, parent_decl, full.ast.addrspace_expr);
try maybe_expr(w, scope, parent_decl, full.ast.section_expr);
@ -654,17 +635,13 @@ fn fn_decl(
try maybe_expr(w, scope, parent_decl, body);
}
fn maybe_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
if (node != 0) return expr(w, scope, parent_decl, node);
fn maybe_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.OptionalIndex) Oom!void {
if (node.unwrap()) |n| return expr(w, scope, parent_decl, n);
}
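With the helper's parameter typed as Ast.Node.OptionalIndex, call sites spell absence as the enum literal .none and promote known-present indices with toOptional(); the 0 sentinel disappears entirely. From the struct_decl hunk above:

const body: Ast.Node.OptionalIndex = if (ast.nodeTag(member) == .fn_decl)
    ast.nodeData(member).node_and_node[1].toOptional()
else
    .none;
try w.fn_decl(&namespace.base, decl_index, body, full);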
fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
assert(node != 0);
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
const main_tokens = ast.nodes.items(.main_token);
switch (node_tags[node]) {
switch (ast.nodeTag(node)) {
.root => unreachable, // Top-level declaration.
.@"usingnamespace" => unreachable, // Top-level declaration.
.test_decl => unreachable, // Top-level declaration.
@ -745,8 +722,9 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.array_access,
.switch_range,
=> {
try expr(w, scope, parent_decl, node_datas[node].lhs);
try expr(w, scope, parent_decl, node_datas[node].rhs);
const lhs, const rhs = ast.nodeData(node).node_and_node;
try expr(w, scope, parent_decl, lhs);
try expr(w, scope, parent_decl, rhs);
},
.assign_destructure => {
@ -759,35 +737,33 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.bit_not,
.negation,
.negation_wrap,
.@"return",
.deref,
.address_of,
.optional_type,
.unwrap_optional,
.grouped_expression,
.@"comptime",
.@"nosuspend",
.@"suspend",
.@"await",
.@"resume",
.@"try",
=> try maybe_expr(w, scope, parent_decl, node_datas[node].lhs),
=> try expr(w, scope, parent_decl, ast.nodeData(node).node),
.unwrap_optional,
.grouped_expression,
=> try expr(w, scope, parent_decl, ast.nodeData(node).node_and_token[0]),
.@"return" => try maybe_expr(w, scope, parent_decl, ast.nodeData(node).opt_node),
.anyframe_type,
.@"break",
=> try maybe_expr(w, scope, parent_decl, node_datas[node].rhs),
.anyframe_type => try expr(w, scope, parent_decl, ast.nodeData(node).token_and_node[1]),
.@"break" => try maybe_expr(w, scope, parent_decl, ast.nodeData(node).opt_token_and_opt_node[1]),
.identifier => {
const ident_token = main_tokens[node];
const ident_token = ast.nodeMainToken(node);
const ident_name = ast.tokenSlice(ident_token);
if (scope.lookup(ast, ident_name)) |var_node| {
try w.file.get().ident_decls.put(gpa, ident_token, var_node);
}
},
.field_access => {
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
const object_node, const field_ident = ast.nodeData(node).node_and_token;
try w.file.get().token_parents.put(gpa, field_ident, node);
// This will populate the left-most field object if it is an
// identifier, allowing rendering code to piece together the link.
@ -818,20 +794,13 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
try expr(w, scope, parent_decl, full.ast.template);
},
.builtin_call_two, .builtin_call_two_comma => {
if (node_datas[node].lhs == 0) {
const params = [_]Ast.Node.Index{};
return builtin_call(w, scope, parent_decl, node, &params);
} else if (node_datas[node].rhs == 0) {
const params = [_]Ast.Node.Index{node_datas[node].lhs};
return builtin_call(w, scope, parent_decl, node, &params);
} else {
const params = [_]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
return builtin_call(w, scope, parent_decl, node, &params);
}
},
.builtin_call, .builtin_call_comma => {
const params = ast.extra_data[node_datas[node].lhs..node_datas[node].rhs];
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(&buf, node).?;
return builtin_call(w, scope, parent_decl, node, params);
},
@ -871,9 +840,10 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.for_simple, .@"for" => {
const full = ast.fullFor(node).?;
for (full.ast.inputs) |input| {
if (node_tags[input] == .for_range) {
try expr(w, scope, parent_decl, node_datas[input].lhs);
try maybe_expr(w, scope, parent_decl, node_datas[input].rhs);
if (ast.nodeTag(input) == .for_range) {
const start, const end = ast.nodeData(input).node_and_opt_node;
try expr(w, scope, parent_decl, start);
try maybe_expr(w, scope, parent_decl, end);
} else {
try expr(w, scope, parent_decl, input);
}
@ -886,18 +856,13 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.slice_open => return slice(w, scope, parent_decl, ast.sliceOpen(node)),
.slice_sentinel => return slice(w, scope, parent_decl, ast.sliceSentinel(node)),
.block_two, .block_two_semicolon => {
const statements = [2]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
if (node_datas[node].lhs == 0) {
return block(w, scope, parent_decl, statements[0..0]);
} else if (node_datas[node].rhs == 0) {
return block(w, scope, parent_decl, statements[0..1]);
} else {
return block(w, scope, parent_decl, statements[0..2]);
}
},
.block, .block_semicolon => {
const statements = ast.extra_data[node_datas[node].lhs..node_datas[node].rhs];
.block_two,
.block_two_semicolon,
.block,
.block_semicolon,
=> {
var buf: [2]Ast.Node.Index = undefined;
const statements = ast.blockStatements(&buf, node).?;
return block(w, scope, parent_decl, statements);
},
@ -933,17 +898,16 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
},
.array_type_sentinel => {
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
try expr(w, scope, parent_decl, node_datas[node].lhs);
const len_expr, const extra_index = ast.nodeData(node).node_and_extra;
const extra = ast.extraData(extra_index, Ast.Node.ArrayTypeSentinel);
try expr(w, scope, parent_decl, len_expr);
try expr(w, scope, parent_decl, extra.elem_type);
try expr(w, scope, parent_decl, extra.sentinel);
},
.@"switch", .switch_comma => {
const operand_node = node_datas[node].lhs;
try expr(w, scope, parent_decl, operand_node);
const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
const case_nodes = ast.extra_data[extra.start..extra.end];
for (case_nodes) |case_node| {
const full = ast.fullSwitch(node).?;
try expr(w, scope, parent_decl, full.ast.condition);
for (full.ast.cases) |case_node| {
const case = ast.fullSwitchCase(case_node).?;
for (case.ast.values) |value_node| {
try expr(w, scope, parent_decl, value_node);
@ -992,7 +956,7 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.fn_proto,
=> {
var buf: [1]Ast.Node.Index = undefined;
return fn_decl(w, scope, parent_decl, 0, ast.fullFnProto(&buf, node).?);
return fn_decl(w, scope, parent_decl, .none, ast.fullFnProto(&buf, node).?);
},
}
}
@ -1012,8 +976,7 @@ fn builtin_call(
params: []const Ast.Node.Index,
) Oom!void {
const ast = w.file.get_ast();
const main_tokens = ast.nodes.items(.main_token);
const builtin_token = main_tokens[node];
const builtin_token = ast.nodeMainToken(node);
const builtin_name = ast.tokenSlice(builtin_token);
if (std.mem.eql(u8, builtin_name, "@This")) {
try w.file.get().node_decls.put(gpa, node, scope.getNamespaceDecl());
@ -1031,13 +994,11 @@ fn block(
statements: []const Ast.Node.Index,
) Oom!void {
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
var scope = parent_scope;
for (statements) |node| {
switch (node_tags[node]) {
switch (ast.nodeTag(node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
@ -1058,11 +1019,10 @@ fn block(
log.debug("walk assign_destructure not implemented yet", .{});
},
.grouped_expression => try expr(w, scope, parent_decl, node_datas[node].lhs),
.grouped_expression => try expr(w, scope, parent_decl, ast.nodeData(node).node_and_token[0]),
.@"defer",
.@"errdefer",
=> try expr(w, scope, parent_decl, node_datas[node].rhs),
.@"defer" => try expr(w, scope, parent_decl, ast.nodeData(node).node),
.@"errdefer" => try expr(w, scope, parent_decl, ast.nodeData(node).opt_token_and_node[1]),
else => try expr(w, scope, parent_decl, node),
}
@ -1078,18 +1038,14 @@ fn while_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, full: Ast.full.W
fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.Index) Oom!void {
const ast = w.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
const token_tags = ast.tokens.items(.tag);
const node_datas = ast.nodes.items(.data);
for (members) |member_node| {
const name_token = switch (node_tags[member_node]) {
const name_token = switch (ast.nodeTag(member_node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> main_tokens[member_node] + 1,
=> ast.nodeMainToken(member_node) + 1,
.fn_proto_simple,
.fn_proto_multi,
@ -1097,18 +1053,20 @@ fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.In
.fn_proto,
.fn_decl,
=> blk: {
const ident = main_tokens[member_node] + 1;
if (token_tags[ident] != .identifier) continue;
const ident = ast.nodeMainToken(member_node) + 1;
if (ast.tokenTag(ident) != .identifier) continue;
break :blk ident;
},
.test_decl => {
const ident_token = node_datas[member_node].lhs;
const is_doctest = token_tags[ident_token] == .identifier;
const opt_ident_token = ast.nodeData(member_node).opt_token_and_node[0];
if (opt_ident_token.unwrap()) |ident_token| {
const is_doctest = ast.tokenTag(ident_token) == .identifier;
if (is_doctest) {
const token_bytes = ast.tokenSlice(ident_token);
try namespace.doctests.put(gpa, token_bytes, member_node);
}
}
continue;
},


@ -41,14 +41,10 @@ pub fn fileSourceHtml(
var field_access_buffer: std.ArrayListUnmanaged(u8) = .empty;
};
const token_tags = ast.tokens.items(.tag);
const token_starts = ast.tokens.items(.start);
const main_tokens = ast.nodes.items(.main_token);
const start_token = ast.firstToken(root_node);
const end_token = ast.lastToken(root_node) + 1;
var cursor: usize = token_starts[start_token];
var cursor: usize = ast.tokenStart(start_token);
var indent: usize = 0;
if (std.mem.lastIndexOf(u8, ast.source[0..cursor], "\n")) |newline_index| {
@ -64,8 +60,8 @@ pub fn fileSourceHtml(
var next_annotate_index: usize = 0;
for (
token_tags[start_token..end_token],
token_starts[start_token..end_token],
ast.tokens.items(.tag)[start_token..end_token],
ast.tokens.items(.start)[start_token..end_token],
start_token..,
) |tag, start, token_index| {
const between = ast.source[cursor..start];
@ -184,7 +180,7 @@ pub fn fileSourceHtml(
.identifier => i: {
if (options.fn_link != .none) {
const fn_link = options.fn_link.get();
const fn_token = main_tokens[fn_link.ast_node];
const fn_token = ast.nodeMainToken(fn_link.ast_node);
if (token_index == fn_token + 1) {
try out.appendSlice(gpa, "<a class=\"tok-fn\" href=\"#");
_ = missing_feature_url_escape;
@ -196,7 +192,7 @@ pub fn fileSourceHtml(
}
}
if (token_index > 0 and token_tags[token_index - 1] == .keyword_fn) {
if (token_index > 0 and ast.tokenTag(token_index - 1) == .keyword_fn) {
try out.appendSlice(gpa, "<span class=\"tok-fn\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
@ -358,16 +354,11 @@ fn walkFieldAccesses(
node: Ast.Node.Index,
) Oom!void {
const ast = file_index.get_ast();
const node_tags = ast.nodes.items(.tag);
assert(node_tags[node] == .field_access);
const node_datas = ast.nodes.items(.data);
const main_tokens = ast.nodes.items(.main_token);
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
switch (node_tags[object_node]) {
assert(ast.nodeTag(node) == .field_access);
const object_node, const field_ident = ast.nodeData(node).node_and_token;
switch (ast.nodeTag(object_node)) {
.identifier => {
const lhs_ident = main_tokens[object_node];
const lhs_ident = ast.nodeMainToken(object_node);
try resolveIdentLink(file_index, out, lhs_ident);
},
.field_access => {


@ -124,7 +124,9 @@ fn query_exec_fallible(query: []const u8, ignore_case: bool) !void {
@memcpy(g.full_path_search_text_lower.items, g.full_path_search_text.items);
const ast = decl.file.get_ast();
try collect_docs(&g.doc_search_text, ast, info.first_doc_comment);
if (info.first_doc_comment.unwrap()) |first_doc_comment| {
try collect_docs(&g.doc_search_text, ast, first_doc_comment);
}
if (ignore_case) {
ascii_lower(g.full_path_search_text_lower.items);
@ -227,18 +229,15 @@ const ErrorIdentifier = packed struct(u64) {
fn hasDocs(ei: ErrorIdentifier) bool {
const decl_index = ei.decl_index;
const ast = decl_index.get().file.get_ast();
const token_tags = ast.tokens.items(.tag);
const token_index = ei.token_index;
if (token_index == 0) return false;
return token_tags[token_index - 1] == .doc_comment;
return ast.tokenTag(token_index - 1) == .doc_comment;
}
fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
const decl_index = ei.decl_index;
const ast = decl_index.get().file.get_ast();
const name = ast.tokenSlice(ei.token_index);
const first_doc_comment = Decl.findFirstDocComment(ast, ei.token_index);
const has_docs = ast.tokens.items(.tag)[first_doc_comment] == .doc_comment;
const has_link = base_decl != decl_index;
try out.appendSlice(gpa, "<dt>");
@ -253,7 +252,7 @@ const ErrorIdentifier = packed struct(u64) {
}
try out.appendSlice(gpa, "</dt>");
if (has_docs) {
if (Decl.findFirstDocComment(ast, ei.token_index).unwrap()) |first_doc_comment| {
try out.appendSlice(gpa, "<dd>");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</dd>");
@ -319,17 +318,16 @@ fn addErrorsFromExpr(
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
switch (decl.file.categorize_expr(node)) {
.error_set => |n| switch (node_tags[n]) {
.error_set => |n| switch (ast.nodeTag(n)) {
.error_set_decl => {
try addErrorsFromNode(decl_index, out, node);
},
.merge_error_sets => {
try addErrorsFromExpr(decl_index, out, node_datas[node].lhs);
try addErrorsFromExpr(decl_index, out, node_datas[node].rhs);
const lhs, const rhs = ast.nodeData(n).node_and_node;
try addErrorsFromExpr(decl_index, out, lhs);
try addErrorsFromExpr(decl_index, out, rhs);
},
else => unreachable,
},
@ -347,11 +345,9 @@ fn addErrorsFromNode(
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const main_tokens = ast.nodes.items(.main_token);
const token_tags = ast.tokens.items(.tag);
const error_token = main_tokens[node];
const error_token = ast.nodeMainToken(node);
var tok_i = error_token + 2;
while (true) : (tok_i += 1) switch (token_tags[tok_i]) {
while (true) : (tok_i += 1) switch (ast.tokenTag(tok_i)) {
.doc_comment, .comma => {},
.identifier => {
const name = ast.tokenSlice(tok_i);
@ -391,15 +387,13 @@ fn decl_fields_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
switch (decl.categorize()) {
.type_function => {
const node_tags = ast.nodes.items(.tag);
// If the type function returns a reference to another type function, get the fields from there
if (decl.get_type_fn_return_type_fn()) |function_decl| {
return decl_fields_fallible(function_decl);
}
// If the type function returns a container, such as a `struct`, read that container's fields
if (decl.get_type_fn_return_expr()) |return_expr| {
switch (node_tags[return_expr]) {
switch (ast.nodeTag(return_expr)) {
.container_decl, .container_decl_trailing, .container_decl_two, .container_decl_two_trailing, .container_decl_arg, .container_decl_arg_trailing => {
return ast_decl_fields_fallible(ast, return_expr);
},
@ -420,10 +414,9 @@ fn ast_decl_fields_fallible(ast: *Ast, ast_index: Ast.Node.Index) ![]Ast.Node.In
var result: std.ArrayListUnmanaged(Ast.Node.Index) = .empty;
};
g.result.clearRetainingCapacity();
const node_tags = ast.nodes.items(.tag);
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, ast_index) orelse return &.{};
for (container_decl.ast.members) |member_node| switch (node_tags[member_node]) {
for (container_decl.ast.members) |member_node| switch (ast.nodeTag(member_node)) {
.container_field_init,
.container_field_align,
.container_field,
@ -478,9 +471,8 @@ fn decl_field_html_fallible(
try out.appendSlice(gpa, "</code></pre>");
const field = ast.fullContainerField(field_node).?;
const first_doc_comment = Decl.findFirstDocComment(ast, field.firstToken());
if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
if (Decl.findFirstDocComment(ast, field.firstToken()).unwrap()) |first_doc_comment| {
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</div>");
@ -494,14 +486,13 @@ fn decl_param_html_fallible(
) !void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const token_tags = ast.tokens.items(.tag);
const colon = ast.firstToken(param_node) - 1;
const name_token = colon - 1;
const first_doc_comment = f: {
var it = ast.firstToken(param_node);
while (it > 0) {
it -= 1;
switch (token_tags[it]) {
switch (ast.tokenTag(it)) {
.doc_comment, .colon, .identifier, .keyword_comptime, .keyword_noalias => {},
else => break,
}
@ -516,7 +507,7 @@ fn decl_param_html_fallible(
try fileSourceHtml(decl.file, out, param_node, .{});
try out.appendSlice(gpa, "</code></pre>");
if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
if (ast.tokenTag(first_doc_comment) == .doc_comment) {
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</div>");
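The doc-comment lookup above is a backward token scan, now expressed through `ast.tokenTag` instead of a materialized `token_tags` slice. A condensed, self-contained sketch of the same scan (the exact boundary handling in the file may differ slightly):

const std = @import("std");
const Ast = std.zig.Ast;

/// Walk left from a parameter's first token across everything that can
/// precede it (doc comments, `comptime`/`noalias`, name and colon) and
/// return the first token of that run.
fn paramDocScanStart(tree: *const Ast, param_first_token: Ast.TokenIndex) Ast.TokenIndex {
    var it = param_first_token;
    while (it > 0) {
        it -= 1;
        switch (tree.tokenTag(it)) {
            .doc_comment, .colon, .identifier, .keyword_comptime, .keyword_noalias => {},
            else => break,
        }
    }
    return it + 1;
}

The later `ast.tokenTag(first_doc_comment) == .doc_comment` check then decides whether the run actually started with doc comments.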
@ -526,10 +517,8 @@ fn decl_param_html_fallible(
export fn decl_fn_proto_html(decl_index: Decl.Index, linkify_fn_name: bool) String {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
const proto_node = switch (node_tags[decl.ast_node]) {
.fn_decl => node_datas[decl.ast_node].lhs,
const proto_node = switch (ast.nodeTag(decl.ast_node)) {
.fn_decl => ast.nodeData(decl.ast_node).node_and_node[0],
.fn_proto,
.fn_proto_one,
@ -586,17 +575,16 @@ export fn decl_parent(decl_index: Decl.Index) Decl.Index {
return decl.parent;
}
export fn fn_error_set(decl_index: Decl.Index) Ast.Node.Index {
export fn fn_error_set(decl_index: Decl.Index) Ast.Node.OptionalIndex {
const decl = decl_index.get();
const ast = decl.file.get_ast();
var buf: [1]Ast.Node.Index = undefined;
const full = ast.fullFnProto(&buf, decl.ast_node).?;
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
return switch (node_tags[full.ast.return_type]) {
.error_set_decl => full.ast.return_type,
.error_union => node_datas[full.ast.return_type].lhs,
else => 0,
const return_type = full.ast.return_type.unwrap().?;
return switch (ast.nodeTag(return_type)) {
.error_set_decl => return_type.toOptional(),
.error_union => ast.nodeData(return_type).node_and_node[0].toOptional(),
else => .none,
};
}
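`fn_error_set` is a good example of the API's new shape at boundaries: the return type becomes `Ast.Node.OptionalIndex`, `.none` replaces the old magic `0` (which doubled as the root node's index), and real indices are injected with `.toOptional()`. A caller-side sketch:

const std = @import("std");
const Ast = std.zig.Ast;

/// Sketch of consuming an optional node index.
fn printErrorSet(tree: *const Ast, opt_node: Ast.Node.OptionalIndex) void {
    if (opt_node.unwrap()) |node| {
        // A genuine Ast.Node.Index; safe for nodeTag/nodeData.
        std.debug.print("error set: {s}\n", .{@tagName(tree.nodeTag(node))});
    } else {
        // `.none`: previously this case was the easily misused index 0.
        std.debug.print("no error set\n", .{});
    }
}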
@ -609,21 +597,19 @@ export fn decl_file_path(decl_index: Decl.Index) String {
export fn decl_category_name(decl_index: Decl.Index) String {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const token_tags = ast.tokens.items(.tag);
const name = switch (decl.categorize()) {
.namespace, .container => |node| {
const node_tags = ast.nodes.items(.tag);
if (node_tags[decl.ast_node] == .root)
if (ast.nodeTag(decl.ast_node) == .root)
return String.init("struct");
string_result.clearRetainingCapacity();
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, node).?;
if (container_decl.layout_token) |t| {
if (token_tags[t] == .keyword_extern) {
if (ast.tokenTag(t) == .keyword_extern) {
string_result.appendSlice(gpa, "extern ") catch @panic("OOM");
}
}
const main_token_tag = token_tags[container_decl.ast.main_token];
const main_token_tag = ast.tokenTag(container_decl.ast.main_token);
string_result.appendSlice(gpa, main_token_tag.lexeme().?) catch @panic("OOM");
return String.init(string_result.items);
},
@ -656,7 +642,9 @@ export fn decl_name(decl_index: Decl.Index) String {
export fn decl_docs_html(decl_index: Decl.Index, short: bool) String {
const decl = decl_index.get();
string_result.clearRetainingCapacity();
render_docs(&string_result, decl_index, decl.extra_info().first_doc_comment, short) catch @panic("OOM");
if (decl.extra_info().first_doc_comment.unwrap()) |first_doc_comment| {
render_docs(&string_result, decl_index, first_doc_comment, short) catch @panic("OOM");
}
return String.init(string_result.items);
}
@ -665,10 +653,9 @@ fn collect_docs(
ast: *const Ast,
first_doc_comment: Ast.TokenIndex,
) Oom!void {
const token_tags = ast.tokens.items(.tag);
list.clearRetainingCapacity();
var it = first_doc_comment;
while (true) : (it += 1) switch (token_tags[it]) {
while (true) : (it += 1) switch (ast.tokenTag(it)) {
.doc_comment, .container_doc_comment => {
// It is tempting to trim this string but think carefully about how
// that will affect the markdown parser.
@ -687,12 +674,11 @@ fn render_docs(
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
const token_tags = ast.tokens.items(.tag);
var parser = try markdown.Parser.init(gpa);
defer parser.deinit();
var it = first_doc_comment;
while (true) : (it += 1) switch (token_tags[it]) {
while (true) : (it += 1) switch (ast.tokenTag(it)) {
.doc_comment, .container_doc_comment => {
const line = ast.tokenSlice(it)[3..];
if (short and line.len == 0) break;
@ -767,9 +753,9 @@ export fn decl_type_html(decl_index: Decl.Index) String {
t: {
// If there is an explicit type, use it.
if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
if (var_decl.ast.type_node != 0) {
if (var_decl.ast.type_node.unwrap()) |type_node| {
string_result.appendSlice(gpa, "<code>") catch @panic("OOM");
fileSourceHtml(decl.file, &string_result, var_decl.ast.type_node, .{
fileSourceHtml(decl.file, &string_result, type_node, .{
.skip_comments = true,
.collapse_whitespace = true,
}) catch |e| {

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -92,27 +92,26 @@ fn containerDecl(
full: Ast.full.ContainerDecl,
) !void {
const tree = astrl.tree;
const token_tags = tree.tokens.items(.tag);
switch (token_tags[full.ast.main_token]) {
switch (tree.tokenTag(full.ast.main_token)) {
.keyword_struct => {
if (full.ast.arg != 0) {
_ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
if (full.ast.arg.unwrap()) |arg| {
_ = try astrl.expr(arg, block, ResultInfo.type_only);
}
for (full.ast.members) |member_node| {
_ = try astrl.expr(member_node, block, ResultInfo.none);
}
},
.keyword_union => {
if (full.ast.arg != 0) {
_ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
if (full.ast.arg.unwrap()) |arg| {
_ = try astrl.expr(arg, block, ResultInfo.type_only);
}
for (full.ast.members) |member_node| {
_ = try astrl.expr(member_node, block, ResultInfo.none);
}
},
.keyword_enum => {
if (full.ast.arg != 0) {
_ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
if (full.ast.arg.unwrap()) |arg| {
_ = try astrl.expr(arg, block, ResultInfo.type_only);
}
for (full.ast.members) |member_node| {
_ = try astrl.expr(member_node, block, ResultInfo.none);
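With `full.ast.arg` now an optional index, the presence test reads as an unwrap rather than a comparison against the zero sentinel. Since the struct, union, and enum arms have become textually identical, they could in principle share one helper; a hedged sketch (my refactor suggestion, not the committed code, and it leans on this file's `AstRlAnnotate`, `Block`, and `ResultInfo` declarations):

/// Hypothetical helper assuming AstRlAnnotate's surrounding declarations.
fn containerArgAndMembers(astrl: *AstRlAnnotate, full: Ast.full.ContainerDecl, block: ?*Block) !void {
    if (full.ast.arg.unwrap()) |arg| {
        _ = try astrl.expr(arg, block, ResultInfo.type_only);
    }
    for (full.ast.members) |member_node| {
        _ = try astrl.expr(member_node, block, ResultInfo.none);
    }
}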
@ -130,10 +129,7 @@ fn containerDecl(
/// Returns true if `rl` provides a result pointer and the expression consumes it.
fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultInfo) Allocator.Error!bool {
const tree = astrl.tree;
const token_tags = tree.tokens.items(.tag);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
switch (node_tags[node]) {
switch (tree.nodeTag(node)) {
.root,
.switch_case_one,
.switch_case_inline_one,
@ -145,8 +141,12 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.asm_input,
=> unreachable,
.@"errdefer", .@"defer" => {
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
.@"errdefer" => {
_ = try astrl.expr(tree.nodeData(node).opt_token_and_node[1], block, ResultInfo.none);
return false;
},
.@"defer" => {
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
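The split of the shared `errdefer`/`defer` arm is forced by the typed payloads: the two tags no longer look alike to the type system. `errdefer` carries an optional `|err|` capture token plus the body (`opt_token_and_node`), while `defer` carries only the body node. A sketch of reading both:

const Ast = @import("std").zig.Ast;

/// Sketch: the body expression of a defer-like node.
fn deferBody(tree: *const Ast, node: Ast.Node.Index) Ast.Node.Index {
    return switch (tree.nodeTag(node)) {
        // payload: (optional capture token, body node)
        .@"errdefer" => tree.nodeData(node).opt_token_and_node[1],
        // payload: body node only
        .@"defer" => tree.nodeData(node).node,
        else => unreachable,
    };
}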
@ -155,21 +155,22 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.container_field,
=> {
const full = tree.fullContainerField(node).?;
_ = try astrl.expr(full.ast.type_expr, block, ResultInfo.type_only);
if (full.ast.align_expr != 0) {
_ = try astrl.expr(full.ast.align_expr, block, ResultInfo.type_only);
const type_expr = full.ast.type_expr.unwrap().?;
_ = try astrl.expr(type_expr, block, ResultInfo.type_only);
if (full.ast.align_expr.unwrap()) |align_expr| {
_ = try astrl.expr(align_expr, block, ResultInfo.type_only);
}
if (full.ast.value_expr != 0) {
_ = try astrl.expr(full.ast.value_expr, block, ResultInfo.type_only);
if (full.ast.value_expr.unwrap()) |value_expr| {
_ = try astrl.expr(value_expr, block, ResultInfo.type_only);
}
return false;
},
.@"usingnamespace" => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
return false;
},
.test_decl => {
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
_ = try astrl.expr(tree.nodeData(node).opt_token_and_node[1], block, ResultInfo.none);
return false;
},
.global_var_decl,
@ -178,17 +179,17 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.aligned_var_decl,
=> {
const full = tree.fullVarDecl(node).?;
const init_ri = if (full.ast.type_node != 0) init_ri: {
_ = try astrl.expr(full.ast.type_node, block, ResultInfo.type_only);
const init_ri = if (full.ast.type_node.unwrap()) |type_node| init_ri: {
_ = try astrl.expr(type_node, block, ResultInfo.type_only);
break :init_ri ResultInfo.typed_ptr;
} else ResultInfo.inferred_ptr;
if (full.ast.init_node == 0) {
const init_node = full.ast.init_node.unwrap() orelse {
// No init node, so we're done.
return false;
}
switch (token_tags[full.ast.mut_token]) {
};
switch (tree.tokenTag(full.ast.mut_token)) {
.keyword_const => {
const init_consumes_rl = try astrl.expr(full.ast.init_node, block, init_ri);
const init_consumes_rl = try astrl.expr(init_node, block, init_ri);
if (init_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
}
@ -197,7 +198,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.keyword_var => {
// We'll create an alloc either way, so don't care if the
// result pointer is consumed.
_ = try astrl.expr(full.ast.init_node, block, init_ri);
_ = try astrl.expr(init_node, block, init_ri);
return false;
},
else => unreachable,
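The variable-declaration logic now states its rule directly: an explicit type annotation means the initializer writes through a typed pointer, otherwise the allocation's type is inferred from the initializer, and a missing initializer exits early via `orelse`. A condensed decision sketch (assumes this file's `ResultInfo`; the committed code also lowers the type expression, omitted here):

/// Hypothetical condensation of the branch above.
fn varDeclInitRi(full: Ast.full.VarDecl) ?ResultInfo {
    if (full.ast.init_node == .none) return null; // nothing to lower
    return if (full.ast.type_node != .none) ResultInfo.typed_ptr else ResultInfo.inferred_ptr;
}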
@ -213,8 +214,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
return false;
},
.assign => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.typed_ptr);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.typed_ptr);
return false;
},
.assign_shl,
@ -235,13 +237,15 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.assign_mul_wrap,
.assign_mul_sat,
=> {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.none);
return false;
},
.shl, .shr => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.add,
@ -267,33 +271,38 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.less_or_equal,
.array_cat,
=> {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.none);
return false;
},
.array_mult => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.error_union, .merge_error_sets => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.none);
return false;
},
.bool_and,
.bool_or,
=> {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.type_only);
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.bool_not => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
return false;
},
.bit_not, .negation, .negation_wrap => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
@ -313,17 +322,13 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.error_set_decl,
=> return false,
.builtin_call_two, .builtin_call_two_comma => {
if (node_datas[node].lhs == 0) {
return astrl.builtinCall(block, ri, node, &.{});
} else if (node_datas[node].rhs == 0) {
return astrl.builtinCall(block, ri, node, &.{node_datas[node].lhs});
} else {
return astrl.builtinCall(block, ri, node, &.{ node_datas[node].lhs, node_datas[node].rhs });
}
},
.builtin_call, .builtin_call_comma => {
const params = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs];
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
const params = tree.builtinCallParams(&buf, node).?;
return astrl.builtinCall(block, ri, node, params);
},
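This is one of the nicer simplifications: the old code branched three ways on which of `lhs`/`rhs` were zero for the short builtin-call forms and hand-sliced `extra_data` for the long forms, while `builtinCallParams(&buf, node)` folds all four tags into one slice (the two-element buffer presumably backs the short forms, judging by the call shape). Usage sketch:

const std = @import("std");
const Ast = std.zig.Ast;

/// Sketch: argument count of any `@builtin(...)` call node;
/// null if `node` is not a builtin call.
fn builtinArgCount(tree: *const Ast, node: Ast.Node.Index) ?usize {
    var buf: [2]Ast.Node.Index = undefined;
    const params = tree.builtinCallParams(&buf, node) orelse return null;
    return params.len;
}

The `blockStatements` change later in this file applies the same normalization to the four block tags.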
@ -342,7 +347,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
for (full.ast.params) |param_node| {
_ = try astrl.expr(param_node, block, ResultInfo.type_only);
}
return switch (node_tags[node]) {
return switch (tree.nodeTag(node)) {
.call_one,
.call_one_comma,
.call,
@ -358,8 +363,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.@"return" => {
if (node_datas[node].lhs != 0) {
const ret_val_consumes_rl = try astrl.expr(node_datas[node].lhs, block, ResultInfo.typed_ptr);
if (tree.nodeData(node).opt_node.unwrap()) |lhs| {
const ret_val_consumes_rl = try astrl.expr(lhs, block, ResultInfo.typed_ptr);
if (ret_val_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
}
@ -368,7 +373,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.field_access => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
const lhs, _ = tree.nodeData(node).node_and_token;
_ = try astrl.expr(lhs, block, ResultInfo.none);
return false;
},
@ -380,15 +386,15 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
_ = try astrl.expr(full.ast.cond_expr, block, ResultInfo.type_only); // bool
}
if (full.ast.else_expr == 0) {
_ = try astrl.expr(full.ast.then_expr, block, ResultInfo.none);
return false;
} else {
if (full.ast.else_expr.unwrap()) |else_expr| {
const then_uses_rl = try astrl.expr(full.ast.then_expr, block, ri);
const else_uses_rl = try astrl.expr(full.ast.else_expr, block, ri);
const else_uses_rl = try astrl.expr(else_expr, block, ri);
const uses_rl = then_uses_rl or else_uses_rl;
if (uses_rl) try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
return uses_rl;
} else {
_ = try astrl.expr(full.ast.then_expr, block, ResultInfo.none);
return false;
}
},
@ -409,12 +415,12 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.ri = ri,
.consumes_res_ptr = false,
};
if (full.ast.cont_expr != 0) {
_ = try astrl.expr(full.ast.cont_expr, &new_block, ResultInfo.none);
if (full.ast.cont_expr.unwrap()) |cont_expr| {
_ = try astrl.expr(cont_expr, &new_block, ResultInfo.none);
}
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
const else_consumes_rl = if (full.ast.else_expr != 0) else_rl: {
break :else_rl try astrl.expr(full.ast.else_expr, block, ri);
const else_consumes_rl = if (full.ast.else_expr.unwrap()) |else_expr| else_rl: {
break :else_rl try astrl.expr(else_expr, block, ri);
} else false;
if (new_block.consumes_res_ptr or else_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
@ -430,10 +436,11 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
break :label try astrl.identString(label_token);
} else null;
for (full.ast.inputs) |input| {
if (node_tags[input] == .for_range) {
_ = try astrl.expr(node_datas[input].lhs, block, ResultInfo.type_only);
if (node_datas[input].rhs != 0) {
_ = try astrl.expr(node_datas[input].rhs, block, ResultInfo.type_only);
if (tree.nodeTag(input) == .for_range) {
const lhs, const opt_rhs = tree.nodeData(input).node_and_opt_node;
_ = try astrl.expr(lhs, block, ResultInfo.type_only);
if (opt_rhs.unwrap()) |rhs| {
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
}
} else {
_ = try astrl.expr(input, block, ResultInfo.none);
@ -447,8 +454,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.consumes_res_ptr = false,
};
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
const else_consumes_rl = if (full.ast.else_expr != 0) else_rl: {
break :else_rl try astrl.expr(full.ast.else_expr, block, ri);
const else_consumes_rl = if (full.ast.else_expr.unwrap()) |else_expr| else_rl: {
break :else_rl try astrl.expr(else_expr, block, ri);
} else false;
if (new_block.consumes_res_ptr or else_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
@ -459,66 +466,68 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.slice_open => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
const sliced, const start = tree.nodeData(node).node_and_node;
_ = try astrl.expr(sliced, block, ResultInfo.none);
_ = try astrl.expr(start, block, ResultInfo.type_only);
return false;
},
.slice => {
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice);
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
const sliced, const extra_index = tree.nodeData(node).node_and_extra;
const extra = tree.extraData(extra_index, Ast.Node.Slice);
_ = try astrl.expr(sliced, block, ResultInfo.none);
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
_ = try astrl.expr(extra.end, block, ResultInfo.type_only);
return false;
},
.slice_sentinel => {
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel);
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
const sliced, const extra_index = tree.nodeData(node).node_and_extra;
const extra = tree.extraData(extra_index, Ast.Node.SliceSentinel);
_ = try astrl.expr(sliced, block, ResultInfo.none);
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
if (extra.end != 0) {
_ = try astrl.expr(extra.end, block, ResultInfo.type_only);
if (extra.end.unwrap()) |end| {
_ = try astrl.expr(end, block, ResultInfo.type_only);
}
_ = try astrl.expr(extra.sentinel, block, ResultInfo.none);
return false;
},
.deref => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.address_of => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.optional_type => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
return false;
},
.grouped_expression,
.@"try",
.@"await",
.@"nosuspend",
=> return astrl.expr(tree.nodeData(node).node, block, ri),
.grouped_expression,
.unwrap_optional,
=> return astrl.expr(node_datas[node].lhs, block, ri),
=> return astrl.expr(tree.nodeData(node).node_and_token[0], block, ri),
.block_two, .block_two_semicolon => {
if (node_datas[node].lhs == 0) {
return astrl.blockExpr(block, ri, node, &.{});
} else if (node_datas[node].rhs == 0) {
return astrl.blockExpr(block, ri, node, &.{node_datas[node].lhs});
} else {
return astrl.blockExpr(block, ri, node, &.{ node_datas[node].lhs, node_datas[node].rhs });
}
},
.block, .block_semicolon => {
const statements = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs];
.block_two,
.block_two_semicolon,
.block,
.block_semicolon,
=> {
var buf: [2]Ast.Node.Index = undefined;
const statements = tree.blockStatements(&buf, node).?;
return astrl.blockExpr(block, ri, node, statements);
},
.anyframe_type => {
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
_, const child_type = tree.nodeData(node).token_and_node;
_ = try astrl.expr(child_type, block, ResultInfo.type_only);
return false;
},
.@"catch", .@"orelse" => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
const rhs_consumes_rl = try astrl.expr(node_datas[node].rhs, block, ri);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
const rhs_consumes_rl = try astrl.expr(rhs, block, ri);
if (rhs_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
}
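A few arms up, `blockStatements(&buf, node)` replaces the same zero-sentinel dance for blocks that `builtinCallParams` replaced for builtin calls. One concrete payoff shows up in the ZonGen diff further down, which uses the statement slice's length to detect `{}` void literals; a sketch of that check:

const Ast = @import("std").zig.Ast;

/// Sketch: true if `node` is an empty block, for any of the four block tags.
fn isEmptyBlock(tree: *const Ast, node: Ast.Node.Index) bool {
    var buf: [2]Ast.Node.Index = undefined;
    const statements = tree.blockStatements(&buf, node) orelse return false;
    return statements.len == 0;
}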
@ -532,19 +541,19 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
=> {
const full = tree.fullPtrType(node).?;
_ = try astrl.expr(full.ast.child_type, block, ResultInfo.type_only);
if (full.ast.sentinel != 0) {
_ = try astrl.expr(full.ast.sentinel, block, ResultInfo.type_only);
if (full.ast.sentinel.unwrap()) |sentinel| {
_ = try astrl.expr(sentinel, block, ResultInfo.type_only);
}
if (full.ast.addrspace_node != 0) {
_ = try astrl.expr(full.ast.addrspace_node, block, ResultInfo.type_only);
if (full.ast.addrspace_node.unwrap()) |addrspace_node| {
_ = try astrl.expr(addrspace_node, block, ResultInfo.type_only);
}
if (full.ast.align_node != 0) {
_ = try astrl.expr(full.ast.align_node, block, ResultInfo.type_only);
if (full.ast.align_node.unwrap()) |align_node| {
_ = try astrl.expr(align_node, block, ResultInfo.type_only);
}
if (full.ast.bit_range_start != 0) {
assert(full.ast.bit_range_end != 0);
_ = try astrl.expr(full.ast.bit_range_start, block, ResultInfo.type_only);
_ = try astrl.expr(full.ast.bit_range_end, block, ResultInfo.type_only);
if (full.ast.bit_range_start.unwrap()) |bit_range_start| {
const bit_range_end = full.ast.bit_range_end.unwrap().?;
_ = try astrl.expr(bit_range_start, block, ResultInfo.type_only);
_ = try astrl.expr(bit_range_end, block, ResultInfo.type_only);
}
return false;
},
@ -568,63 +577,66 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.@"break" => {
if (node_datas[node].rhs == 0) {
const opt_label, const opt_rhs = tree.nodeData(node).opt_token_and_opt_node;
const rhs = opt_rhs.unwrap() orelse {
// Breaks with void are not interesting
return false;
}
};
var opt_cur_block = block;
if (node_datas[node].lhs == 0) {
// No label - we're breaking from a loop.
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
if (cur_block.is_loop) break;
}
} else {
const break_label = try astrl.identString(node_datas[node].lhs);
if (opt_label.unwrap()) |label_token| {
const break_label = try astrl.identString(label_token);
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
const block_label = cur_block.label orelse continue;
if (std.mem.eql(u8, block_label, break_label)) break;
}
} else {
// No label - we're breaking from a loop.
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
if (cur_block.is_loop) break;
}
}
if (opt_cur_block) |target_block| {
const consumes_break_rl = try astrl.expr(node_datas[node].rhs, block, target_block.ri);
const consumes_break_rl = try astrl.expr(rhs, block, target_block.ri);
if (consumes_break_rl) target_block.consumes_res_ptr = true;
} else {
// No corresponding scope to break from - AstGen will emit an error.
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.none);
}
return false;
},
.array_type => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.type_only);
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.array_type_sentinel => {
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
const len_expr, const extra_index = tree.nodeData(node).node_and_extra;
const extra = tree.extraData(extra_index, Ast.Node.ArrayTypeSentinel);
_ = try astrl.expr(len_expr, block, ResultInfo.type_only);
_ = try astrl.expr(extra.elem_type, block, ResultInfo.type_only);
_ = try astrl.expr(extra.sentinel, block, ResultInfo.type_only);
return false;
},
.array_access => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
const lhs, const rhs = tree.nodeData(node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.@"comptime" => {
// AstGen will emit an error if the scope is already comptime, so we can assume it is
// not. This means the result location is not forwarded.
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.@"switch", .switch_comma => {
const operand_node = node_datas[node].lhs;
const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SubRange);
const case_nodes = tree.extra_data[extra.start..extra.end];
const operand_node, const extra_index = tree.nodeData(node).node_and_extra;
const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
_ = try astrl.expr(operand_node, block, ResultInfo.none);
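`extraDataSlice` is the typed replacement for raw `extra_data[start..end]` slicing: a `SubRange` plus an element type yields a properly typed slice. Sketch, assuming `node` is a `.switch`/`.switch_comma` node:

const std = @import("std");
const Ast = std.zig.Ast;

/// Sketch: the case nodes of a switch expression.
fn switchCases(tree: *const Ast, node: Ast.Node.Index) []const Ast.Node.Index {
    _, const extra_index = tree.nodeData(node).node_and_extra;
    // A typed view into extra_data; no manual start..end slicing of raw u32s.
    return tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
}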
@ -632,9 +644,10 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
for (case_nodes) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
for (case.ast.values) |item_node| {
if (node_tags[item_node] == .switch_range) {
_ = try astrl.expr(node_datas[item_node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[item_node].rhs, block, ResultInfo.none);
if (tree.nodeTag(item_node) == .switch_range) {
const lhs, const rhs = tree.nodeData(item_node).node_and_node;
_ = try astrl.expr(lhs, block, ResultInfo.none);
_ = try astrl.expr(rhs, block, ResultInfo.none);
} else {
_ = try astrl.expr(item_node, block, ResultInfo.none);
}
@ -649,11 +662,11 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
return any_prong_consumed_rl;
},
.@"suspend" => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.@"resume" => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
@ -669,9 +682,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullArrayInit(&buf, node).?;
if (full.ast.type_expr != 0) {
if (full.ast.type_expr.unwrap()) |type_expr| {
// Explicitly typed init does not participate in RLS
_ = try astrl.expr(full.ast.type_expr, block, ResultInfo.none);
_ = try astrl.expr(type_expr, block, ResultInfo.none);
for (full.ast.elements) |elem_init| {
_ = try astrl.expr(elem_init, block, ResultInfo.type_only);
}
@ -706,9 +719,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullStructInit(&buf, node).?;
if (full.ast.type_expr != 0) {
if (full.ast.type_expr.unwrap()) |type_expr| {
// Explicitly typed init does not participate in RLS
_ = try astrl.expr(full.ast.type_expr, block, ResultInfo.none);
_ = try astrl.expr(type_expr, block, ResultInfo.none);
for (full.ast.fields) |field_init| {
_ = try astrl.expr(field_init, block, ResultInfo.type_only);
}
@ -736,33 +749,35 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.fn_proto_one,
.fn_proto,
.fn_decl,
=> {
=> |tag| {
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
const body_node = if (node_tags[node] == .fn_decl) node_datas[node].rhs else 0;
const body_node = if (tag == .fn_decl) tree.nodeData(node).node_and_node[1].toOptional() else .none;
{
var it = full.iterate(tree);
while (it.next()) |param| {
if (param.anytype_ellipsis3 == null) {
_ = try astrl.expr(param.type_expr, block, ResultInfo.type_only);
const type_expr = param.type_expr.?;
_ = try astrl.expr(type_expr, block, ResultInfo.type_only);
}
}
}
if (full.ast.align_expr != 0) {
_ = try astrl.expr(full.ast.align_expr, block, ResultInfo.type_only);
if (full.ast.align_expr.unwrap()) |align_expr| {
_ = try astrl.expr(align_expr, block, ResultInfo.type_only);
}
if (full.ast.addrspace_expr != 0) {
_ = try astrl.expr(full.ast.addrspace_expr, block, ResultInfo.type_only);
if (full.ast.addrspace_expr.unwrap()) |addrspace_expr| {
_ = try astrl.expr(addrspace_expr, block, ResultInfo.type_only);
}
if (full.ast.section_expr != 0) {
_ = try astrl.expr(full.ast.section_expr, block, ResultInfo.type_only);
if (full.ast.section_expr.unwrap()) |section_expr| {
_ = try astrl.expr(section_expr, block, ResultInfo.type_only);
}
if (full.ast.callconv_expr != 0) {
_ = try astrl.expr(full.ast.callconv_expr, block, ResultInfo.type_only);
if (full.ast.callconv_expr.unwrap()) |callconv_expr| {
_ = try astrl.expr(callconv_expr, block, ResultInfo.type_only);
}
_ = try astrl.expr(full.ast.return_type, block, ResultInfo.type_only);
if (body_node != 0) {
_ = try astrl.expr(body_node, block, ResultInfo.none);
const return_type = full.ast.return_type.unwrap().?;
_ = try astrl.expr(return_type, block, ResultInfo.type_only);
if (body_node.unwrap()) |body| {
_ = try astrl.expr(body, block, ResultInfo.none);
}
return false;
},
@ -771,8 +786,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
fn identString(astrl: *AstRlAnnotate, token: Ast.TokenIndex) ![]const u8 {
const tree = astrl.tree;
const token_tags = tree.tokens.items(.tag);
assert(token_tags[token] == .identifier);
assert(tree.tokenTag(token) == .identifier);
const ident_name = tree.tokenSlice(token);
if (!std.mem.startsWith(u8, ident_name, "@")) {
return ident_name;
@ -785,13 +799,9 @@ fn identString(astrl: *AstRlAnnotate, token: Ast.TokenIndex) ![]const u8 {
fn blockExpr(astrl: *AstRlAnnotate, parent_block: ?*Block, ri: ResultInfo, node: Ast.Node.Index, statements: []const Ast.Node.Index) !bool {
const tree = astrl.tree;
const token_tags = tree.tokens.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
const lbrace = main_tokens[node];
if (token_tags[lbrace - 1] == .colon and
token_tags[lbrace - 2] == .identifier)
{
const lbrace = tree.nodeMainToken(node);
if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
// Labeled block
var new_block: Block = .{
.parent = parent_block,
@ -820,8 +830,7 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
_ = ri; // Currently, no builtin consumes its result location.
const tree = astrl.tree;
const main_tokens = tree.nodes.items(.main_token);
const builtin_token = main_tokens[node];
const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
const info = BuiltinFn.list.get(builtin_name) orelse return false;
if (info.param_count) |expected| {


@ -481,13 +481,13 @@ pub const Wip = struct {
const item = zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
extra_index = item.end;
const err_span = blk: {
if (item.data.node != 0) {
break :blk tree.nodeToSpan(item.data.node);
}
const token_starts = tree.tokens.items(.start);
const start = token_starts[item.data.token] + item.data.byte_offset;
const end = start + @as(u32, @intCast(tree.tokenSlice(item.data.token).len)) - item.data.byte_offset;
if (item.data.node.unwrap()) |node| {
break :blk tree.nodeToSpan(node);
} else if (item.data.token.unwrap()) |token| {
const start = tree.tokenStart(token) + item.data.byte_offset;
const end = start + @as(u32, @intCast(tree.tokenSlice(token).len)) - item.data.byte_offset;
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
} else unreachable;
};
const err_loc = std.zig.findLineColumn(source, err_span.main);
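The error-item decoding now makes the "node or token" duality explicit: exactly one of `item.data.node` and `item.data.token` is expected to be set, and the unwrap chain ends in `unreachable` if neither is. A self-contained sketch of the span resolution, kept faithful to the arithmetic above:

const std = @import("std");
const Ast = std.zig.Ast;

/// Sketch: resolve a ZIR error location to a source span.
fn errorSpan(tree: *const Ast, opt_node: Ast.Node.OptionalIndex, opt_token: Ast.OptionalTokenIndex, byte_offset: u32) Ast.Span {
    if (opt_node.unwrap()) |node| return tree.nodeToSpan(node);
    if (opt_token.unwrap()) |token| {
        const start = tree.tokenStart(token) + byte_offset;
        const end = start + @as(u32, @intCast(tree.tokenSlice(token).len)) - byte_offset;
        return .{ .start = start, .end = end, .main = start };
    }
    unreachable; // invariant: one of the two must be set
}

Worth noting: the note-handling hunk below still subtracts the parent's `item.data.byte_offset` rather than `note_item.data.byte_offset` when computing `end`; both the old and new lines do this, so the asymmetry appears to predate this commit.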
@ -516,13 +516,13 @@ pub const Wip = struct {
const note_item = zir.extraData(Zir.Inst.CompileErrors.Item, body_elem);
const msg = zir.nullTerminatedString(note_item.data.msg);
const span = blk: {
if (note_item.data.node != 0) {
break :blk tree.nodeToSpan(note_item.data.node);
}
const token_starts = tree.tokens.items(.start);
const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
const end = start + @as(u32, @intCast(tree.tokenSlice(note_item.data.token).len)) - item.data.byte_offset;
if (note_item.data.node.unwrap()) |node| {
break :blk tree.nodeToSpan(node);
} else if (note_item.data.token.unwrap()) |token| {
const start = tree.tokenStart(token) + note_item.data.byte_offset;
const end = start + @as(u32, @intCast(tree.tokenSlice(token).len)) - item.data.byte_offset;
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
} else unreachable;
};
const loc = std.zig.findLineColumn(source, span.main);
@ -560,13 +560,14 @@ pub const Wip = struct {
for (zoir.compile_errors) |err| {
const err_span: std.zig.Ast.Span = span: {
if (err.token == std.zig.Zoir.CompileError.invalid_token) {
break :span tree.nodeToSpan(err.node_or_offset);
}
const token_start = tree.tokens.items(.start)[err.token];
if (err.token.unwrap()) |token| {
const token_start = tree.tokenStart(token);
const start = token_start + err.node_or_offset;
const end = token_start + @as(u32, @intCast(tree.tokenSlice(err.token).len));
const end = token_start + @as(u32, @intCast(tree.tokenSlice(token).len));
break :span .{ .start = start, .end = end, .main = start };
} else {
break :span tree.nodeToSpan(@enumFromInt(err.node_or_offset));
}
};
const err_loc = std.zig.findLineColumn(source, err_span.main);
@ -588,13 +589,14 @@ pub const Wip = struct {
for (notes_start.., err.first_note.., 0..err.note_count) |eb_note_idx, zoir_note_idx, _| {
const note = zoir.error_notes[zoir_note_idx];
const note_span: std.zig.Ast.Span = span: {
if (note.token == std.zig.Zoir.CompileError.invalid_token) {
break :span tree.nodeToSpan(note.node_or_offset);
}
const token_start = tree.tokens.items(.start)[note.token];
if (note.token.unwrap()) |token| {
const token_start = tree.tokenStart(token);
const start = token_start + note.node_or_offset;
const end = token_start + @as(u32, @intCast(tree.tokenSlice(note.token).len));
const end = token_start + @as(u32, @intCast(tree.tokenSlice(token).len));
break :span .{ .start = start, .end = end, .main = start };
} else {
break :span tree.nodeToSpan(@enumFromInt(note.node_or_offset));
}
};
const note_loc = std.zig.findLineColumn(source, note_span.main);

File diff suppressed because it is too large

@ -80,9 +80,18 @@ pub fn extraData(code: Zir, comptime T: type, index: usize) ExtraData(T) {
Inst.Declaration.Name,
std.zig.SimpleComptimeReason,
NullTerminatedString,
// Ast.TokenIndex is missing because it is a u32.
Ast.OptionalTokenIndex,
Ast.Node.Index,
Ast.Node.OptionalIndex,
=> @enumFromInt(code.extra[i]),
i32,
Ast.TokenOffset,
Ast.OptionalTokenOffset,
Ast.Node.Offset,
Ast.Node.OptionalOffset,
=> @enumFromInt(@as(i32, @bitCast(code.extra[i]))),
Inst.Call.Flags,
Inst.BuiltinCall.Flags,
Inst.SwitchBlock.Bits,
@ -1904,22 +1913,22 @@ pub const Inst = struct {
/// `small` is `fields_len: u16`.
tuple_decl,
/// Implements the `@This` builtin.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
this,
/// Implements the `@returnAddress` builtin.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
ret_addr,
/// Implements the `@src` builtin.
/// `operand` is payload index to `LineColumn`.
builtin_src,
/// Implements the `@errorReturnTrace` builtin.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
error_return_trace,
/// Implements the `@frame` builtin.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
frame,
/// Implements the `@frameAddress` builtin.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
frame_address,
/// Same as `alloc` from `Tag` but may contain an alignment instruction.
/// `operand` is payload index to `AllocExtended`.
@ -2004,9 +2013,9 @@ pub const Inst = struct {
/// `operand` is payload index to `UnNode`.
await_nosuspend,
/// Implements `@breakpoint`.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
breakpoint,
/// Implement builtin `@disableInstrumentation`. `operand` is `src_node: i32`.
/// Implement builtin `@disableInstrumentation`. `operand` is `src_node: Ast.Node.Offset`.
disable_instrumentation,
/// Implement builtin `@disableIntrinsics`. `operand` is `src_node: i32`.
disable_intrinsics,
@ -2040,7 +2049,7 @@ pub const Inst = struct {
/// `operand` is payload index to `UnNode`.
c_va_end,
/// Implement builtin `@cVaStart`.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
c_va_start,
/// Implements the following builtins:
/// `@ptrCast`, `@alignCast`, `@addrSpaceCast`, `@constCast`, `@volatileCast`.
@ -2067,7 +2076,7 @@ pub const Inst = struct {
/// `operand` is payload index to `UnNode`.
work_group_id,
/// Implements the `@inComptime` builtin.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
in_comptime,
/// Restores the error return index to its last saved state in a given
/// block. If the block is `.none`, restores to the state from the point
@ -2077,7 +2086,7 @@ pub const Inst = struct {
/// `small` is undefined.
restore_err_ret_index,
/// Retrieves a value from the current type declaration scope's closure.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
/// `small` is closure index.
closure_get,
/// Used as a placeholder instruction which is just a dummy index for Sema to replace
@ -2091,7 +2100,7 @@ pub const Inst = struct {
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
field_parent_ptr,
/// Get a type or value from `std.builtin`.
/// `operand` is `src_node: i32`.
/// `operand` is `src_node: Ast.Node.Offset`.
/// `small` is an `Inst.BuiltinValue`.
builtin_value,
/// Provide a `@branchHint` for the current block.
@ -2286,28 +2295,28 @@ pub const Inst = struct {
/// Used for unary operators, with an AST node source location.
un_node: struct {
/// Offset from Decl AST node index.
src_node: i32,
src_node: Ast.Node.Offset,
/// The meaning of this operand depends on the corresponding `Tag`.
operand: Ref,
},
/// Used for unary operators, with a token source location.
un_tok: struct {
/// Offset from Decl AST token index.
src_tok: Ast.TokenIndex,
src_tok: Ast.TokenOffset,
/// The meaning of this operand depends on the corresponding `Tag`.
operand: Ref,
},
pl_node: struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
src_node: i32,
src_node: Ast.Node.Offset,
/// index into extra.
/// `Tag` determines what lives there.
payload_index: u32,
},
pl_tok: struct {
/// Offset from Decl AST token index.
src_tok: Ast.TokenIndex,
src_tok: Ast.TokenOffset,
/// index into extra.
/// `Tag` determines what lives there.
payload_index: u32,
@ -2328,16 +2337,16 @@ pub const Inst = struct {
/// Offset into `string_bytes`. Null-terminated.
start: NullTerminatedString,
/// Offset from Decl AST token index.
src_tok: u32,
src_tok: Ast.TokenOffset,
pub fn get(self: @This(), code: Zir) [:0]const u8 {
return code.nullTerminatedString(self.start);
}
},
/// Offset from Decl AST token index.
tok: Ast.TokenIndex,
tok: Ast.TokenOffset,
/// Offset from Decl AST node index.
node: i32,
node: Ast.Node.Offset,
int: u64,
float: f64,
ptr_type: struct {
@ -2358,14 +2367,14 @@ pub const Inst = struct {
int_type: struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
src_node: i32,
src_node: Ast.Node.Offset,
signedness: std.builtin.Signedness,
bit_count: u16,
},
@"unreachable": struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
src_node: i32,
src_node: Ast.Node.Offset,
},
@"break": struct {
operand: Ref,
@ -2377,7 +2386,7 @@ pub const Inst = struct {
/// with an AST node source location.
inst_node: struct {
/// Offset from Decl AST node index.
src_node: i32,
src_node: Ast.Node.Offset,
/// The meaning of this operand depends on the corresponding `Tag`.
inst: Index,
},
@ -2456,9 +2465,7 @@ pub const Inst = struct {
};
pub const Break = struct {
pub const no_src_node = std.math.maxInt(i32);
operand_src_node: i32,
operand_src_node: Ast.Node.OptionalOffset,
block_inst: Index,
};
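ZIR's source locations are offsets relative to the owning declaration's AST node, and this commit gives them their own types: `Ast.Node.Offset` (signed, hence the `i32` bit-cast in the `extraData` decoder above) and `Ast.Node.OptionalOffset`, which absorbs sentinels like the deleted `Break.no_src_node = maxInt(i32)`. A hypothetical resolution helper (the real API presumably provides its own; this only illustrates the arithmetic the types are guarding):

const std = @import("std");
const Ast = std.zig.Ast;

/// Hypothetical: turn a relative offset back into an absolute node index.
/// The wrapper types prevent an offset being passed where an index belongs.
fn resolveNodeOffset(base: Ast.Node.Index, offset: Ast.Node.Offset) Ast.Node.Index {
    const abs = @as(i64, @intFromEnum(base)) + @intFromEnum(offset);
    return @enumFromInt(@as(u32, @intCast(abs)));
}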
@ -2467,7 +2474,7 @@ pub const Inst = struct {
/// 1. Input for every inputs_len
/// 2. clobber: NullTerminatedString // index into string_bytes (null terminated) for every clobbers_len.
pub const Asm = struct {
src_node: i32,
src_node: Ast.Node.Offset,
// null-terminated string index
asm_source: NullTerminatedString,
/// 1 bit for each outputs_len: whether it uses `-> T` or not.
@ -2582,7 +2589,7 @@ pub const Inst = struct {
/// Trailing: operand: Ref, // for each `operands_len` (stored in `small`).
pub const NodeMultiOp = struct {
src_node: i32,
src_node: Ast.Node.Offset,
};
/// This data is stored inside extra, with trailing operands according to `body_len`.
@ -3033,7 +3040,7 @@ pub const Inst = struct {
/// Trailing:
/// 0. operand: Ref // for each `operands_len`
pub const TypeOfPeer = struct {
src_node: i32,
src_node: Ast.Node.Offset,
body_len: u32,
body_index: u32,
};
@ -3084,7 +3091,7 @@ pub const Inst = struct {
/// 4. host_size: Ref // if `has_bit_range` flag is set
pub const PtrType = struct {
elem_type: Ref,
src_node: i32,
src_node: Ast.Node.Offset,
};
pub const ArrayTypeSentinel = struct {
@ -3116,7 +3123,7 @@ pub const Inst = struct {
start: Ref,
len: Ref,
sentinel: Ref,
start_src_node_offset: i32,
start_src_node_offset: Ast.Node.Offset,
};
/// The meaning of these operands depends on the corresponding `Tag`.
@ -3126,13 +3133,13 @@ pub const Inst = struct {
};
pub const BinNode = struct {
node: i32,
node: Ast.Node.Offset,
lhs: Ref,
rhs: Ref,
};
pub const UnNode = struct {
node: i32,
node: Ast.Node.Offset,
operand: Ref,
};
@ -3186,7 +3193,7 @@ pub const Inst = struct {
pub const SwitchBlockErrUnion = struct {
operand: Ref,
bits: Bits,
main_src_node_offset: i32,
main_src_node_offset: Ast.Node.Offset,
pub const Bits = packed struct(u32) {
/// If true, one or more prongs have multiple items.
@ -3592,7 +3599,7 @@ pub const Inst = struct {
/// init: Inst.Ref, // `.none` for non-`comptime` fields
/// }
pub const TupleDecl = struct {
src_node: i32, // relative
src_node: Ast.Node.Offset,
};
/// Trailing:
@ -3666,7 +3673,7 @@ pub const Inst = struct {
};
pub const Cmpxchg = struct {
node: i32,
node: Ast.Node.Offset,
ptr: Ref,
expected_value: Ref,
new_value: Ref,
@ -3706,7 +3713,7 @@ pub const Inst = struct {
};
pub const FieldParentPtr = struct {
src_node: i32,
src_node: Ast.Node.Offset,
parent_ptr_type: Ref,
field_name: Ref,
field_ptr: Ref,
@ -3720,7 +3727,7 @@ pub const Inst = struct {
};
pub const Select = struct {
node: i32,
node: Ast.Node.Offset,
elem_type: Ref,
pred: Ref,
a: Ref,
@ -3728,7 +3735,7 @@ pub const Inst = struct {
};
pub const AsyncCall = struct {
node: i32,
node: Ast.Node.Offset,
frame_buffer: Ref,
result_ptr: Ref,
fn_ptr: Ref,
@ -3753,7 +3760,7 @@ pub const Inst = struct {
/// 0. type_inst: Ref, // if small 0b000X is set
/// 1. align_inst: Ref, // if small 0b00X0 is set
pub const AllocExtended = struct {
src_node: i32,
src_node: Ast.Node.Offset,
pub const Small = packed struct {
has_type: bool,
@ -3778,9 +3785,9 @@ pub const Inst = struct {
pub const Item = struct {
/// null terminated string index
msg: NullTerminatedString,
node: Ast.Node.Index,
/// If node is 0 then this will be populated.
token: Ast.TokenIndex,
node: Ast.Node.OptionalIndex,
/// If node is .none then this will be populated.
token: Ast.OptionalTokenIndex,
/// Can be used in combination with `token`.
byte_offset: u32,
/// 0 or a payload index of a `Block`, each is a payload
@ -3818,7 +3825,7 @@ pub const Inst = struct {
};
pub const Src = struct {
node: i32,
node: Ast.Node.Offset,
line: u32,
column: u32,
};
@ -3833,7 +3840,7 @@ pub const Inst = struct {
/// The value being destructured.
operand: Ref,
/// The `destructure_assign` node.
destructure_node: i32,
destructure_node: Ast.Node.Offset,
/// The expected field count.
expect_len: u32,
};
@ -3848,7 +3855,7 @@ pub const Inst = struct {
};
pub const RestoreErrRetIndex = struct {
src_node: i32,
src_node: Ast.Node.Offset,
/// If `.none`, restore the trace to its state upon function entry.
block: Ref,
/// If `.none`, restore unconditionally.


@ -228,8 +228,8 @@ pub const NullTerminatedString = enum(u32) {
pub const CompileError = extern struct {
msg: NullTerminatedString,
token: Ast.TokenIndex,
/// If `token == invalid_token`, this is an `Ast.Node.Index`.
token: Ast.OptionalTokenIndex,
/// If `token == .none`, this is an `Ast.Node.Index`.
/// Otherwise, this is a byte offset into `token`.
node_or_offset: u32,
@ -243,14 +243,12 @@ pub const CompileError = extern struct {
pub const Note = extern struct {
msg: NullTerminatedString,
token: Ast.TokenIndex,
/// If `token == invalid_token`, this is an `Ast.Node.Index`.
token: Ast.OptionalTokenIndex,
/// If `token == .none`, this is an `Ast.Node.Index`.
/// Otherwise, this is a byte offset into `token`.
node_or_offset: u32,
};
pub const invalid_token: Ast.TokenIndex = std.math.maxInt(Ast.TokenIndex);
comptime {
assert(std.meta.hasUniqueRepresentation(CompileError));
assert(std.meta.hasUniqueRepresentation(Note));
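Zoir previously marked "no token" with `maxInt(Ast.TokenIndex)` as an in-band sentinel; `Ast.OptionalTokenIndex` moves that into the type while keeping `CompileError` and `Note` as `extern struct`s (the `hasUniqueRepresentation` asserts above are unchanged, so the optional evidently stays 32 bits wide). Decoding sketch:

const std = @import("std");
const Ast = std.zig.Ast;
const Zoir = std.zig.Zoir;

/// Sketch: report where a ZON compile error points.
fn describeError(tree: *const Ast, err: Zoir.CompileError) void {
    if (err.token.unwrap()) |token| {
        // Token form: node_or_offset is a byte offset into the token.
        std.debug.print("byte {d} of token at {d}\n", .{ err.node_or_offset, tree.tokenStart(token) });
    } else {
        // Node form: node_or_offset holds an Ast.Node.Index.
        const node: Ast.Node.Index = @enumFromInt(err.node_or_offset);
        std.debug.print("node {s}\n", .{@tagName(tree.nodeTag(node))});
    }
}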

View file

@ -48,7 +48,7 @@ pub fn generate(gpa: Allocator, tree: Ast, options: Options) Allocator.Error!Zoi
}
if (tree.errors.len == 0) {
const root_ast_node = tree.nodes.items(.data)[0].lhs;
const root_ast_node = tree.rootDecls()[0];
try zg.nodes.append(gpa, undefined); // index 0; root node
try zg.expr(root_ast_node, .root);
} else {
@ -97,11 +97,8 @@ pub fn generate(gpa: Allocator, tree: Ast, options: Options) Allocator.Error!Zoi
fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator.Error!void {
const gpa = zg.gpa;
const tree = zg.tree;
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
switch (node_tags[node]) {
switch (tree.nodeTag(node)) {
.root => unreachable,
.@"usingnamespace" => unreachable,
.test_decl => unreachable,
@ -173,7 +170,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
.bool_not,
.bit_not,
.negation_wrap,
=> try zg.addErrorTok(main_tokens[node], "operator '{s}' is not allowed in ZON", .{tree.tokenSlice(main_tokens[node])}),
=> try zg.addErrorTok(tree.nodeMainToken(node), "operator '{s}' is not allowed in ZON", .{tree.tokenSlice(tree.nodeMainToken(node))}),
.error_union,
.merge_error_sets,
@ -251,23 +248,20 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
.slice_sentinel,
=> try zg.addErrorNode(node, "slice operator is not allowed in ZON", .{}),
.deref, .address_of => try zg.addErrorTok(main_tokens[node], "pointers are not available in ZON", .{}),
.unwrap_optional => try zg.addErrorTok(main_tokens[node], "optionals are not available in ZON", .{}),
.deref, .address_of => try zg.addErrorTok(tree.nodeMainToken(node), "pointers are not available in ZON", .{}),
.unwrap_optional => try zg.addErrorTok(tree.nodeMainToken(node), "optionals are not available in ZON", .{}),
.error_value => try zg.addErrorNode(node, "errors are not available in ZON", .{}),
.array_access => try zg.addErrorTok(node, "array indexing is not allowed in ZON", .{}),
.array_access => try zg.addErrorNode(node, "array indexing is not allowed in ZON", .{}),
.block_two,
.block_two_semicolon,
.block,
.block_semicolon,
=> {
const size = switch (node_tags[node]) {
.block_two, .block_two_semicolon => @intFromBool(node_datas[node].lhs != 0) + @intFromBool(node_datas[node].rhs != 0),
.block, .block_semicolon => node_datas[node].rhs - node_datas[node].lhs,
else => unreachable,
};
if (size == 0) {
var buffer: [2]Ast.Node.Index = undefined;
const statements = tree.blockStatements(&buffer, node).?;
if (statements.len == 0) {
try zg.addErrorNodeNotes(node, "void literals are not available in ZON", .{}, &.{
try zg.errNoteNode(node, "void union payloads can be represented by enum literals", .{}),
});
@ -288,9 +282,9 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
var buf: [2]Ast.Node.Index = undefined;
const type_node = if (tree.fullArrayInit(&buf, node)) |full|
full.ast.type_expr
full.ast.type_expr.unwrap().?
else if (tree.fullStructInit(&buf, node)) |full|
full.ast.type_expr
full.ast.type_expr.unwrap().?
else
unreachable;
@ -300,18 +294,18 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
},
.grouped_expression => {
try zg.addErrorTokNotes(main_tokens[node], "expression grouping is not allowed in ZON", .{}, &.{
try zg.errNoteTok(main_tokens[node], "these parentheses are always redundant", .{}),
try zg.addErrorTokNotes(tree.nodeMainToken(node), "expression grouping is not allowed in ZON", .{}, &.{
try zg.errNoteTok(tree.nodeMainToken(node), "these parentheses are always redundant", .{}),
});
return zg.expr(node_datas[node].lhs, dest_node);
return zg.expr(tree.nodeData(node).node_and_token[0], dest_node);
},
.negation => {
const child_node = node_datas[node].lhs;
switch (node_tags[child_node]) {
const child_node = tree.nodeData(node).node;
switch (tree.nodeTag(child_node)) {
.number_literal => return zg.numberLiteral(child_node, node, dest_node, .negative),
.identifier => {
const child_ident = tree.tokenSlice(main_tokens[child_node]);
const child_ident = tree.tokenSlice(tree.nodeMainToken(child_node));
if (mem.eql(u8, child_ident, "inf")) {
zg.setNode(dest_node, .{
.tag = .neg_inf,
@ -323,7 +317,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
},
else => {},
}
try zg.addErrorTok(main_tokens[node], "expected number or 'inf' after '-'", .{});
try zg.addErrorTok(tree.nodeMainToken(node), "expected number or 'inf' after '-'", .{});
},
.number_literal => try zg.numberLiteral(node, node, dest_node, .positive),
.char_literal => try zg.charLiteral(node, dest_node),
@ -331,7 +325,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
.identifier => try zg.identifier(node, dest_node),
.enum_literal => {
const str_index = zg.identAsString(main_tokens[node]) catch |err| switch (err) {
const str_index = zg.identAsString(tree.nodeMainToken(node)) catch |err| switch (err) {
error.BadString => undefined, // doesn't matter, there's an error
error.OutOfMemory => |e| return e,
};
@ -369,7 +363,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullArrayInit(&buf, node).?;
assert(full.ast.elements.len != 0); // Otherwise it would be a struct init
assert(full.ast.type_expr == 0); // The tag was `array_init_dot_*`
assert(full.ast.type_expr == .none); // The tag was `array_init_dot_*`
const first_elem: u32 = @intCast(zg.nodes.len);
try zg.nodes.resize(gpa, zg.nodes.len + full.ast.elements.len);
@ -398,7 +392,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
=> {
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullStructInit(&buf, node).?;
assert(full.ast.type_expr == 0); // The tag was `struct_init_dot_*`
assert(full.ast.type_expr == .none); // The tag was `struct_init_dot_*`
if (full.ast.fields.len == 0) {
zg.setNode(dest_node, .{
@ -460,7 +454,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
const tree = zg.tree;
assert(tree.tokens.items(.tag)[ident_token] == .identifier);
assert(tree.tokenTag(ident_token) == .identifier);
const ident_name = tree.tokenSlice(ident_token);
if (!mem.startsWith(u8, ident_name, "@")) {
const start = zg.string_bytes.items.len;
@ -493,19 +487,16 @@ fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
/// Estimates the size of a string node without parsing it.
pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize {
switch (tree.nodes.items(.tag)[node]) {
switch (tree.nodeTag(node)) {
// Parsed string literals are typically around the size of the raw strings.
.string_literal => {
const token = tree.nodes.items(.main_token)[node];
const token = tree.nodeMainToken(node);
const raw_string = tree.tokenSlice(token);
return raw_string.len;
},
// Multiline string literal lengths can be computed exactly.
.multiline_string_literal => {
const first_tok, const last_tok = bounds: {
const node_data = tree.nodes.items(.data)[node];
break :bounds .{ node_data.lhs, node_data.rhs };
};
const first_tok, const last_tok = tree.nodeData(node).token_and_token;
var size = tree.tokenSlice(first_tok)[2..].len;
for (first_tok + 1..last_tok + 1) |tok_idx| {
@ -524,17 +515,14 @@ pub fn parseStrLit(
node: Ast.Node.Index,
writer: anytype,
) error{OutOfMemory}!std.zig.string_literal.Result {
switch (tree.nodes.items(.tag)[node]) {
switch (tree.nodeTag(node)) {
.string_literal => {
const token = tree.nodes.items(.main_token)[node];
const token = tree.nodeMainToken(node);
const raw_string = tree.tokenSlice(token);
return std.zig.string_literal.parseWrite(writer, raw_string);
},
.multiline_string_literal => {
const first_tok, const last_tok = bounds: {
const node_data = tree.nodes.items(.data)[node];
break :bounds .{ node_data.lhs, node_data.rhs };
};
const first_tok, const last_tok = tree.nodeData(node).token_and_token;
// First line: do not append a newline.
{
@ -572,7 +560,7 @@ fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) !StringLiteralResult {
switch (try parseStrLit(zg.tree, str_node, zg.string_bytes.writer(zg.gpa))) {
.success => {},
.failure => |err| {
const token = zg.tree.nodes.items(.main_token)[str_node];
const token = zg.tree.nodeMainToken(str_node);
const raw_string = zg.tree.tokenSlice(token);
try zg.lowerStrLitError(err, token, raw_string, 0);
return error.BadString;
@ -620,7 +608,7 @@ fn identAsString(zg: *ZonGen, ident_token: Ast.TokenIndex) !Zoir.NullTerminatedS
fn numberLiteral(zg: *ZonGen, num_node: Ast.Node.Index, src_node: Ast.Node.Index, dest_node: Zoir.Node.Index, sign: enum { negative, positive }) !void {
const tree = zg.tree;
const num_token = tree.nodes.items(.main_token)[num_node];
const num_token = tree.nodeMainToken(num_node);
const num_bytes = tree.tokenSlice(num_token);
switch (std.zig.parseNumberLiteral(num_bytes)) {
@ -724,8 +712,8 @@ fn setBigIntLiteralNode(zg: *ZonGen, dest_node: Zoir.Node.Index, src_node: Ast.N
fn charLiteral(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !void {
const tree = zg.tree;
assert(tree.nodes.items(.tag)[node] == .char_literal);
const main_token = tree.nodes.items(.main_token)[node];
assert(tree.nodeTag(node) == .char_literal);
const main_token = tree.nodeMainToken(node);
const slice = tree.tokenSlice(main_token);
switch (std.zig.parseCharLiteral(slice)) {
.success => |codepoint| zg.setNode(dest_node, .{
@ -739,8 +727,8 @@ fn charLiteral(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !v
fn identifier(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !void {
const tree = zg.tree;
assert(tree.nodes.items(.tag)[node] == .identifier);
const main_token = tree.nodes.items(.main_token)[node];
assert(tree.nodeTag(node) == .identifier);
const main_token = tree.nodeMainToken(node);
const ident = tree.tokenSlice(main_token);
const tag: Zoir.Node.Repr.Tag = t: {
@ -823,8 +811,8 @@ fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, a
return .{
.msg = @enumFromInt(message_idx),
.token = Zoir.CompileError.invalid_token,
.node_or_offset = node,
.token = .none,
.node_or_offset = @intFromEnum(node),
};
}
@ -836,33 +824,33 @@ fn errNoteTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, arg
return .{
.msg = @enumFromInt(message_idx),
.token = tok,
.token = .fromToken(tok),
.node_or_offset = 0,
};
}
fn addErrorNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype) Allocator.Error!void {
return zg.addErrorInner(Zoir.CompileError.invalid_token, node, format, args, &.{});
return zg.addErrorInner(.none, @intFromEnum(node), format, args, &.{});
}
fn addErrorTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype) Allocator.Error!void {
return zg.addErrorInner(tok, 0, format, args, &.{});
return zg.addErrorInner(.fromToken(tok), 0, format, args, &.{});
}
fn addErrorNodeNotes(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
return zg.addErrorInner(Zoir.CompileError.invalid_token, node, format, args, notes);
return zg.addErrorInner(.none, @intFromEnum(node), format, args, notes);
}
fn addErrorTokNotes(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
return zg.addErrorInner(tok, 0, format, args, notes);
return zg.addErrorInner(.fromToken(tok), 0, format, args, notes);
}
fn addErrorTokOff(zg: *ZonGen, tok: Ast.TokenIndex, offset: u32, comptime format: []const u8, args: anytype) Allocator.Error!void {
return zg.addErrorInner(tok, offset, format, args, &.{});
return zg.addErrorInner(.fromToken(tok), offset, format, args, &.{});
}
fn addErrorTokNotesOff(zg: *ZonGen, tok: Ast.TokenIndex, offset: u32, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
return zg.addErrorInner(tok, offset, format, args, notes);
return zg.addErrorInner(.fromToken(tok), offset, format, args, notes);
}
fn addErrorInner(
zg: *ZonGen,
token: Ast.TokenIndex,
token: Ast.OptionalTokenIndex,
node_or_offset: u32,
comptime format: []const u8,
args: anytype,

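The `addError*` wrappers above replace the `Zoir.CompileError.invalid_token` sentinel with `Ast.OptionalTokenIndex`. A hedged sketch of the two call shapes, with a hypothetical message (`zg`, `tok`, and `node` as in the surrounding code):

    // Token-anchored error: wrap the token explicitly.
    try zg.addErrorInner(.fromToken(tok), 0, "example message", .{}, &.{});

    // Node-anchored error: no token, so the node index rides in node_or_offset.
    try zg.addErrorInner(.none, @intFromEnum(node), "example message", .{}, &.{});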
File diff suppressed because it is too large

View file

@ -196,16 +196,15 @@ pub const Error = union(enum) {
return .{ .err = self, .status = status };
}
fn zoirErrorLocation(ast: Ast, maybe_token: Ast.TokenIndex, node_or_offset: u32) Ast.Location {
if (maybe_token == Zoir.CompileError.invalid_token) {
const main_tokens = ast.nodes.items(.main_token);
const ast_node = node_or_offset;
const token = main_tokens[ast_node];
return ast.tokenLocation(0, token);
} else {
var location = ast.tokenLocation(0, maybe_token);
fn zoirErrorLocation(ast: Ast, maybe_token: Ast.OptionalTokenIndex, node_or_offset: u32) Ast.Location {
if (maybe_token.unwrap()) |token| {
var location = ast.tokenLocation(0, token);
location.column += node_or_offset;
return location;
} else {
const ast_node: Ast.Node.Index = @enumFromInt(node_or_offset);
const token = ast.nodeMainToken(ast_node);
return ast.tokenLocation(0, token);
}
}
};
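Decoding mirrors that encoding: when the optional token is present, the extra word is a column offset; when it is `.none`, the word names an AST node whose main token anchors the location. Illustrative calls only (this is a private helper; `ast`, `tok`, and `node` are assumed in scope):

    const at_token = zoirErrorLocation(ast, .fromToken(tok), 2); // column + 2
    const at_node = zoirErrorLocation(ast, .none, @intFromEnum(node));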
@ -632,7 +631,7 @@ const Parser = struct {
switch (try ZonGen.parseStrLit(self.ast, ast_node, buf.writer(self.gpa))) {
.success => {},
.failure => |err| {
const token = self.ast.nodes.items(.main_token)[ast_node];
const token = self.ast.nodeMainToken(ast_node);
const raw_string = self.ast.tokenSlice(token);
return self.failTokenFmt(token, @intCast(err.offset()), "{s}", .{err.fmt(raw_string)});
},
@ -1005,8 +1004,7 @@ const Parser = struct {
args: anytype,
) error{ OutOfMemory, ParseZon } {
@branchHint(.cold);
const main_tokens = self.ast.nodes.items(.main_token);
const token = main_tokens[node.getAstNode(self.zoir)];
const token = self.ast.nodeMainToken(node.getAstNode(self.zoir));
return self.failTokenFmt(token, 0, fmt, args);
}
@ -1025,8 +1023,7 @@ const Parser = struct {
message: []const u8,
) error{ParseZon} {
@branchHint(.cold);
const main_tokens = self.ast.nodes.items(.main_token);
const token = main_tokens[node.getAstNode(self.zoir)];
const token = self.ast.nodeMainToken(node.getAstNode(self.zoir));
return self.failToken(.{
.token = token,
.offset = 0,
@ -1059,10 +1056,7 @@ const Parser = struct {
const struct_init = self.ast.fullStructInit(&buf, node.getAstNode(self.zoir)).?;
const field_node = struct_init.ast.fields[f];
break :b self.ast.firstToken(field_node) - 2;
} else b: {
const main_tokens = self.ast.nodes.items(.main_token);
break :b main_tokens[node.getAstNode(self.zoir)];
};
} else self.ast.nodeMainToken(node.getAstNode(self.zoir));
switch (@typeInfo(T)) {
inline .@"struct", .@"union", .@"enum" => |info| {
const note: Error.TypeCheckFailure.Note = if (info.fields.len == 0) b: {

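Most of the churn in this file is one mechanical substitution: indexing the `MultiArrayList` slices by a bare integer becomes a typed accessor call. Sketch, assuming `ast: Ast` and `node: Ast.Node.Index`:

    // Before (old API):
    // const tag = ast.nodes.items(.tag)[node];
    // const tok = ast.nodes.items(.main_token)[node];

    // After: Node.Index is an enum, so stray integer arithmetic on it no longer compiles.
    const tag = ast.nodeTag(node);
    const tok = ast.nodeMainToken(node);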
View file

@ -30,7 +30,7 @@
arena: std.heap.ArenaAllocator,
location: Location,
location_tok: std.zig.Ast.TokenIndex,
hash_tok: std.zig.Ast.TokenIndex,
hash_tok: std.zig.Ast.OptionalTokenIndex,
name_tok: std.zig.Ast.TokenIndex,
lazy_status: LazyStatus,
parent_package_root: Cache.Path,
@ -317,8 +317,8 @@ pub fn run(f: *Fetch) RunError!void {
f.location_tok,
try eb.addString("expected path relative to build root; found absolute path"),
);
if (f.hash_tok != 0) return f.fail(
f.hash_tok,
if (f.hash_tok.unwrap()) |hash_tok| return f.fail(
hash_tok,
try eb.addString("path-based dependencies are not hashed"),
);
// Packages fetched by URL may not use relative paths to escape outside the
@ -555,17 +555,18 @@ fn runResource(
// job is done.
if (remote_hash) |declared_hash| {
const hash_tok = f.hash_tok.unwrap().?;
if (declared_hash.isOld()) {
const actual_hex = Package.multiHashHexDigest(f.computed_hash.digest);
if (!std.mem.eql(u8, declared_hash.toSlice(), &actual_hex)) {
return f.fail(f.hash_tok, try eb.printString(
return f.fail(hash_tok, try eb.printString(
"hash mismatch: manifest declares {s} but the fetched package has {s}",
.{ declared_hash.toSlice(), actual_hex },
));
}
} else {
if (!computed_package_hash.eql(&declared_hash)) {
return f.fail(f.hash_tok, try eb.printString(
return f.fail(hash_tok, try eb.printString(
"hash mismatch: manifest declares {s} but the fetched package has {s}",
.{ declared_hash.toSlice(), computed_package_hash.toSlice() },
));
@ -813,15 +814,14 @@ fn srcLoc(
) Allocator.Error!ErrorBundle.SourceLocationIndex {
const ast = f.parent_manifest_ast orelse return .none;
const eb = &f.error_bundle;
const token_starts = ast.tokens.items(.start);
const start_loc = ast.tokenLocation(0, tok);
const src_path = try eb.printString("{}" ++ fs.path.sep_str ++ Manifest.basename, .{f.parent_package_root});
const msg_off = 0;
return eb.addSourceLocation(.{
.src_path = src_path,
.span_start = token_starts[tok],
.span_end = @intCast(token_starts[tok] + ast.tokenSlice(tok).len),
.span_main = token_starts[tok] + msg_off,
.span_start = ast.tokenStart(tok),
.span_end = @intCast(ast.tokenStart(tok) + ast.tokenSlice(tok).len),
.span_main = ast.tokenStart(tok) + msg_off,
.line = @intCast(start_loc.line),
.column = @intCast(start_loc.column),
.source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]),
@ -2331,7 +2331,7 @@ const TestFetchBuilder = struct {
.arena = std.heap.ArenaAllocator.init(allocator),
.location = .{ .path_or_url = path_or_url },
.location_tok = 0,
.hash_tok = 0,
.hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = Cache.Path{ .root_dir = Cache.Directory{ .handle = cache_dir, .path = null } },

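`srcLoc` above drops the cached `tokens.items(.start)` slice in favor of the `ast.tokenStart` accessor. The span arithmetic it performs, restated as a sketch with `tok: Ast.TokenIndex` assumed:

    const span_start = ast.tokenStart(tok);
    const span_end: u32 = @intCast(span_start + ast.tokenSlice(tok).len);
    const span_main = span_start + msg_off; // msg_off: byte offset within the token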
View file

@ -17,8 +17,8 @@ pub const Dependency = struct {
location_tok: Ast.TokenIndex,
location_node: Ast.Node.Index,
hash: ?[]const u8,
hash_tok: Ast.TokenIndex,
hash_node: Ast.Node.Index,
hash_tok: Ast.OptionalTokenIndex,
hash_node: Ast.Node.OptionalIndex,
node: Ast.Node.Index,
name_tok: Ast.TokenIndex,
lazy: bool,
@ -40,7 +40,7 @@ id: u32,
version: std.SemanticVersion,
version_node: Ast.Node.Index,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),
dependencies_node: Ast.Node.Index,
dependencies_node: Ast.Node.OptionalIndex,
paths: std.StringArrayHashMapUnmanaged(void),
minimum_zig_version: ?std.SemanticVersion,
@ -58,10 +58,7 @@ pub const ParseOptions = struct {
pub const Error = Allocator.Error;
pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
const node_tags = ast.nodes.items(.tag);
const node_datas = ast.nodes.items(.data);
assert(node_tags[0] == .root);
const main_node_index = node_datas[0].lhs;
const main_node_index = ast.nodeData(.root).node;
var arena_instance = std.heap.ArenaAllocator.init(gpa);
errdefer arena_instance.deinit();
@ -75,9 +72,9 @@ pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
.name = undefined,
.id = 0,
.version = undefined,
.version_node = 0,
.version_node = undefined,
.dependencies = .{},
.dependencies_node = 0,
.dependencies_node = .none,
.paths = .{},
.allow_missing_paths_field = options.allow_missing_paths_field,
.allow_name_string = options.allow_name_string,
@ -121,8 +118,6 @@ pub fn copyErrorsIntoBundle(
src_path: u32,
eb: *std.zig.ErrorBundle.Wip,
) Allocator.Error!void {
const token_starts = ast.tokens.items(.start);
for (man.errors) |msg| {
const start_loc = ast.tokenLocation(0, msg.tok);
@ -130,9 +125,9 @@ pub fn copyErrorsIntoBundle(
.msg = try eb.addString(msg.msg),
.src_loc = try eb.addSourceLocation(.{
.src_path = src_path,
.span_start = token_starts[msg.tok],
.span_end = @intCast(token_starts[msg.tok] + ast.tokenSlice(msg.tok).len),
.span_main = token_starts[msg.tok] + msg.off,
.span_start = ast.tokenStart(msg.tok),
.span_end = @intCast(ast.tokenStart(msg.tok) + ast.tokenSlice(msg.tok).len),
.span_main = ast.tokenStart(msg.tok) + msg.off,
.line = @intCast(start_loc.line),
.column = @intCast(start_loc.column),
.source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]),
@ -153,7 +148,7 @@ const Parse = struct {
version: std.SemanticVersion,
version_node: Ast.Node.Index,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),
dependencies_node: Ast.Node.Index,
dependencies_node: Ast.Node.OptionalIndex,
paths: std.StringArrayHashMapUnmanaged(void),
allow_missing_paths_field: bool,
allow_name_string: bool,
@ -164,8 +159,7 @@ const Parse = struct {
fn parseRoot(p: *Parse, node: Ast.Node.Index) !void {
const ast = p.ast;
const main_tokens = ast.nodes.items(.main_token);
const main_token = main_tokens[node];
const main_token = ast.nodeMainToken(node);
var buf: [2]Ast.Node.Index = undefined;
const struct_init = ast.fullStructInit(&buf, node) orelse {
@ -184,7 +178,7 @@ const Parse = struct {
// things manually provides an opportunity to do any additional verification
// that is desirable on a per-field basis.
if (mem.eql(u8, field_name, "dependencies")) {
p.dependencies_node = field_init;
p.dependencies_node = field_init.toOptional();
try parseDependencies(p, field_init);
} else if (mem.eql(u8, field_name, "paths")) {
have_included_paths = true;
@ -198,17 +192,17 @@ const Parse = struct {
p.version_node = field_init;
const version_text = try parseString(p, field_init);
if (version_text.len > max_version_len) {
try appendError(p, main_tokens[field_init], "version string length {d} exceeds maximum of {d}", .{ version_text.len, max_version_len });
try appendError(p, ast.nodeMainToken(field_init), "version string length {d} exceeds maximum of {d}", .{ version_text.len, max_version_len });
}
p.version = std.SemanticVersion.parse(version_text) catch |err| v: {
try appendError(p, main_tokens[field_init], "unable to parse semantic version: {s}", .{@errorName(err)});
try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
break :v undefined;
};
have_version = true;
} else if (mem.eql(u8, field_name, "minimum_zig_version")) {
const version_text = try parseString(p, field_init);
p.minimum_zig_version = std.SemanticVersion.parse(version_text) catch |err| v: {
try appendError(p, main_tokens[field_init], "unable to parse semantic version: {s}", .{@errorName(err)});
try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
break :v null;
};
} else {
@ -251,11 +245,10 @@ const Parse = struct {
fn parseDependencies(p: *Parse, node: Ast.Node.Index) !void {
const ast = p.ast;
const main_tokens = ast.nodes.items(.main_token);
var buf: [2]Ast.Node.Index = undefined;
const struct_init = ast.fullStructInit(&buf, node) orelse {
const tok = main_tokens[node];
const tok = ast.nodeMainToken(node);
return fail(p, tok, "expected dependencies expression to be a struct", .{});
};
@ -269,23 +262,22 @@ const Parse = struct {
fn parseDependency(p: *Parse, node: Ast.Node.Index) !Dependency {
const ast = p.ast;
const main_tokens = ast.nodes.items(.main_token);
var buf: [2]Ast.Node.Index = undefined;
const struct_init = ast.fullStructInit(&buf, node) orelse {
const tok = main_tokens[node];
const tok = ast.nodeMainToken(node);
return fail(p, tok, "expected dependency expression to be a struct", .{});
};
var dep: Dependency = .{
.location = undefined,
.location_tok = 0,
.location_tok = undefined,
.location_node = undefined,
.hash = null,
.hash_tok = 0,
.hash_node = undefined,
.hash_tok = .none,
.hash_node = .none,
.node = node,
.name_tok = 0,
.name_tok = undefined,
.lazy = false,
};
var has_location = false;
@ -299,7 +291,7 @@ const Parse = struct {
// that is desirable on a per-field basis.
if (mem.eql(u8, field_name, "url")) {
if (has_location) {
return fail(p, main_tokens[field_init], "dependency should specify only one of 'url' and 'path' fields.", .{});
return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
}
dep.location = .{
.url = parseString(p, field_init) catch |err| switch (err) {
@ -308,11 +300,11 @@ const Parse = struct {
},
};
has_location = true;
dep.location_tok = main_tokens[field_init];
dep.location_tok = ast.nodeMainToken(field_init);
dep.location_node = field_init;
} else if (mem.eql(u8, field_name, "path")) {
if (has_location) {
return fail(p, main_tokens[field_init], "dependency should specify only one of 'url' and 'path' fields.", .{});
return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
}
dep.location = .{
.path = parseString(p, field_init) catch |err| switch (err) {
@ -321,15 +313,15 @@ const Parse = struct {
},
};
has_location = true;
dep.location_tok = main_tokens[field_init];
dep.location_tok = ast.nodeMainToken(field_init);
dep.location_node = field_init;
} else if (mem.eql(u8, field_name, "hash")) {
dep.hash = parseHash(p, field_init) catch |err| switch (err) {
error.ParseFailure => continue,
else => |e| return e,
};
dep.hash_tok = main_tokens[field_init];
dep.hash_node = field_init;
dep.hash_tok = .fromToken(ast.nodeMainToken(field_init));
dep.hash_node = field_init.toOptional();
} else if (mem.eql(u8, field_name, "lazy")) {
dep.lazy = parseBool(p, field_init) catch |err| switch (err) {
error.ParseFailure => continue,
@ -342,7 +334,7 @@ const Parse = struct {
}
if (!has_location) {
try appendError(p, main_tokens[node], "dependency requires location field, one of 'url' or 'path'.", .{});
try appendError(p, ast.nodeMainToken(node), "dependency requires location field, one of 'url' or 'path'.", .{});
}
return dep;
@ -350,11 +342,10 @@ const Parse = struct {
fn parseIncludedPaths(p: *Parse, node: Ast.Node.Index) !void {
const ast = p.ast;
const main_tokens = ast.nodes.items(.main_token);
var buf: [2]Ast.Node.Index = undefined;
const array_init = ast.fullArrayInit(&buf, node) orelse {
const tok = main_tokens[node];
const tok = ast.nodeMainToken(node);
return fail(p, tok, "expected paths expression to be a list of strings", .{});
};
@ -369,12 +360,10 @@ const Parse = struct {
fn parseBool(p: *Parse, node: Ast.Node.Index) !bool {
const ast = p.ast;
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
if (node_tags[node] != .identifier) {
return fail(p, main_tokens[node], "expected identifier", .{});
if (ast.nodeTag(node) != .identifier) {
return fail(p, ast.nodeMainToken(node), "expected identifier", .{});
}
const ident_token = main_tokens[node];
const ident_token = ast.nodeMainToken(node);
const token_bytes = ast.tokenSlice(ident_token);
if (mem.eql(u8, token_bytes, "true")) {
return true;
@ -387,10 +376,8 @@ const Parse = struct {
fn parseFingerprint(p: *Parse, node: Ast.Node.Index) !Package.Fingerprint {
const ast = p.ast;
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
const main_token = main_tokens[node];
if (node_tags[node] != .number_literal) {
const main_token = ast.nodeMainToken(node);
if (ast.nodeTag(node) != .number_literal) {
return fail(p, main_token, "expected integer literal", .{});
}
const token_bytes = ast.tokenSlice(main_token);
@ -406,11 +393,9 @@ const Parse = struct {
fn parseName(p: *Parse, node: Ast.Node.Index) ![]const u8 {
const ast = p.ast;
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
const main_token = main_tokens[node];
const main_token = ast.nodeMainToken(node);
if (p.allow_name_string and node_tags[node] == .string_literal) {
if (p.allow_name_string and ast.nodeTag(node) == .string_literal) {
const name = try parseString(p, node);
if (!std.zig.isValidId(name))
return fail(p, main_token, "name must be a valid bare zig identifier (hint: switch from string to enum literal)", .{});
@ -423,7 +408,7 @@ const Parse = struct {
return name;
}
if (node_tags[node] != .enum_literal)
if (ast.nodeTag(node) != .enum_literal)
return fail(p, main_token, "expected enum literal", .{});
const ident_name = ast.tokenSlice(main_token);
@ -440,12 +425,10 @@ const Parse = struct {
fn parseString(p: *Parse, node: Ast.Node.Index) ![]const u8 {
const ast = p.ast;
const node_tags = ast.nodes.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
if (node_tags[node] != .string_literal) {
return fail(p, main_tokens[node], "expected string literal", .{});
if (ast.nodeTag(node) != .string_literal) {
return fail(p, ast.nodeMainToken(node), "expected string literal", .{});
}
const str_lit_token = main_tokens[node];
const str_lit_token = ast.nodeMainToken(node);
const token_bytes = ast.tokenSlice(str_lit_token);
p.buf.clearRetainingCapacity();
try parseStrLit(p, str_lit_token, &p.buf, token_bytes, 0);
@ -455,8 +438,7 @@ const Parse = struct {
fn parseHash(p: *Parse, node: Ast.Node.Index) ![]const u8 {
const ast = p.ast;
const main_tokens = ast.nodes.items(.main_token);
const tok = main_tokens[node];
const tok = ast.nodeMainToken(node);
const h = try parseString(p, node);
if (h.len > Package.Hash.max_len) {
@ -469,8 +451,7 @@ const Parse = struct {
/// TODO: try to DRY this with AstGen.identifierTokenString
fn identifierTokenString(p: *Parse, token: Ast.TokenIndex) InnerError![]const u8 {
const ast = p.ast;
const token_tags = ast.tokens.items(.tag);
assert(token_tags[token] == .identifier);
assert(ast.tokenTag(token) == .identifier);
const ident_name = ast.tokenSlice(token);
if (!mem.startsWith(u8, ident_name, "@")) {
return ident_name;

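Manifest fields that may be absent migrate from the `0` sentinel to `Ast.Node.OptionalIndex` and `Ast.OptionalTokenIndex`. The full round trip, as used above (names illustrative):

    var dep_node: Ast.Node.OptionalIndex = .none; // absent until parsed
    dep_node = field_init.toOptional(); // store a real Ast.Node.Index
    if (dep_node.unwrap()) |n| {
        _ = n; // n: Ast.Node.Index, guaranteed valid in this branch
    }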
View file

@ -407,18 +407,18 @@ pub const Block = struct {
return block.comptime_reason != null;
}
fn builtinCallArgSrc(block: *Block, builtin_call_node: i32, arg_index: u32) LazySrcLoc {
fn builtinCallArgSrc(block: *Block, builtin_call_node: std.zig.Ast.Node.Offset, arg_index: u32) LazySrcLoc {
return block.src(.{ .node_offset_builtin_call_arg = .{
.builtin_call_node = builtin_call_node,
.arg_index = arg_index,
} });
}
pub fn nodeOffset(block: Block, node_offset: i32) LazySrcLoc {
pub fn nodeOffset(block: Block, node_offset: std.zig.Ast.Node.Offset) LazySrcLoc {
return block.src(LazySrcLoc.Offset.nodeOffset(node_offset));
}
fn tokenOffset(block: Block, tok_offset: u32) LazySrcLoc {
fn tokenOffset(block: Block, tok_offset: std.zig.Ast.TokenOffset) LazySrcLoc {
return block.src(.{ .token_offset = tok_offset });
}
@ -1860,7 +1860,7 @@ fn analyzeBodyInner(
if (!block.isComptime()) break :blk try sema.zirTry(block, inst);
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.nodeOffset(inst_data.src_node);
const operand_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const operand_src = block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const inline_body = sema.code.bodySlice(extra.end, extra.data.body_len);
const err_union = try sema.resolveInst(extra.data.operand);
@ -1883,7 +1883,7 @@ fn analyzeBodyInner(
if (!block.isComptime()) break :blk try sema.zirTryPtr(block, inst);
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.nodeOffset(inst_data.src_node);
const operand_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const operand_src = block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const inline_body = sema.code.bodySlice(extra.end, extra.data.body_len);
const operand = try sema.resolveInst(extra.data.operand);
@ -2166,7 +2166,7 @@ pub fn setupErrorReturnTrace(sema: *Sema, block: *Block, last_arg_index: usize)
const addrs_ptr = try err_trace_block.addTy(.alloc, try pt.singleMutPtrType(addr_arr_ty));
// var st: StackTrace = undefined;
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try stack_trace_ty.resolveFields(pt);
const st_ptr = try err_trace_block.addTy(.alloc, try pt.singleMutPtrType(stack_trace_ty));
@ -2901,7 +2901,7 @@ fn zirStructDecl(
const tracked_inst = try block.trackZir(inst);
const src: LazySrcLoc = .{
.base_node_inst = tracked_inst,
.offset = LazySrcLoc.Offset.nodeOffset(0),
.offset = LazySrcLoc.Offset.nodeOffset(.zero),
};
var extra_index = extra.end;
@ -3114,7 +3114,7 @@ fn zirEnumDecl(
var extra_index: usize = extra.end;
const tracked_inst = try block.trackZir(inst);
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
const tag_type_ref = if (small.has_tag_type) blk: {
const tag_type_ref: Zir.Inst.Ref = @enumFromInt(sema.code.extra[extra_index]);
@ -3277,7 +3277,7 @@ fn zirUnionDecl(
var extra_index: usize = extra.end;
const tracked_inst = try block.trackZir(inst);
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
extra_index += @intFromBool(small.has_tag_type);
const captures_len = if (small.has_captures_len) blk: {
@ -3402,7 +3402,7 @@ fn zirOpaqueDecl(
var extra_index: usize = extra.end;
const tracked_inst = try block.trackZir(inst);
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
const captures_len = if (small.has_captures_len) blk: {
const captures_len = sema.code.extra[extra_index];
@ -3835,7 +3835,7 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
if (try elem_ty.comptimeOnlySema(pt)) {
// The value was initialized through RLS, so we didn't detect the runtime condition earlier.
// TODO: source location of runtime control flow
const init_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const init_src = block.src(.{ .node_offset_var_decl_init = inst_data.src_node });
return sema.fail(block, init_src, "value with comptime-only type '{}' depends on runtime control flow", .{elem_ty.fmt(pt)});
}
@ -6690,8 +6690,8 @@ fn zirBreak(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) CompileError
if (block.label) |label| {
if (label.zir_block == zir_block) {
const br_ref = try start_block.addBr(label.merges.block_inst, operand);
const src_loc = if (extra.operand_src_node != Zir.Inst.Break.no_src_node)
start_block.nodeOffset(extra.operand_src_node)
const src_loc = if (extra.operand_src_node.unwrap()) |operand_src_node|
start_block.nodeOffset(operand_src_node)
else
null;
try label.merges.src_locs.append(sema.gpa, src_loc);
@ -6715,8 +6715,7 @@ fn zirSwitchContinue(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) Com
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].@"break";
const extra = sema.code.extraData(Zir.Inst.Break, inst_data.payload_index).data;
assert(extra.operand_src_node != Zir.Inst.Break.no_src_node);
const operand_src = start_block.nodeOffset(extra.operand_src_node);
const operand_src = start_block.nodeOffset(extra.operand_src_node.unwrap().?);
const uncoerced_operand = try sema.resolveInst(inst_data.operand);
const switch_inst = extra.block_inst;
@ -7048,7 +7047,7 @@ pub fn analyzeSaveErrRetIndex(sema: *Sema, block: *Block) SemaError!Air.Inst.Ref
if (!block.ownerModule().error_tracing) return .none;
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try stack_trace_ty.resolveFields(pt);
const field_name = try zcu.intern_pool.getOrPutString(gpa, pt.tid, "index", .no_embedded_nulls);
const field_index = sema.structFieldIndex(block, stack_trace_ty, field_name, LazySrcLoc.unneeded) catch |err| switch (err) {
@ -7346,7 +7345,7 @@ fn checkCallArgumentCount(
if (maybe_func_inst) |func_inst| {
try sema.errNote(.{
.base_node_inst = func_inst,
.offset = LazySrcLoc.Offset.nodeOffset(0),
.offset = LazySrcLoc.Offset.nodeOffset(.zero),
}, msg, "function declared here", .{});
}
break :msg msg;
@ -7418,7 +7417,7 @@ const CallArgsInfo = union(enum) {
/// The list of resolved (but uncoerced) arguments is known ahead of time, but
/// originated from a usage of the @call builtin at the given node offset.
call_builtin: struct {
call_node_offset: i32,
call_node_offset: std.zig.Ast.Node.Offset,
args: []const Air.Inst.Ref,
},
@ -7436,7 +7435,7 @@ const CallArgsInfo = union(enum) {
/// analyzing arguments.
call_inst: Zir.Inst.Index,
/// The node offset of `call_inst`.
call_node_offset: i32,
call_node_offset: std.zig.Ast.Node.Offset,
/// The number of arguments to this call, not including `bound_arg`.
num_args: u32,
/// The ZIR corresponding to all function arguments (other than `bound_arg`, if it
@ -7599,7 +7598,7 @@ fn analyzeCall(
const maybe_func_inst = try sema.funcDeclSrcInst(callee);
const func_ret_ty_src: LazySrcLoc = if (maybe_func_inst) |fn_decl_inst| .{
.base_node_inst = fn_decl_inst,
.offset = .{ .node_offset_fn_type_ret_ty = 0 },
.offset = .{ .node_offset_fn_type_ret_ty = .zero },
} else func_src;
const func_ty_info = zcu.typeToFunc(func_ty).?;
@ -7613,7 +7612,7 @@ fn analyzeCall(
errdefer msg.destroy(gpa);
if (maybe_func_inst) |func_inst| try sema.errNote(.{
.base_node_inst = func_inst,
.offset = .nodeOffset(0),
.offset = .nodeOffset(.zero),
}, msg, "function declared here", .{});
break :msg msg;
});
@ -9574,7 +9573,7 @@ const Section = union(enum) {
fn funcCommon(
sema: *Sema,
block: *Block,
src_node_offset: i32,
src_node_offset: std.zig.Ast.Node.Offset,
func_inst: Zir.Inst.Index,
cc: std.builtin.CallingConvention,
/// this might be Type.generic_poison
@ -9948,7 +9947,7 @@ fn finishFunc(
if (!is_generic and sema.wantErrorReturnTracing(return_type)) {
// Make sure that StackTrace's fields are resolved so that the backend can
// lower this fn type.
const unresolved_stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
const unresolved_stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try unresolved_stack_trace_ty.resolveFields(pt);
}
@ -12599,7 +12598,7 @@ fn analyzeSwitchRuntimeBlock(
union_originally: bool,
maybe_union_ty: Type,
err_set: bool,
switch_node_offset: i32,
switch_node_offset: std.zig.Ast.Node.Offset,
special_prong_src: LazySrcLoc,
seen_enum_fields: []?LazySrcLoc,
seen_errors: SwitchErrorSet,
@ -13219,7 +13218,7 @@ fn resolveSwitchComptimeLoop(
maybe_ptr_operand_ty: Type,
cond_ty: Type,
init_cond_val: Value,
switch_node_offset: i32,
switch_node_offset: std.zig.Ast.Node.Offset,
special: SpecialProng,
case_vals: std.ArrayListUnmanaged(Air.Inst.Ref),
scalar_cases_len: u32,
@ -13255,7 +13254,7 @@ fn resolveSwitchComptimeLoop(
const extra = sema.code.extraData(Zir.Inst.Break, break_inst.data.@"break".payload_index).data;
if (extra.block_inst != spa.switch_block_inst) return error.ComptimeBreak;
// This is a `switch_continue` targeting this block. Change the operand and start over.
const src = child_block.nodeOffset(extra.operand_src_node);
const src = child_block.nodeOffset(extra.operand_src_node.unwrap().?);
const new_operand_uncoerced = try sema.resolveInst(break_inst.data.@"break".operand);
const new_operand = try sema.coerce(child_block, maybe_ptr_operand_ty, new_operand_uncoerced, src);
@ -13287,7 +13286,7 @@ fn resolveSwitchComptime(
cond_operand: Air.Inst.Ref,
operand_val: Value,
operand_ty: Type,
switch_node_offset: i32,
switch_node_offset: std.zig.Ast.Node.Offset,
special: SpecialProng,
case_vals: std.ArrayListUnmanaged(Air.Inst.Ref),
scalar_cases_len: u32,
@ -13837,7 +13836,7 @@ fn validateSwitchNoRange(
block: *Block,
ranges_len: u32,
operand_ty: Type,
src_node_offset: i32,
src_node_offset: std.zig.Ast.Node.Offset,
) CompileError!void {
if (ranges_len == 0)
return;
@ -14158,14 +14157,24 @@ fn zirShl(
const pt = sema.pt;
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.nodeOffset(inst_data.src_node);
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
const lhs_ty = sema.typeOf(lhs);
const rhs_ty = sema.typeOf(rhs);
const src = block.nodeOffset(inst_data.src_node);
const lhs_src = switch (air_tag) {
.shl, .shl_sat => block.src(.{ .node_offset_bin_lhs = inst_data.src_node }),
.shl_exact => block.builtinCallArgSrc(inst_data.src_node, 0),
else => unreachable,
};
const rhs_src = switch (air_tag) {
.shl, .shl_sat => block.src(.{ .node_offset_bin_rhs = inst_data.src_node }),
.shl_exact => block.builtinCallArgSrc(inst_data.src_node, 1),
else => unreachable,
};
try sema.checkVectorizableBinaryOperands(block, src, lhs_ty, rhs_ty, lhs_src, rhs_src);
const scalar_ty = lhs_ty.scalarType(zcu);
@ -14329,14 +14338,24 @@ fn zirShr(
const pt = sema.pt;
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.nodeOffset(inst_data.src_node);
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
const lhs_ty = sema.typeOf(lhs);
const rhs_ty = sema.typeOf(rhs);
const src = block.nodeOffset(inst_data.src_node);
const lhs_src = switch (air_tag) {
.shr => block.src(.{ .node_offset_bin_lhs = inst_data.src_node }),
.shr_exact => block.builtinCallArgSrc(inst_data.src_node, 0),
else => unreachable,
};
const rhs_src = switch (air_tag) {
.shr => block.src(.{ .node_offset_bin_rhs = inst_data.src_node }),
.shr_exact => block.builtinCallArgSrc(inst_data.src_node, 1),
else => unreachable,
};
try sema.checkVectorizableBinaryOperands(block, src, lhs_ty, rhs_ty, lhs_src, rhs_src);
const scalar_ty = lhs_ty.scalarType(zcu);
@ -14560,7 +14579,7 @@ fn zirBitNot(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
fn analyzeTupleCat(
sema: *Sema,
block: *Block,
src_node: i32,
src_node: std.zig.Ast.Node.Offset,
lhs: Air.Inst.Ref,
rhs: Air.Inst.Ref,
) CompileError!Air.Inst.Ref {
@ -15005,7 +15024,7 @@ fn getArrayCatInfo(sema: *Sema, block: *Block, src: LazySrcLoc, operand: Air.Ins
fn analyzeTupleMul(
sema: *Sema,
block: *Block,
src_node: i32,
src_node: std.zig.Ast.Node.Offset,
operand: Air.Inst.Ref,
factor: usize,
) CompileError!Air.Inst.Ref {
@ -15494,8 +15513,8 @@ fn zirDivExact(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@ -15660,8 +15679,8 @@ fn zirDivFloor(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@ -15771,8 +15790,8 @@ fn zirDivTrunc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@ -16201,8 +16220,8 @@ fn zirMod(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@ -16297,8 +16316,8 @@ fn zirRem(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@ -17873,7 +17892,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
const ip = &zcu.intern_pool;
const captures = Type.fromInterned(zcu.namespacePtr(block.namespace).owner_type).getCaptures(zcu);
const src_node: i32 = @bitCast(extended.operand);
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
const src = block.nodeOffset(src_node);
const capture_ty = switch (captures.get(ip)[extended.small].unwrap()) {
@ -17897,8 +17916,8 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
});
break :name null;
};
const node: std.zig.Ast.Node.Index = @bitCast(src_node + @as(i32, @bitCast(src_base_node)));
const token = tree.nodes.items(.main_token)[node];
const node = src_node.toAbsolute(src_base_node);
const token = tree.nodeMainToken(node);
break :name tree.tokenSlice(token);
};
@ -17925,8 +17944,8 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
});
break :name null;
};
const node: std.zig.Ast.Node.Index = @bitCast(src_node + @as(i32, @bitCast(src_base_node)));
const token = tree.nodes.items(.main_token)[node];
const node = src_node.toAbsolute(src_base_node);
const token = tree.nodeMainToken(node);
break :name tree.tokenSlice(token);
};
@ -17936,7 +17955,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
try sema.errMsg(src, "variable not accessible from inner function", .{});
errdefer msg.destroy(sema.gpa);
try sema.errNote(block.nodeOffset(0), msg, "crossed function definition here", .{});
try sema.errNote(block.nodeOffset(.zero), msg, "crossed function definition here", .{});
// TODO add "declared here" note
break :msg msg;
@ -17968,7 +17987,8 @@ fn zirFrameAddress(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const src = block.nodeOffset(@bitCast(extended.operand));
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
const src = block.nodeOffset(src_node);
try sema.requireRuntimeBlock(block, src, null);
return try block.addNoOp(.frame_addr);
}
@ -18065,7 +18085,7 @@ fn zirBuiltinSrc(
} });
};
const src_loc_ty = try sema.getBuiltinType(block.nodeOffset(0), .SourceLocation);
const src_loc_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .SourceLocation);
const fields = .{
// module: [:0]const u8,
module_name_val,
@ -19534,7 +19554,7 @@ fn zirCondbr(
fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = parent_block.nodeOffset(inst_data.src_node);
const operand_src = parent_block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const operand_src = parent_block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
const err_union = try sema.resolveInst(extra.data.operand);
@ -19593,7 +19613,7 @@ fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!
fn zirTryPtr(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = parent_block.nodeOffset(inst_data.src_node);
const operand_src = parent_block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const operand_src = parent_block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
const operand = try sema.resolveInst(extra.data.operand);
@ -19796,7 +19816,7 @@ fn zirRetImplicit(
}
const operand = try sema.resolveInst(inst_data.operand);
const ret_ty_src = block.src(.{ .node_offset_fn_type_ret_ty = 0 });
const ret_ty_src = block.src(.{ .node_offset_fn_type_ret_ty = .zero });
const base_tag = sema.fn_ret_ty.baseZigTypeTag(zcu);
if (base_tag == .noreturn) {
const msg = msg: {
@ -21283,7 +21303,7 @@ fn getErrorReturnTrace(sema: *Sema, block: *Block) CompileError!Air.Inst.Ref {
const pt = sema.pt;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try stack_trace_ty.resolveFields(pt);
const ptr_stack_trace_ty = try pt.singleMutPtrType(stack_trace_ty);
const opt_ptr_stack_trace_ty = try pt.optionalType(ptr_stack_trace_ty.toIntern());
@ -21305,7 +21325,8 @@ fn zirFrame(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
const src = block.nodeOffset(@bitCast(extended.operand));
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
const src = block.nodeOffset(src_node);
return sema.failWithUseOfAsync(block, src);
}
@ -21559,13 +21580,13 @@ fn zirReify(
const tracked_inst = try block.trackZir(inst);
const src: LazySrcLoc = .{
.base_node_inst = tracked_inst,
.offset = LazySrcLoc.Offset.nodeOffset(0),
.offset = LazySrcLoc.Offset.nodeOffset(.zero),
};
const operand_src: LazySrcLoc = .{
.base_node_inst = tracked_inst,
.offset = .{
.node_offset_builtin_call_arg = .{
.builtin_call_node = 0, // `tracked_inst` is precisely the `reify` instruction, so offset is 0
.builtin_call_node = .zero, // `tracked_inst` is precisely the `reify` instruction, so offset is 0
.arg_index = 0,
},
},
@ -22873,7 +22894,8 @@ fn zirCVaEnd(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C
}
fn zirCVaStart(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
const src = block.nodeOffset(@bitCast(extended.operand));
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
const src = block.nodeOffset(src_node);
const va_list_ty = try sema.getBuiltinType(src, .VaList);
try sema.requireRuntimeBlock(block, src, null);
@ -24278,12 +24300,12 @@ fn zirOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u64 {
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const ty_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const field_name_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const ty = try sema.resolveType(block, lhs_src, extra.lhs);
const field_name = try sema.resolveConstStringIntern(block, rhs_src, extra.rhs, .{ .simple = .field_name });
const ty = try sema.resolveType(block, ty_src, extra.lhs);
const field_name = try sema.resolveConstStringIntern(block, field_name_src, extra.rhs, .{ .simple = .field_name });
const pt = sema.pt;
const zcu = pt.zcu;
@ -24291,15 +24313,15 @@ fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u6
try ty.resolveLayout(pt);
switch (ty.zigTypeTag(zcu)) {
.@"struct" => {},
else => return sema.fail(block, lhs_src, "expected struct type, found '{}'", .{ty.fmt(pt)}),
else => return sema.fail(block, ty_src, "expected struct type, found '{}'", .{ty.fmt(pt)}),
}
const field_index = if (ty.isTuple(zcu)) blk: {
if (field_name.eqlSlice("len", ip)) {
return sema.fail(block, src, "no offset available for 'len' field of tuple", .{});
}
break :blk try sema.tupleFieldIndex(block, ty, field_name, rhs_src);
} else try sema.structFieldIndex(block, ty, field_name, rhs_src);
break :blk try sema.tupleFieldIndex(block, ty, field_name, field_name_src);
} else try sema.structFieldIndex(block, ty, field_name, field_name_src);
if (ty.structFieldIsComptime(field_index, zcu)) {
return sema.fail(block, src, "no offset available for comptime field", .{});
@ -25083,7 +25105,7 @@ fn zirShuffle(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
fn analyzeShuffle(
sema: *Sema,
block: *Block,
src_node: i32,
src_node: std.zig.Ast.Node.Offset,
elem_ty: Type,
a_arg: Air.Inst.Ref,
b_arg: Air.Inst.Ref,
@ -27010,7 +27032,8 @@ fn zirBuiltinValue(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstD
const gpa = zcu.gpa;
const ip = &zcu.intern_pool;
const src = block.nodeOffset(@bitCast(extended.operand));
const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
const src = block.nodeOffset(src_node);
const value: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
const ty = switch (value) {
@ -29485,7 +29508,7 @@ const CoerceOpts = struct {
return .{
.base_node_inst = func_inst,
.offset = .{ .fn_proto_param_type = .{
.fn_proto_node_offset = 0,
.fn_proto_node_offset = .zero,
.param_index = info.param_i,
} },
};
@ -30090,7 +30113,7 @@ fn coerceExtra(
const ret_ty_src: LazySrcLoc = .{
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
.offset = .{ .node_offset_fn_type_ret_ty = 0 },
.offset = .{ .node_offset_fn_type_ret_ty = .zero },
};
try sema.errNote(ret_ty_src, msg, "'noreturn' declared here", .{});
break :msg msg;
@ -30130,7 +30153,7 @@ fn coerceExtra(
{
const ret_ty_src: LazySrcLoc = .{
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
.offset = .{ .node_offset_fn_type_ret_ty = 0 },
.offset = .{ .node_offset_fn_type_ret_ty = .zero },
};
if (inst_ty.isError(zcu) and !dest_ty.isError(zcu)) {
try sema.errNote(ret_ty_src, msg, "function cannot return an error", .{});
@ -32331,7 +32354,7 @@ pub fn ensureNavResolved(sema: *Sema, src: LazySrcLoc, nav_index: InternPool.Nav
if (zcu.analysis_in_progress.contains(anal_unit)) {
return sema.failWithOwnedErrorMsg(null, try sema.errMsg(.{
.base_node_inst = nav.analysis.?.zir_index,
.offset = LazySrcLoc.Offset.nodeOffset(0),
.offset = LazySrcLoc.Offset.nodeOffset(.zero),
}, "dependency loop detected", .{}));
}
@ -33948,7 +33971,7 @@ const PeerTypeCandidateSrc = union(enum) {
/// index i in this slice
override: []const ?LazySrcLoc,
/// resolvePeerTypes originates from a @TypeOf(...) call
typeof_builtin_call_node_offset: i32,
typeof_builtin_call_node_offset: std.zig.Ast.Node.Offset,
pub fn resolve(
self: PeerTypeCandidateSrc,
@ -35551,7 +35574,7 @@ fn backingIntType(
const backing_int_src: LazySrcLoc = .{
.base_node_inst = struct_type.zir_index,
.offset = .{ .node_offset_container_tag = 0 },
.offset = .{ .node_offset_container_tag = .zero },
};
block.comptime_reason = .{ .reason = .{
.src = backing_int_src,
@ -35572,7 +35595,7 @@ fn backingIntType(
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
} else {
if (fields_bit_sum > std.math.maxInt(u16)) {
return sema.fail(&block, block.nodeOffset(0), "size of packed struct '{d}' exceeds maximum bit width of 65535", .{fields_bit_sum});
return sema.fail(&block, block.nodeOffset(.zero), "size of packed struct '{d}' exceeds maximum bit width of 65535", .{fields_bit_sum});
}
const backing_int_ty = try pt.intType(.unsigned, @intCast(fields_bit_sum));
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
@ -36173,7 +36196,7 @@ fn structFields(
.comptime_reason = .{ .reason = .{
.src = .{
.base_node_inst = struct_type.zir_index,
.offset = .nodeOffset(0),
.offset = .nodeOffset(.zero),
},
.r = .{ .simple = .struct_fields },
} },
@ -36514,7 +36537,7 @@ fn unionFields(
const src: LazySrcLoc = .{
.base_node_inst = union_type.zir_index,
.offset = .nodeOffset(0),
.offset = .nodeOffset(.zero),
};
var block_scope: Block = .{
@ -36543,7 +36566,7 @@ fn unionFields(
if (tag_type_ref != .none) {
const tag_ty_src: LazySrcLoc = .{
.base_node_inst = union_type.zir_index,
.offset = .{ .node_offset_container_tag = 0 },
.offset = .{ .node_offset_container_tag = .zero },
};
const provided_ty = try sema.resolveType(&block_scope, tag_ty_src, tag_type_ref);
if (small.auto_enum_tag) {
@ -38523,7 +38546,7 @@ pub fn resolveDeclaredEnum(
const zcu = pt.zcu;
const gpa = zcu.gpa;
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
var arena: std.heap.ArenaAllocator = .init(gpa);
defer arena.deinit();
@ -38610,7 +38633,7 @@ fn resolveDeclaredEnumInner(
const bit_bags_count = std.math.divCeil(usize, fields_len, 32) catch unreachable;
const tag_ty_src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = .{ .node_offset_container_tag = 0 } };
const tag_ty_src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = .{ .node_offset_container_tag = .zero } };
const int_tag_ty = ty: {
if (body.len != 0) {
@ -38763,9 +38786,9 @@ pub fn resolveNavPtrModifiers(
const gpa = zcu.gpa;
const ip = &zcu.intern_pool;
const align_src = block.src(.{ .node_offset_var_decl_align = 0 });
const section_src = block.src(.{ .node_offset_var_decl_section = 0 });
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = 0 });
const align_src = block.src(.{ .node_offset_var_decl_align = .zero });
const section_src = block.src(.{ .node_offset_var_decl_section = .zero });
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = .zero });
const alignment: InternPool.Alignment = a: {
const align_body = zir_decl.align_body orelse break :a .none;
@ -38838,7 +38861,7 @@ pub fn analyzeMemoizedState(sema: *Sema, block: *Block, simple_src: LazySrcLoc,
const src: LazySrcLoc = .{
.base_node_inst = ip.getNav(nav).srcInst(ip),
.offset = .nodeOffset(0),
.offset = .nodeOffset(.zero),
};
const result = try sema.analyzeNavVal(block, src, nav);

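Throughout Sema, node offsets relative to a base declaration stop being bare `i32` values: `std.zig.Ast.Node.Offset` is a distinct enum with a `.zero` constant, and unpacking one from `extended.operand` becomes an explicit two-step cast, as the hunks above show. Sketch of the recurring idiom (assuming `extended.operand` carries a signed node offset, per the diff):

    const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
    const src = block.nodeOffset(src_node);
    const decl_src = block.nodeOffset(.zero); // the declaration's own node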
View file

@ -3505,7 +3505,7 @@ pub fn srcLocOrNull(ty: Type, zcu: *Zcu) ?Zcu.LazySrcLoc {
},
else => return null,
},
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(0),
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(.zero),
};
}

File diff suppressed because it is too large

View file

@ -841,7 +841,7 @@ fn analyzeComptimeUnit(pt: Zcu.PerThread, cu_id: InternPool.ComptimeUnit.Id) Zcu
.comptime_reason = .{ .reason = .{
.src = .{
.base_node_inst = comptime_unit.zir_index,
.offset = .{ .token_offset = 0 },
.offset = .{ .token_offset = .zero },
},
.r = .{ .simple = .comptime_keyword },
} },
@ -1042,11 +1042,11 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
const zir_decl = zir.getDeclaration(inst_resolved.inst);
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
const ty_src = block.src(.{ .node_offset_var_decl_ty = 0 });
const init_src = block.src(.{ .node_offset_var_decl_init = 0 });
const align_src = block.src(.{ .node_offset_var_decl_align = 0 });
const section_src = block.src(.{ .node_offset_var_decl_section = 0 });
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = 0 });
const ty_src = block.src(.{ .node_offset_var_decl_ty = .zero });
const init_src = block.src(.{ .node_offset_var_decl_init = .zero });
const align_src = block.src(.{ .node_offset_var_decl_align = .zero });
const section_src = block.src(.{ .node_offset_var_decl_section = .zero });
const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = .zero });
block.comptime_reason = .{ .reason = .{
.src = init_src,
@ -1135,7 +1135,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
break :l zir.nullTerminatedString(zir_decl.lib_name);
} else null;
if (lib_name) |l| {
const lib_name_src = block.src(.{ .node_offset_lib_name = 0 });
const lib_name_src = block.src(.{ .node_offset_lib_name = .zero });
try sema.handleExternLibName(&block, lib_name_src, l);
}
break :val .fromInterned(try pt.getExtern(.{
@ -1233,7 +1233,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
}
if (zir_decl.linkage == .@"export") {
const export_src = block.src(.{ .token_offset = @intFromBool(zir_decl.is_pub) });
const export_src = block.src(.{ .token_offset = @enumFromInt(@intFromBool(zir_decl.is_pub)) });
const name_slice = zir.nullTerminatedString(zir_decl.name);
const name_ip = try ip.getOrPutString(gpa, pt.tid, name_slice, .no_embedded_nulls);
try sema.analyzeExport(&block, export_src, .{ .name = name_ip }, nav_id);
@ -1414,7 +1414,7 @@ fn analyzeNavType(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileEr
const zir_decl = zir.getDeclaration(inst_resolved.inst);
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
const ty_src = block.src(.{ .node_offset_var_decl_ty = 0 });
const ty_src = block.src(.{ .node_offset_var_decl_ty = .zero });
block.comptime_reason = .{ .reason = .{
.src = ty_src,
@ -2743,7 +2743,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaE
if (sema.fn_ret_ty_ies) |ies| {
sema.resolveInferredErrorSetPtr(&inner_block, .{
.base_node_inst = inner_block.src_base_inst,
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(0),
.offset = Zcu.LazySrcLoc.Offset.nodeOffset(.zero),
}, ies) catch |err| switch (err) {
error.ComptimeReturn => unreachable,
error.ComptimeBreak => unreachable,
@ -2762,7 +2762,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaE
// result in circular dependency errors.
// TODO: this can go away once we fix backends having to resolve `StackTrace`.
// The codegen timing guarantees that the parameter types will be populated.
sema.resolveFnTypes(fn_ty, inner_block.nodeOffset(0)) catch |err| switch (err) {
sema.resolveFnTypes(fn_ty, inner_block.nodeOffset(.zero)) catch |err| switch (err) {
error.ComptimeReturn => unreachable,
error.ComptimeBreak => unreachable,
else => |e| return e,

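Token offsets follow the same scheme: `.token_offset = .zero` replaces the literal `0`, and computed offsets are cast in explicitly. From the hunks above, assuming `zir_decl.is_pub: bool`:

    const kw_src = block.src(.{ .token_offset = .zero });
    const export_src = block.src(.{ .token_offset = @enumFromInt(@intFromBool(zir_decl.is_pub)) });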
View file

@ -5224,7 +5224,7 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .relative_path = build_mod.root },
.location_tok = 0,
.hash_tok = 0,
.hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = build_mod.root,
@ -6285,8 +6285,10 @@ fn cmdAstCheck(
file.tree.?.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(Ast.ByteOffset));
const tree_bytes = @sizeOf(Ast) + file.tree.?.nodes.len *
(@sizeOf(Ast.Node.Tag) +
@sizeOf(Ast.Node.Data) +
@sizeOf(Ast.TokenIndex));
@sizeOf(Ast.TokenIndex) +
// Here we don't use @sizeOf(Ast.Node.Data) because it would include
// the debug safety tag but we want to measure release size.
8);
const instruction_bytes = file.zir.?.instructions.len *
// Here we don't use @sizeOf(Zir.Inst.Data) because it would include
// the debug safety tag but we want to measure release size.
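The literal `8` above is deliberate: `Ast.Node.Data` is now a bare union, and in Debug builds `@sizeOf` would also count the hidden safety tag, inflating the measurement. A sketch of the intended release-mode figure (the `8` standing for the data payload is the diff's stated assumption, not something `@sizeOf` can report here):

    const bytes_per_node =
        @sizeOf(Ast.Node.Tag) + // node tag
        @sizeOf(Ast.TokenIndex) + // main token
        8; // Node.Data payload, excluding the debug safety tag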
@ -7126,7 +7128,7 @@ fn cmdFetch(
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .path_or_url = path_or_url },
.location_tok = 0,
.hash_tok = 0,
.hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = undefined,
@ -7282,15 +7284,19 @@ fn cmdFetch(
warn("overwriting existing dependency named '{s}'", .{name});
try fixups.replace_nodes_with_string.put(gpa, dep.location_node, location_replace);
try fixups.replace_nodes_with_string.put(gpa, dep.hash_node, hash_replace);
if (dep.hash_node.unwrap()) |hash_node| {
try fixups.replace_nodes_with_string.put(gpa, hash_node, hash_replace);
} else {
// https://github.com/ziglang/zig/issues/21690
}
} else if (manifest.dependencies.count() > 0) {
// Add fixup for adding another dependency.
const deps = manifest.dependencies.values();
const last_dep_node = deps[deps.len - 1].node;
try fixups.append_string_after_node.put(gpa, last_dep_node, new_node_text);
} else if (manifest.dependencies_node != 0) {
} else if (manifest.dependencies_node.unwrap()) |dependencies_node| {
// Add fixup for replacing the entire dependencies struct.
try fixups.replace_nodes_with_string.put(gpa, manifest.dependencies_node, dependencies_init);
try fixups.replace_nodes_with_string.put(gpa, dependencies_node, dependencies_init);
} else {
// Add fixup for adding dependencies struct.
try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text);

View file

@ -24,7 +24,7 @@ pub fn renderAsTextToFile(
.file = scope_file,
.code = scope_file.zir.?,
.indent = 0,
.parent_decl_node = 0,
.parent_decl_node = .root,
.recurse_decls = true,
.recurse_blocks = true,
};
@ -185,10 +185,6 @@ const Writer = struct {
}
} = .{},
fn relativeToNodeIndex(self: *Writer, offset: i32) Ast.Node.Index {
return @bitCast(offset + @as(i32, @bitCast(self.parent_decl_node)));
}
fn writeInstToStream(
self: *Writer,
stream: anytype,
@ -595,7 +591,7 @@ const Writer = struct {
const prev_parent_decl_node = self.parent_decl_node;
self.parent_decl_node = inst_data.node;
defer self.parent_decl_node = prev_parent_decl_node;
try self.writeSrcNode(stream, 0);
try self.writeSrcNode(stream, .zero);
},
.builtin_extern,
@ -631,7 +627,8 @@ const Writer = struct {
fn writeExtNode(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
try stream.writeAll(")) ");
try self.writeSrcNode(stream, @bitCast(extended.operand));
const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
try self.writeSrcNode(stream, src_node);
}
fn writeArrayInitElemType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
@ -1579,7 +1576,7 @@ const Writer = struct {
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
}
try self.writeSrcNode(stream, 0);
try self.writeSrcNode(stream, .zero);
}
fn writeUnionDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@ -1659,7 +1656,7 @@ const Writer = struct {
if (fields_len == 0) {
try stream.writeAll("}) ");
try self.writeSrcNode(stream, 0);
try self.writeSrcNode(stream, .zero);
return;
}
try stream.writeAll(", ");
@ -1730,7 +1727,7 @@ const Writer = struct {
self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
try self.writeSrcNode(stream, 0);
try self.writeSrcNode(stream, .zero);
}
fn writeEnumDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@ -1849,7 +1846,7 @@ const Writer = struct {
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
}
try self.writeSrcNode(stream, 0);
try self.writeSrcNode(stream, .zero);
}
fn writeOpaqueDecl(
@ -1893,7 +1890,7 @@ const Writer = struct {
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
}
try self.writeSrcNode(stream, 0);
try self.writeSrcNode(stream, .zero);
}
fn writeTupleDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@ -2539,7 +2536,7 @@ const Writer = struct {
ret_ty_body: []const Zir.Inst.Index,
ret_ty_is_generic: bool,
body: []const Zir.Inst.Index,
src_node: i32,
src_node: Ast.Node.Offset,
src_locs: Zir.Inst.Func.SrcLocs,
noalias_bits: u32,
) !void {
@ -2647,18 +2644,20 @@ const Writer = struct {
}
try stream.writeAll(") ");
try self.writeSrcNode(stream, 0);
try self.writeSrcNode(stream, .zero);
}
fn writeClosureGet(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
try stream.print("{d})) ", .{extended.small});
try self.writeSrcNode(stream, @bitCast(extended.operand));
const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
try self.writeSrcNode(stream, src_node);
}
fn writeBuiltinValue(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const val: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
try stream.print("{s})) ", .{@tagName(val)});
try self.writeSrcNode(stream, @bitCast(extended.operand));
const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
try self.writeSrcNode(stream, src_node);
}
fn writeInplaceArithResultTy(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@ -2760,9 +2759,9 @@ const Writer = struct {
try stream.writeAll(name);
}
fn writeSrcNode(self: *Writer, stream: anytype, src_node: i32) !void {
fn writeSrcNode(self: *Writer, stream: anytype, src_node: Ast.Node.Offset) !void {
const tree = self.file.tree orelse return;
const abs_node = self.relativeToNodeIndex(src_node);
const abs_node = src_node.toAbsolute(self.parent_decl_node);
const src_span = tree.nodeToSpan(abs_node);
const start = self.line_col_cursor.find(tree.source, src_span.start);
const end = self.line_col_cursor.find(tree.source, src_span.end);
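`toAbsolute` absorbs the deleted `relativeToNodeIndex` helper: the signed-offset arithmetic moves onto the offset type itself instead of being open-coded with `@bitCast`. A sketch under simplified index types (assumed, not the actual `std.zig.Ast` definitions):

```zig
const Index = enum(u32) { root = 0, _ };

pub const Offset = enum(i32) {
    zero = 0,
    _,

    // Resolve a decl-relative offset against the enclosing decl's node index.
    pub fn toAbsolute(off: Offset, base: Index) Index {
        const abs = @as(i64, @intFromEnum(base)) + @intFromEnum(off);
        return @enumFromInt(@as(u32, @intCast(abs)));
    }
};
```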
@ -2772,10 +2771,10 @@ const Writer = struct {
});
}
fn writeSrcTok(self: *Writer, stream: anytype, src_tok: u32) !void {
fn writeSrcTok(self: *Writer, stream: anytype, src_tok: Ast.TokenOffset) !void {
const tree = self.file.tree orelse return;
const abs_tok = tree.firstToken(self.parent_decl_node) + src_tok;
const span_start = tree.tokens.items(.start)[abs_tok];
const abs_tok = src_tok.toAbsolute(tree.firstToken(self.parent_decl_node));
const span_start = tree.tokenStart(abs_tok);
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(abs_tok).len));
const start = self.line_col_cursor.find(tree.source, span_start);
const end = self.line_col_cursor.find(tree.source, span_end);
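`writeSrcTok` gets the token-side counterpart: the offset is resolved against the first token of the enclosing decl through a typed `toAbsolute`, and the raw `tree.tokens.items(.start)[...]` access becomes the `tree.tokenStart(...)` accessor used throughout the commit. A hedged sketch, assuming `TokenIndex` remains a plain `u32`:

```zig
pub const TokenIndex = u32;

pub const TokenOffset = enum(i32) {
    zero = 0,
    _,

    // Resolve a token offset against a base token (the decl's first token).
    pub fn toAbsolute(off: TokenOffset, base: TokenIndex) TokenIndex {
        return @intCast(@as(i64, base) + @intFromEnum(off));
    }
};
```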
@ -2785,9 +2784,9 @@ const Writer = struct {
});
}
fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: u32) !void {
fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: Ast.TokenIndex) !void {
const tree = self.file.tree orelse return;
const span_start = tree.tokens.items(.start)[src_tok];
const span_start = tree.tokenStart(src_tok);
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(src_tok).len));
const start = self.line_col_cursor.find(tree.source, span_start);
const end = self.line_col_cursor.find(tree.source, span_end);

View file

@ -0,0 +1,14 @@
void foo() {
for (;;) {
continue;
}
}

// translate-c
// c_frontend=clang
//
// pub export fn foo() void {
// while (true) {
// continue;
// }
// }