fmt: canonicalize identifiers
This commit is contained in:
parent d6d4f2481d
commit ca9e1760e8
9 changed files with 731 additions and 91 deletions
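In short, this change teaches `zig fmt` to canonicalize `@"..."`-quoted identifiers: the quotes are dropped whenever the name is a valid bare identifier that does not collide with a keyword or a primitive, and escape sequences inside the quotes are normalized. A minimal before/after illustration, distilled from the tests added below (annotations are mine, not literal tool output):

// Before:
const @"hello" = 1;
const @"error": u8 = 2;
const @"u8" = u16;
const @"\x66oo" = 3;

// After `zig fmt`:
const hello = 1; // plain name: quotes dropped
const @"error": u8 = 2; // keyword: quotes kept
const @"u8" = u16; // primitive type: quotes kept on declarations
const foo = 3; // \x66 unescaped to 'f', then unquoted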
@@ -11,6 +11,7 @@ pub const isValidId = fmt.isValidId;
 pub const parse = @import("zig/parse.zig").parse;
 pub const string_literal = @import("zig/string_literal.zig");
 pub const number_literal = @import("zig/number_literal.zig");
+pub const primitives = @import("zig/primitives.zig");
 pub const Ast = @import("zig/Ast.zig");
 pub const system = @import("zig/system.zig");
 pub const CrossTarget = @import("zig/CrossTarget.zig");
@@ -5198,6 +5198,382 @@ test "zig fmt: while continue expr" {
     });
 }
 
+test "zig fmt: canonicalize symbols (simple)" {
+    try testTransform(
+        \\const val_normal: Normal = .{};
+        \\const @"val_unesc_me": @"UnescMe" = .{};
+        \\const @"val_esc!": @"Esc!" = .{};
+        \\
+        \\fn fnNormal() void {}
+        \\fn @"fnUnescMe"() void {}
+        \\fn @"fnEsc!"() void {}
+        \\
+        \\extern fn protoNormal() void;
+        \\extern fn @"protoUnescMe"() void;
+        \\extern fn @"protoEsc!"() void;
+        \\
+        \\fn fnWithArgs(normal: Normal, @"unesc_me": @"UnescMe", @"esc!": @"Esc!") void {
+        \\    _ = normal;
+        \\    _ = @"unesc_me";
+        \\    _ = @"esc!";
+        \\}
+        \\
+        \\const Normal = struct {};
+        \\const @"UnescMe" = struct {
+        \\    @"x": @"X",
+        \\    const X = union(@"EnumUnesc") {
+        \\        normal,
+        \\        @"unesc_me",
+        \\        @"esc!",
+        \\    };
+        \\    const @"EnumUnesc" = enum {
+        \\        normal,
+        \\        @"unesc_me",
+        \\        @"esc!",
+        \\    };
+        \\};
+        \\const @"Esc!" = struct {
+        \\    normal: bool = false,
+        \\    @"unesc_me": bool = false,
+        \\    @"esc!": bool = false,
+        \\};
+        \\
+        \\pub fn main() void {
+        \\    _ = val_normal;
+        \\    _ = @"val_normal";
+        \\    _ = val_unesc_me;
+        \\    _ = @"val_unesc_me";
+        \\    _ = @"val_esc!";
+        \\
+        \\    fnNormal();
+        \\    @"fnNormal"();
+        \\    fnUnescMe();
+        \\    @"fnUnescMe"();
+        \\    @"fnEsc!"();
+        \\
+        \\    fnWithArgs(1, Normal{}, UnescMe{}, @"Esc!"{});
+        \\    fnWithArgs(1, @"Normal"{}, @"UnescMe"{}, @"Esc!"{});
+        \\    fnWithArgs(1, @"Normal"{}, @"Normal"{}, @"Esc!"{});
+        \\
+        \\    const local_val1: @"Normal" = .{};
+        \\    const @"local_val2": UnescMe = .{
+        \\        .@"x" = .@"unesc_me",
+        \\    };
+        \\    fnWithArgs(@"local_val1", @"local_val2", .{ .@"normal" = true, .@"unesc_me" = true, .@"esc!" = true });
+        \\    fnWithArgs(local_val1, local_val2, .{ .normal = true, .unesc_me = true, .@"esc!" = true });
+        \\
+        \\    var x: u8 = 'x';
+        \\    switch (@"x") {
+        \\        @"x" => {},
+        \\    }
+        \\
+        \\    _ = @import("std"); // Don't mess with @builtins
+        \\    // @"comment"
+        \\}
+        \\
+    ,
+        \\const val_normal: Normal = .{};
+        \\const val_unesc_me: UnescMe = .{};
+        \\const @"val_esc!": @"Esc!" = .{};
+        \\
+        \\fn fnNormal() void {}
+        \\fn fnUnescMe() void {}
+        \\fn @"fnEsc!"() void {}
+        \\
+        \\extern fn protoNormal() void;
+        \\extern fn protoUnescMe() void;
+        \\extern fn @"protoEsc!"() void;
+        \\
+        \\fn fnWithArgs(normal: Normal, unesc_me: UnescMe, @"esc!": @"Esc!") void {
+        \\    _ = normal;
+        \\    _ = unesc_me;
+        \\    _ = @"esc!";
+        \\}
+        \\
+        \\const Normal = struct {};
+        \\const UnescMe = struct {
+        \\    x: X,
+        \\    const X = union(EnumUnesc) {
+        \\        normal,
+        \\        unesc_me,
+        \\        @"esc!",
+        \\    };
+        \\    const EnumUnesc = enum {
+        \\        normal,
+        \\        unesc_me,
+        \\        @"esc!",
+        \\    };
+        \\};
+        \\const @"Esc!" = struct {
+        \\    normal: bool = false,
+        \\    unesc_me: bool = false,
+        \\    @"esc!": bool = false,
+        \\};
+        \\
+        \\pub fn main() void {
+        \\    _ = val_normal;
+        \\    _ = val_normal;
+        \\    _ = val_unesc_me;
+        \\    _ = val_unesc_me;
+        \\    _ = @"val_esc!";
+        \\
+        \\    fnNormal();
+        \\    fnNormal();
+        \\    fnUnescMe();
+        \\    fnUnescMe();
+        \\    @"fnEsc!"();
+        \\
+        \\    fnWithArgs(1, Normal{}, UnescMe{}, @"Esc!"{});
+        \\    fnWithArgs(1, Normal{}, UnescMe{}, @"Esc!"{});
+        \\    fnWithArgs(1, Normal{}, Normal{}, @"Esc!"{});
+        \\
+        \\    const local_val1: Normal = .{};
+        \\    const local_val2: UnescMe = .{
+        \\        .x = .unesc_me,
+        \\    };
+        \\    fnWithArgs(local_val1, local_val2, .{ .normal = true, .unesc_me = true, .@"esc!" = true });
+        \\    fnWithArgs(local_val1, local_val2, .{ .normal = true, .unesc_me = true, .@"esc!" = true });
+        \\
+        \\    var x: u8 = 'x';
+        \\    switch (x) {
+        \\        x => {},
+        \\    }
+        \\
+        \\    _ = @import("std"); // Don't mess with @builtins
+        \\    // @"comment"
+        \\}
+        \\
+    );
+}
+
+// Contextually unescape when shadowing primitive types and values.
+test "zig fmt: canonicalize symbols (primitive types)" {
+    try testTransform(
+        \\const @"anyopaque" = struct {
+        \\    @"u8": @"type" = true,
+        \\    @"_": @"false" = @"true",
+        \\    const @"type" = bool;
+        \\    const @"false" = bool;
+        \\    const @"true" = false;
+        \\};
+        \\
+        \\const U = union(@"null") {
+        \\    @"type",
+        \\    const @"null" = enum {
+        \\        @"type",
+        \\    };
+        \\};
+        \\
+        \\test {
+        \\    const E = enum { @"anyopaque" };
+        \\    _ = U{ .@"type" = {} };
+        \\    _ = U.@"type";
+        \\    _ = E.@"anyopaque";
+        \\}
+        \\
+        \\fn @"i10"(@"void": @"anyopaque", @"type": @"anyopaque".@"type") error{@"null"}!void {
+        \\    var @"f32" = @"void";
+        \\    @"f32".@"u8" = false;
+        \\    _ = @"type";
+        \\    _ = type;
+        \\    if (@"f32".@"u8") {
+        \\        return @"i10"(.{ .@"u8" = true, .@"_" = false }, false);
+        \\    } else {
+        \\        return error.@"null";
+        \\    }
+        \\}
+        \\
+        \\test @"i10" {
+        \\    try @"i10"(.{}, true);
+        \\    _ = @"void": while (null) |@"u3"| {
+        \\        break :@"void" @"u3";
+        \\    };
+        \\    _ = @"void": {
+        \\        break :@"void";
+        \\    };
+        \\    for ("hi") |@"u3", @"i4"| {
+        \\        _ = @"u3";
+        \\        _ = @"i4";
+        \\    }
+        \\    if (false) {} else |@"bool"| {
+        \\        _ = @"bool";
+        \\    }
+        \\}
+        \\
+    ,
+        \\const @"anyopaque" = struct {
+        \\    u8: @"type" = true,
+        \\    _: @"false" = @"true",
+        \\    const @"type" = bool;
+        \\    const @"false" = bool;
+        \\    const @"true" = false;
+        \\};
+        \\
+        \\const U = union(@"null") {
+        \\    type,
+        \\    const @"null" = enum {
+        \\        type,
+        \\    };
+        \\};
+        \\
+        \\test {
+        \\    const E = enum { anyopaque };
+        \\    _ = U{ .type = {} };
+        \\    _ = U.type;
+        \\    _ = E.anyopaque;
+        \\}
+        \\
+        \\fn @"i10"(@"void": @"anyopaque", @"type": @"anyopaque".type) error{null}!void {
+        \\    var @"f32" = @"void";
+        \\    @"f32".u8 = false;
+        \\    _ = @"type";
+        \\    _ = type;
+        \\    if (@"f32".u8) {
+        \\        return @"i10"(.{ .u8 = true, ._ = false }, false);
+        \\    } else {
+        \\        return error.null;
+        \\    }
+        \\}
+        \\
+        \\test @"i10" {
+        \\    try @"i10"(.{}, true);
+        \\    _ = void: while (null) |@"u3"| {
+        \\        break :void @"u3";
+        \\    };
+        \\    _ = void: {
+        \\        break :void;
+        \\    };
+        \\    for ("hi") |@"u3", @"i4"| {
+        \\        _ = @"u3";
+        \\        _ = @"i4";
+        \\    }
+        \\    if (false) {} else |@"bool"| {
+        \\        _ = @"bool";
+        \\    }
+        \\}
+        \\
+    );
+}
+
+// Never unescape names spelled like keywords.
+test "zig fmt: canonicalize symbols (keywords)" {
+    try testCanonical(
+        \\const @"enum" = struct {
+        \\    @"error": @"struct" = true,
+        \\    const @"struct" = bool;
+        \\};
+        \\
+        \\fn @"usingnamespace"(@"union": @"enum") error{@"try"}!void {
+        \\    var @"struct" = @"union";
+        \\    @"struct".@"error" = false;
+        \\    if (@"struct".@"error") {
+        \\        return @"usingnamespace"(.{ .@"error" = false });
+        \\    } else {
+        \\        return error.@"try";
+        \\    }
+        \\}
+        \\
+        \\test @"usingnamespace" {
+        \\    try @"usingnamespace"(.{});
+        \\    _ = @"return": {
+        \\        break :@"return" 4;
+        \\    };
+        \\}
+        \\
+    );
+}
+
+// Normalize \xNN and \u{NN} escapes and unicode inside @"" escapes.
+test "zig fmt: canonicalize symbols (character escapes)" {
+    try testTransform(
+        \\const @"\x46\x6f\x6f\x64" = struct {
+        \\    @"\x62\x61\x72\x6E": @"\x43\x72\x61\x62" = false,
+        \\    @"\u{67}\u{6C}o\u{70}\xFF": @"Cra\x62" = false,
+        \\    @"\x65\x72\x72\x6F\x72": Crab = true,
+        \\    @"\x74\x72\x79": Crab = true,
+        \\    @"\u{74}\u{79}\u{70}\u{65}": @"any\u{6F}\u{70}\u{61}\u{71}\u{75}\u{65}",
+        \\
+        \\    const @"\x43\x72\x61\x62" = bool;
+        \\    const @"\x61\x6E\x79\x6F\x70\x61que" = void;
+        \\};
+        \\
+        \\test "unicode" {
+        \\    const @"cąbbäge ⚡" = 2;
+        \\    _ = @"cąbbäge ⚡";
+        \\    const @"\u{01f422} friend\u{f6}" = 4;
+        \\    _ = @"🐢 friendö";
+        \\}
+        \\
+    ,
+        \\const Food = struct {
+        \\    barn: Crab = false,
+        \\    @"glop\xFF": Crab = false,
+        \\    @"error": Crab = true,
+        \\    @"try": Crab = true,
+        \\    type: @"anyopaque",
+        \\
+        \\    const Crab = bool;
+        \\    const @"anyopaque" = void;
+        \\};
+        \\
+        \\test "unicode" {
+        \\    const @"cąbbäge ⚡" = 2;
+        \\    _ = @"cąbbäge ⚡";
+        \\    const @"\u{01f422} friend\u{f6}" = 4;
+        \\    _ = @"🐢 friendö";
+        \\}
+        \\
+    );
+}
+
+test "zig fmt: canonicalize symbols (asm)" {
+    try testTransform(
+        \\test "asm" {
+        \\    const @"null" = usize;
+        \\    const @"try": usize = 808;
+        \\    const arg: usize = 2;
+        \\    _ = asm volatile ("syscall"
+        \\        : [@"void"] "={rax}" (-> @"null"),
+        \\        : [@"error"] "{rax}" (@"try"),
+        \\          [@"arg1"] "{rdi}" (arg),
+        \\          [arg2] "{rsi}" (arg),
+        \\          [arg3] "{rdx}" (arg),
+        \\        : "rcx", "r11"
+        \\    );
+        \\
+        \\    const @"false": usize = 10;
+        \\    const @"true" = "explode";
+        \\    _ = asm volatile (@"true"
+        \\        : [one] "={rax}" (@"false"),
+        \\        : [two] "{rax}" (@"false"),
+        \\    );
+        \\}
+        \\
+    ,
+        \\test "asm" {
+        \\    const @"null" = usize;
+        \\    const @"try": usize = 808;
+        \\    const arg: usize = 2;
+        \\    _ = asm volatile ("syscall"
+        \\        : [void] "={rax}" (-> @"null"),
+        \\        : [@"error"] "{rax}" (@"try"),
+        \\          [arg1] "{rdi}" (arg),
+        \\          [arg2] "{rsi}" (arg),
+        \\          [arg3] "{rdx}" (arg),
+        \\        : "rcx", "r11"
+        \\    );
+        \\
+        \\    const @"false": usize = 10;
+        \\    const @"true" = "explode";
+        \\    _ = asm volatile (@"true"
+        \\        : [one] "={rax}" (false),
+        \\        : [two] "{rax}" (@"false"),
+        \\    );
+        \\}
+        \\
+    );
+}
+
 test "zig fmt: error for missing sentinel value in sentinel slice" {
     try testError(
         \\const foo = foo[0..:];
lib/std/zig/primitives.zig (new file, 63 lines)
@@ -0,0 +1,63 @@
+const std = @import("std");
+
+/// Set of primitive type and value names.
+/// Does not include `_` or integer type names.
+pub const names = std.ComptimeStringMap(void, .{
+    .{"anyerror"},
+    .{"anyframe"},
+    .{"anyopaque"},
+    .{"bool"},
+    .{"c_int"},
+    .{"c_long"},
+    .{"c_longdouble"},
+    .{"c_longlong"},
+    .{"c_short"},
+    .{"c_uint"},
+    .{"c_ulong"},
+    .{"c_ulonglong"},
+    .{"c_ushort"},
+    .{"comptime_float"},
+    .{"comptime_int"},
+    .{"f128"},
+    .{"f16"},
+    .{"f32"},
+    .{"f64"},
+    .{"f80"},
+    .{"false"},
+    .{"isize"},
+    .{"noreturn"},
+    .{"null"},
+    .{"true"},
+    .{"type"},
+    .{"undefined"},
+    .{"usize"},
+    .{"void"},
+});
+
+/// Returns true if a name matches a primitive type or value, excluding `_`.
+/// Integer type names like `u8` or `i32` are only matched for syntax,
+/// so this will still return true when they have an oversized bit count
+/// or leading zeroes.
+pub fn isPrimitive(name: []const u8) bool {
+    if (names.get(name) != null) return true;
+    if (name.len < 2) return false;
+    const first_c = name[0];
+    if (first_c != 'i' and first_c != 'u') return false;
+    for (name[1..]) |c| switch (c) {
+        '0'...'9' => {},
+        else => return false,
+    };
+    return true;
+}
+
+test "isPrimitive" {
+    const expect = std.testing.expect;
+    try expect(!isPrimitive(""));
+    try expect(!isPrimitive("_"));
+    try expect(!isPrimitive("haberdasher"));
+    try expect(isPrimitive("bool"));
+    try expect(isPrimitive("false"));
+    try expect(isPrimitive("comptime_float"));
+    try expect(isPrimitive("u1"));
+    try expect(isPrimitive("i99999999999999"));
+}
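The formatter combines `std.zig.isValidId` with the new `std.zig.primitives.isPrimitive` to decide whether a quoted declaration name may lose its quotes. A rough standalone sketch of that decision (a hypothetical helper, not part of this commit):

const std = @import("std");

// Mirrors the .preserve_when_shadowing path of renderIdentifier below:
// a quoted name can only be unquoted when it is a valid bare identifier
// (which also excludes keywords and "_") and does not spell a primitive.
fn canDropQuotes(name: []const u8) bool {
    if (!std.zig.isValidId(name)) return false;
    if (std.zig.primitives.isPrimitive(name)) return false;
    return true;
}

test "canDropQuotes sketch" {
    try std.testing.expect(canDropQuotes("hello"));
    try std.testing.expect(!canDropQuotes("error")); // keyword
    try std.testing.expect(!canDropQuotes("u8")); // primitive type
    try std.testing.expect(!canDropQuotes("u0010")); // still matches integer-type syntax
}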
@@ -5,6 +5,7 @@ const Allocator = std.mem.Allocator;
 const meta = std.meta;
 const Ast = std.zig.Ast;
 const Token = std.zig.Token;
+const primitives = std.zig.primitives;
 
 const indent_delta = 4;
 const asm_indent_delta = 2;
@@ -152,8 +153,10 @@ fn renderMember(gpa: Allocator, ais: *Ais, tree: Ast, decl: Ast.Node.Index, spac
             const test_token = main_tokens[decl];
             try renderToken(ais, tree, test_token, .space);
             const test_name_tag = token_tags[test_token + 1];
-            if (test_name_tag == .string_literal or test_name_tag == .identifier) {
-                try renderToken(ais, tree, test_token + 1, .space);
+            switch (test_name_tag) {
+                .string_literal => try renderToken(ais, tree, test_token + 1, .space),
+                .identifier => try renderIdentifier(ais, tree, test_token + 1, .space, .preserve_when_shadowing),
+                else => {},
             }
             try renderExpression(gpa, ais, tree, datas[decl].rhs, space);
         },
@@ -192,11 +195,10 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
             const lexeme = tokenSliceForRender(tree, token_index);
             if (mem.eql(u8, lexeme, "c_void")) {
                 try ais.writer().writeAll("anyopaque");
-            } else {
-                try ais.writer().writeAll(lexeme);
-            }
-
                 return renderSpace(ais, tree, token_index, lexeme.len, space);
+            } else {
+                return renderIdentifier(ais, tree, token_index, space, .preserve_when_shadowing);
+            }
         },
 
         .number_literal,
@@ -226,7 +228,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
         .error_value => {
             try renderToken(ais, tree, main_tokens[node], .none);
             try renderToken(ais, tree, main_tokens[node] + 1, .none);
-            return renderToken(ais, tree, main_tokens[node] + 2, space);
+            return renderIdentifier(ais, tree, main_tokens[node] + 2, space, .eagerly_unquote);
         },
 
         .block_two,
@@ -256,7 +258,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
             try renderToken(ais, tree, defer_token, .space);
             if (payload_token != 0) {
                 try renderToken(ais, tree, payload_token - 1, .none); // |
-                try renderToken(ais, tree, payload_token, .none); // identifier
+                try renderIdentifier(ais, tree, payload_token, .none, .preserve_when_shadowing); // identifier
                 try renderToken(ais, tree, payload_token + 1, .space); // |
             }
             return renderExpression(gpa, ais, tree, expr, space);
@@ -294,7 +296,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
             if (token_tags[fallback_first - 1] == .pipe) {
                 try renderToken(ais, tree, main_token, .space); // catch keyword
                 try renderToken(ais, tree, main_token + 1, .none); // pipe
-                try renderToken(ais, tree, main_token + 2, .none); // payload identifier
+                try renderIdentifier(ais, tree, main_token + 2, .none, .preserve_when_shadowing); // payload identifier
                 try renderToken(ais, tree, main_token + 3, after_op_space); // pipe
             } else {
                 assert(token_tags[fallback_first - 1] == .keyword_catch);
@@ -320,7 +322,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
                 ais.pushIndentOneShot();
             }
 
-            try renderToken(ais, tree, main_token, .none);
+            try renderToken(ais, tree, main_token, .none); // .
 
             // This check ensures that zag() is indented in the following example:
             // const x = foo
@@ -331,7 +333,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
                 ais.pushIndentOneShot();
             }
 
-            return renderToken(ais, tree, field_access.rhs, space);
+            return renderIdentifier(ais, tree, field_access.rhs, space, .eagerly_unquote); // field
         },
 
         .error_union,
@@ -514,11 +516,11 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
             } else if (label_token != 0 and target == 0) {
                 try renderToken(ais, tree, main_token, .space); // break keyword
                 try renderToken(ais, tree, label_token - 1, .none); // colon
-                try renderToken(ais, tree, label_token, space); // identifier
+                try renderIdentifier(ais, tree, label_token, space, .eagerly_unquote); // identifier
             } else if (label_token != 0 and target != 0) {
                 try renderToken(ais, tree, main_token, .space); // break keyword
                 try renderToken(ais, tree, label_token - 1, .none); // colon
-                try renderToken(ais, tree, label_token, .space); // identifier
+                try renderIdentifier(ais, tree, label_token, .space, .eagerly_unquote); // identifier
                 try renderExpression(gpa, ais, tree, target, space);
             }
         },
@@ -529,7 +531,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
             if (label != 0) {
                 try renderToken(ais, tree, main_token, .space); // continue
                 try renderToken(ais, tree, label - 1, .none); // :
-                return renderToken(ais, tree, label, space); // label
+                return renderIdentifier(ais, tree, label, space, .eagerly_unquote); // label
             } else {
                 return renderToken(ais, tree, main_token, space); // continue
             }
@@ -590,7 +592,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
                 // There is exactly one member and no trailing comma or
                 // comments, so render without surrounding spaces: `error{Foo}`
                 try renderToken(ais, tree, lbrace, .none);
-                try renderToken(ais, tree, lbrace + 1, .none); // identifier
+                try renderIdentifier(ais, tree, lbrace + 1, .none, .eagerly_unquote); // identifier
                 return renderToken(ais, tree, rbrace, space);
             } else if (token_tags[rbrace - 1] == .comma) {
                 // There is a trailing comma so render each member on a new line.
@@ -601,7 +603,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
                     if (i > lbrace + 1) try renderExtraNewlineToken(ais, tree, i);
                     switch (token_tags[i]) {
                         .doc_comment => try renderToken(ais, tree, i, .newline),
-                        .identifier => try renderToken(ais, tree, i, .comma),
+                        .identifier => try renderIdentifier(ais, tree, i, .comma, .eagerly_unquote),
                         .comma => {},
                         else => unreachable,
                     }
@@ -615,7 +617,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
                 while (i < rbrace) : (i += 1) {
                     switch (token_tags[i]) {
                         .doc_comment => unreachable, // TODO
-                        .identifier => try renderToken(ais, tree, i, .comma_space),
+                        .identifier => try renderIdentifier(ais, tree, i, .comma_space, .eagerly_unquote),
                         .comma => {},
                         else => unreachable,
                     }
@@ -702,7 +704,7 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
 
         .enum_literal => {
             try renderToken(ais, tree, main_tokens[node] - 1, .none); // .
-            return renderToken(ais, tree, main_tokens[node], space); // name
+            return renderIdentifier(ais, tree, main_tokens[node], space, .eagerly_unquote); // name
        },
 
         .fn_decl => unreachable,
@@ -887,7 +889,7 @@ fn renderAsmOutput(
     const symbolic_name = main_tokens[asm_output];
 
     try renderToken(ais, tree, symbolic_name - 1, .none); // lbracket
-    try renderToken(ais, tree, symbolic_name, .none); // ident
+    try renderIdentifier(ais, tree, symbolic_name, .none, .eagerly_unquote); // ident
     try renderToken(ais, tree, symbolic_name + 1, .space); // rbracket
     try renderToken(ais, tree, symbolic_name + 2, .space); // "constraint"
     try renderToken(ais, tree, symbolic_name + 3, .none); // lparen
@@ -897,7 +899,7 @@ fn renderAsmOutput(
         try renderExpression(gpa, ais, tree, datas[asm_output].lhs, Space.none);
         return renderToken(ais, tree, datas[asm_output].rhs, space); // rparen
     } else {
-        try renderToken(ais, tree, symbolic_name + 4, .none); // ident
+        try renderIdentifier(ais, tree, symbolic_name + 4, .none, .eagerly_unquote); // ident
         return renderToken(ais, tree, symbolic_name + 5, space); // rparen
     }
 }
@@ -916,7 +918,7 @@ fn renderAsmInput(
     const symbolic_name = main_tokens[asm_input];
 
     try renderToken(ais, tree, symbolic_name - 1, .none); // lbracket
-    try renderToken(ais, tree, symbolic_name, .none); // ident
+    try renderIdentifier(ais, tree, symbolic_name, .none, .eagerly_unquote); // ident
     try renderToken(ais, tree, symbolic_name + 1, .space); // rbracket
     try renderToken(ais, tree, symbolic_name + 2, .space); // "constraint"
     try renderToken(ais, tree, symbolic_name + 3, .none); // lparen
@@ -955,7 +957,7 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
         Space.space
     else
         Space.none;
-    try renderToken(ais, tree, var_decl.ast.mut_token + 1, name_space); // name
+    try renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, name_space, .preserve_when_shadowing); // name
 
     if (var_decl.ast.type_node != 0) {
         try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.space); // :
@@ -1055,7 +1057,7 @@ fn renderWhile(gpa: Allocator, ais: *Ais, tree: Ast, while_node: Ast.full.While,
     const token_tags = tree.tokens.items(.tag);
 
     if (while_node.label_token) |label| {
-        try renderToken(ais, tree, label, .none); // label
+        try renderIdentifier(ais, tree, label, .none, .eagerly_unquote); // label
         try renderToken(ais, tree, label + 1, .space); // :
     }
 
@@ -1080,11 +1082,11 @@ fn renderWhile(gpa: Allocator, ais: *Ais, tree: Ast, while_node: Ast.full.While,
                 break :blk payload_token;
             }
         };
-        try renderToken(ais, tree, ident, .none); // identifier
+        try renderIdentifier(ais, tree, ident, .none, .preserve_when_shadowing); // identifier
         const pipe = blk: {
             if (token_tags[ident + 1] == .comma) {
                 try renderToken(ais, tree, ident + 1, .space); // ,
-                try renderToken(ais, tree, ident + 2, .none); // index
+                try renderIdentifier(ais, tree, ident + 2, .none, .preserve_when_shadowing); // index
                 break :blk ident + 3;
             } else {
                 break :blk ident + 1;
@@ -1127,7 +1129,7 @@ fn renderWhile(gpa: Allocator, ais: *Ais, tree: Ast, while_node: Ast.full.While,
     if (while_node.error_token) |error_token| {
         try renderToken(ais, tree, while_node.else_token, .space); // else
         try renderToken(ais, tree, error_token - 1, .none); // |
-        try renderToken(ais, tree, error_token, .none); // identifier
+        try renderIdentifier(ais, tree, error_token, .none, .preserve_when_shadowing); // identifier
         last_else_token = error_token + 1; // |
     }
 
@@ -1163,10 +1165,10 @@ fn renderContainerField(
         try renderToken(ais, tree, t, .space); // comptime
     }
     if (field.ast.type_expr == 0 and field.ast.value_expr == 0) {
-        return renderTokenComma(ais, tree, field.ast.name_token, space); // name
+        return renderIdentifierComma(ais, tree, field.ast.name_token, space, .eagerly_unquote); // name
     }
     if (field.ast.type_expr != 0 and field.ast.value_expr == 0) {
-        try renderToken(ais, tree, field.ast.name_token, .none); // name
+        try renderIdentifier(ais, tree, field.ast.name_token, .none, .eagerly_unquote); // name
         try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
 
         if (field.ast.align_expr != 0) {
@@ -1182,12 +1184,12 @@ fn renderContainerField(
         }
     }
     if (field.ast.type_expr == 0 and field.ast.value_expr != 0) {
-        try renderToken(ais, tree, field.ast.name_token, .space); // name
+        try renderIdentifier(ais, tree, field.ast.name_token, .space, .eagerly_unquote); // name
         try renderToken(ais, tree, field.ast.name_token + 1, .space); // =
         return renderExpressionComma(gpa, ais, tree, field.ast.value_expr, space); // value
     }
 
-    try renderToken(ais, tree, field.ast.name_token, .none); // name
+    try renderIdentifier(ais, tree, field.ast.name_token, .none, .eagerly_unquote); // name
     try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
     try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type
 
@@ -1294,7 +1296,7 @@ fn renderFnProto(gpa: Allocator, ais: *Ais, tree: Ast, fn_proto: Ast.full.FnProt
     const after_fn_token = fn_proto.ast.fn_token + 1;
     const lparen = if (token_tags[after_fn_token] == .identifier) blk: {
         try renderToken(ais, tree, fn_proto.ast.fn_token, .space); // fn
-        try renderToken(ais, tree, after_fn_token, .none); // name
+        try renderIdentifier(ais, tree, after_fn_token, .none, .preserve_when_shadowing); // name
         break :blk after_fn_token + 1;
     } else blk: {
         try renderToken(ais, tree, fn_proto.ast.fn_token, .space); // fn
@@ -1383,7 +1385,7 @@ fn renderFnProto(gpa: Allocator, ais: *Ais, tree: Ast, fn_proto: Ast.full.FnProt
             if (token_tags[last_param_token] == .identifier and
                 token_tags[last_param_token + 1] == .colon)
             {
-                try renderToken(ais, tree, last_param_token, .none); // name
+                try renderIdentifier(ais, tree, last_param_token, .none, .preserve_when_shadowing); // name
                 last_param_token += 1;
                 try renderToken(ais, tree, last_param_token, .space); // :
                 last_param_token += 1;
@@ -1432,7 +1434,7 @@ fn renderFnProto(gpa: Allocator, ais: *Ais, tree: Ast, fn_proto: Ast.full.FnProt
         if (token_tags[last_param_token] == .identifier and
             token_tags[last_param_token + 1] == .colon)
         {
-            try renderToken(ais, tree, last_param_token, .none); // name
+            try renderIdentifier(ais, tree, last_param_token, .none, .preserve_when_shadowing); // name
            last_param_token += 1;
            try renderToken(ais, tree, last_param_token, .space); // :
            last_param_token += 1;
@@ -1545,7 +1547,7 @@ fn renderSwitchCase(
     else
         Space.space;
     const after_arrow_space: Space = if (switch_case.payload_token == null) pre_target_space else .space;
-    try renderToken(ais, tree, switch_case.ast.arrow_token, after_arrow_space);
+    try renderToken(ais, tree, switch_case.ast.arrow_token, after_arrow_space); // =>
 
     if (switch_case.payload_token) |payload_token| {
         try renderToken(ais, tree, payload_token - 1, .none); // pipe
@@ -1553,10 +1555,10 @@ fn renderSwitchCase(
         if (token_tags[payload_token] == .asterisk) {
             try renderToken(ais, tree, payload_token, .none); // asterisk
         }
-        try renderToken(ais, tree, ident, .none); // identifier
+        try renderIdentifier(ais, tree, ident, .none, .preserve_when_shadowing); // identifier
         if (token_tags[ident + 1] == .comma) {
             try renderToken(ais, tree, ident + 1, .space); // ,
-            try renderToken(ais, tree, ident + 2, .none); // identifier
+            try renderIdentifier(ais, tree, ident + 2, .none, .preserve_when_shadowing); // identifier
             try renderToken(ais, tree, ident + 3, pre_target_space); // pipe
         } else {
             try renderToken(ais, tree, ident + 1, pre_target_space); // pipe
@@ -1581,8 +1583,8 @@ fn renderBlock(
     if (token_tags[lbrace - 1] == .colon and
         token_tags[lbrace - 2] == .identifier)
     {
-        try renderToken(ais, tree, lbrace - 2, .none);
-        try renderToken(ais, tree, lbrace - 1, .space);
+        try renderIdentifier(ais, tree, lbrace - 2, .none, .eagerly_unquote); // identifier
+        try renderToken(ais, tree, lbrace - 1, .space); // :
     }
 
     ais.pushIndentNextLine();
@@ -1635,7 +1637,7 @@ fn renderStructInit(
         try renderToken(ais, tree, struct_init.ast.lbrace, .newline);
 
         try renderToken(ais, tree, struct_init.ast.lbrace + 1, .none); // .
-        try renderToken(ais, tree, struct_init.ast.lbrace + 2, .space); // name
+        try renderIdentifier(ais, tree, struct_init.ast.lbrace + 2, .space, .eagerly_unquote); // name
         try renderToken(ais, tree, struct_init.ast.lbrace + 3, .space); // =
         try renderExpression(gpa, ais, tree, struct_init.ast.fields[0], .comma);
 
@@ -1643,7 +1645,7 @@ fn renderStructInit(
             const init_token = tree.firstToken(field_init);
             try renderExtraNewlineToken(ais, tree, init_token - 3);
             try renderToken(ais, tree, init_token - 3, .none); // .
-            try renderToken(ais, tree, init_token - 2, .space); // name
+            try renderIdentifier(ais, tree, init_token - 2, .space, .eagerly_unquote); // name
             try renderToken(ais, tree, init_token - 1, .space); // =
             try renderExpression(gpa, ais, tree, field_init, .comma);
         }
@@ -1656,7 +1658,7 @@ fn renderStructInit(
         for (struct_init.ast.fields) |field_init| {
             const init_token = tree.firstToken(field_init);
             try renderToken(ais, tree, init_token - 3, .none); // .
-            try renderToken(ais, tree, init_token - 2, .space); // name
+            try renderIdentifier(ais, tree, init_token - 2, .space, .eagerly_unquote); // name
             try renderToken(ais, tree, init_token - 1, .space); // =
             try renderExpression(gpa, ais, tree, field_init, .comma_space);
         }
@@ -2310,6 +2312,19 @@ fn renderTokenComma(ais: *Ais, tree: Ast, token: Ast.TokenIndex, space: Space) E
     }
 }
 
+/// Render an identifier, and the comma that follows it, if it is present in the source.
+/// If a comma is present, and `space` is `Space.comma`, render only a single comma.
+fn renderIdentifierComma(ais: *Ais, tree: Ast, token: Ast.TokenIndex, space: Space, quote: QuoteBehavior) Error!void {
+    const token_tags = tree.tokens.items(.tag);
+    const maybe_comma = token + 1;
+    if (token_tags[maybe_comma] == .comma and space != .comma) {
+        try renderIdentifier(ais, tree, token, .none, quote);
+        return renderToken(ais, tree, maybe_comma, space);
+    } else {
+        return renderIdentifier(ais, tree, token, space, quote);
+    }
+}
+
 const Space = enum {
     /// Output the token lexeme only.
     none,
@@ -2377,6 +2392,158 @@ fn renderSpace(ais: *Ais, tree: Ast, token_index: Ast.TokenIndex, lexeme_len: us
     }
 }
 
+const QuoteBehavior = enum {
+    preserve_when_shadowing,
+    eagerly_unquote,
+};
+
+fn renderIdentifier(ais: *Ais, tree: Ast, token_index: Ast.TokenIndex, space: Space, quote: QuoteBehavior) Error!void {
+    const token_tags = tree.tokens.items(.tag);
+    assert(token_tags[token_index] == .identifier);
+    const lexeme = tokenSliceForRender(tree, token_index);
+    if (lexeme[0] != '@') {
+        return renderToken(ais, tree, token_index, space);
+    }
+
+    assert(lexeme.len >= 3);
+    assert(lexeme[0] == '@');
+    assert(lexeme[1] == '\"');
+    assert(lexeme[lexeme.len - 1] == '\"');
+    const contents = lexeme[2 .. lexeme.len - 1]; // inside the @"" quotation
+
+    // Empty name can't be unquoted.
+    if (contents.len == 0) {
+        return renderQuotedIdentifier(ais, tree, token_index, space, false);
+    }
+
+    // Special case for _ which would incorrectly be rejected by isValidId below.
+    if (contents.len == 1 and contents[0] == '_') switch (quote) {
+        .eagerly_unquote => return renderQuotedIdentifier(ais, tree, token_index, space, true),
+        .preserve_when_shadowing => return renderQuotedIdentifier(ais, tree, token_index, space, false),
+    };
+
+    // Scan the entire name for characters that would (after un-escaping) be illegal in a symbol,
+    // i.e. contents don't match: [A-Za-z_][A-Za-z0-9_]*
+    var contents_i: usize = 0;
+    while (contents_i < contents.len) {
+        switch (contents[contents_i]) {
+            '0'...'9' => if (contents_i == 0) return renderQuotedIdentifier(ais, tree, token_index, space, false),
+            'A'...'Z', 'a'...'z', '_' => {},
+            '\\' => {
+                var esc_offset = contents_i;
+                const res = std.zig.string_literal.parseEscapeSequence(contents, &esc_offset);
+                switch (res) {
+                    .success => |char| switch (char) {
+                        '0'...'9' => if (contents_i == 0) return renderQuotedIdentifier(ais, tree, token_index, space, false),
+                        'A'...'Z', 'a'...'z', '_' => {},
+                        else => return renderQuotedIdentifier(ais, tree, token_index, space, false),
+                    },
+                    .failure => return renderQuotedIdentifier(ais, tree, token_index, space, false),
+                }
+                contents_i += esc_offset;
+                continue;
+            },
+            else => return renderQuotedIdentifier(ais, tree, token_index, space, false),
+        }
+        contents_i += 1;
+    }
+
+    // Read enough of the name (while un-escaping) to determine if it's a keyword or primitive.
+    // If it's too long to fit in this buffer, we know it's neither and quoting is unnecessary.
+    // If we read the whole thing, we have to do further checks.
+    const longest_keyword_or_primitive_len = comptime blk: {
+        var longest = 0;
+        for (primitives.names.kvs) |kv| {
+            if (kv.key.len > longest) longest = kv.key.len;
+        }
+        for (std.zig.Token.keywords.kvs) |kv| {
+            if (kv.key.len > longest) longest = kv.key.len;
+        }
+        break :blk longest;
+    };
+    var buf: [longest_keyword_or_primitive_len]u8 = undefined;
+
+    contents_i = 0;
+    var buf_i: usize = 0;
+    while (contents_i < contents.len and buf_i < longest_keyword_or_primitive_len) {
+        if (contents[contents_i] == '\\') {
+            const res = std.zig.string_literal.parseEscapeSequence(contents, &contents_i).success;
+            buf[buf_i] = @intCast(u8, res);
+            buf_i += 1;
+        } else {
+            buf[buf_i] = contents[contents_i];
+            contents_i += 1;
+            buf_i += 1;
+        }
+    }
+
+    // We read the whole thing, so it could be a keyword or primitive.
+    if (contents_i == contents.len) {
+        if (!std.zig.isValidId(buf[0..buf_i])) {
+            return renderQuotedIdentifier(ais, tree, token_index, space, false);
+        }
+        if (primitives.isPrimitive(buf[0..buf_i])) switch (quote) {
+            .eagerly_unquote => return renderQuotedIdentifier(ais, tree, token_index, space, true),
+            .preserve_when_shadowing => return renderQuotedIdentifier(ais, tree, token_index, space, false),
+        };
+    }
+
+    try renderQuotedIdentifier(ais, tree, token_index, space, true);
+}
+
+// Renders a @"" quoted identifier, normalizing escapes.
+// Unnecessary escapes are un-escaped, and \u escapes are normalized to \x when they fit.
+// If unquote is true, the @"" is removed and the result is a bare symbol whose validity is asserted.
+fn renderQuotedIdentifier(ais: *Ais, tree: Ast, token_index: Ast.TokenIndex, space: Space, comptime unquote: bool) !void {
+    const token_tags = tree.tokens.items(.tag);
+    assert(token_tags[token_index] == .identifier);
+    const lexeme = tokenSliceForRender(tree, token_index);
+    assert(lexeme.len >= 3 and lexeme[0] == '@');
+
+    if (!unquote) try ais.writer().writeAll("@\"");
+    const contents = lexeme[2 .. lexeme.len - 1];
+    try renderIdentifierContents(ais.writer(), contents);
+    if (!unquote) try ais.writer().writeByte('\"');
+
+    try renderSpace(ais, tree, token_index, lexeme.len, space);
+}
+
+fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
+    var pos: usize = 0;
+    while (pos < bytes.len) {
+        const byte = bytes[pos];
+        switch (byte) {
+            '\\' => {
+                const old_pos = pos;
+                const res = std.zig.string_literal.parseEscapeSequence(bytes, &pos);
+                const escape_sequence = bytes[old_pos..pos];
+                switch (res) {
+                    .success => |codepoint| {
+                        if (codepoint <= 0x7f) {
+                            const buf = [1]u8{@intCast(u8, codepoint)};
+                            try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)});
+                        } else {
+                            try writer.writeAll(escape_sequence);
+                        }
+                    },
+                    .failure => {
+                        try writer.writeAll(escape_sequence);
+                    },
+                }
+            },
+            0x00...('\\' - 1), ('\\' + 1)...0x7f => {
+                const buf = [1]u8{@intCast(u8, byte)};
+                try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)});
+                pos += 1;
+            },
+            0x80...0xff => {
+                try writer.writeByte(byte);
+                pos += 1;
+            },
+        }
+    }
+}
+
 /// Returns true if there exists a line comment between any of the tokens from
 /// `start_token` to `end_token`. This is used to determine if e.g. a
 /// fn_proto should be wrapped and have a trailing comma inserted even if
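To summarize the two QuoteBehavior modes introduced above: positions that cannot shadow anything (field names, enum literals, error-set members, labels) use .eagerly_unquote, while declarations, parameters and payload names use .preserve_when_shadowing. An illustration of the difference, based on the new tests (annotations are mine, not literal tool output):

// Input:
const @"anyopaque" = struct {
    @"u8": bool = true, // field name
};
const @"u8" = u16; // declaration name

// After `zig fmt`:
const @"anyopaque" = struct {
    u8: bool = true, // field position: eagerly unquoted even though u8 is a primitive
};
const @"u8" = u16; // declaration kept quoted, since unquoting would shadow a primitive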
@@ -63,7 +63,7 @@ pub fn parseCharLiteral(slice: []const u8) ParsedCharLiteral {
 
 /// Parse an escape sequence from `slice[offset..]`. If parsing is successful,
 /// offset is updated to reflect the characters consumed.
-fn parseEscapeSequence(slice: []const u8, offset: *usize) ParsedCharLiteral {
+pub fn parseEscapeSequence(slice: []const u8, offset: *usize) ParsedCharLiteral {
     assert(slice.len > offset.*);
     assert(slice[offset.*] == '\\');
 
@@ -274,12 +274,50 @@ pub fn parseAlloc(allocator: std.mem.Allocator, bytes: []const u8) ParseError![]
     var buf = std.ArrayList(u8).init(allocator);
     defer buf.deinit();
 
-    switch (try parseAppend(&buf, bytes)) {
+    switch (try parseWrite(buf.writer(), bytes)) {
         .success => return buf.toOwnedSlice(),
         .failure => return error.InvalidLiteral,
     }
 }
 
+/// Parses `bytes` as a Zig string literal and writes the result to the std.io.Writer type.
+/// Asserts `bytes` has '"' at beginning and end.
+pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result {
+    assert(bytes.len >= 2 and bytes[0] == '"' and bytes[bytes.len - 1] == '"');
+
+    var index: usize = 1;
+    while (true) {
+        const b = bytes[index];
+
+        switch (b) {
+            '\\' => {
+                const escape_char_index = index + 1;
+                const result = parseEscapeSequence(bytes, &index);
+                switch (result) {
+                    .success => |codepoint| {
+                        if (bytes[escape_char_index] == 'u') {
+                            var buf: [3]u8 = undefined;
+                            const len = utf8Encode(codepoint, &buf) catch {
+                                return Result{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } };
+                            };
+                            try writer.writeAll(buf[0..len]);
+                        } else {
+                            try writer.writeByte(@intCast(u8, codepoint));
+                        }
+                    },
+                    .failure => |err| return Result{ .failure = err },
+                }
+            },
+            '\n' => return Result{ .failure = .{ .invalid_character = index } },
+            '"' => return Result.success,
+            else => {
+                try writer.writeByte(b);
+                index += 1;
+            },
+        }
+    } else unreachable; // TODO should not need else unreachable on while(true)
+}
+
 test "parse" {
     const expect = std.testing.expect;
     const expectError = std.testing.expectError;
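The new parseWrite lets callers stream a parsed string literal into any writer whose write error is `error{OutOfMemory}`, for example an ArrayList(u8) writer, which is exactly how parseAlloc above now uses it. A small usage sketch (illustration only, not part of the commit):

const std = @import("std");
const string_literal = std.zig.string_literal;

test "parseWrite usage sketch" {
    var out = std.ArrayList(u8).init(std.testing.allocator);
    defer out.deinit();

    // `bytes` must include the surrounding double quotes, just like for parseAlloc.
    const result = try string_literal.parseWrite(out.writer(), "\"h\\x65llo\"");
    try std.testing.expect(result == .success);
    try std.testing.expectEqualStrings("hello", out.items);
}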
@@ -10,6 +10,8 @@ const ArrayListUnmanaged = std.ArrayListUnmanaged;
 const StringIndexAdapter = std.hash_map.StringIndexAdapter;
 const StringIndexContext = std.hash_map.StringIndexContext;
 
+const isPrimitive = std.zig.primitives.isPrimitive;
+
 const Zir = @import("Zir.zig");
 const refToIndex = Zir.refToIndex;
 const indexToRef = Zir.indexToRef;
@@ -4237,33 +4239,7 @@ fn testDecl(
 
     // if not @"" syntax, just use raw token slice
     if (ident_name_raw[0] != '@') {
-        if (primitives.get(ident_name_raw)) |_| return astgen.failTok(test_name_token, "cannot test a primitive", .{});
-
-        if (ident_name_raw.len >= 2) integer: {
-            const first_c = ident_name_raw[0];
-            if (first_c == 'i' or first_c == 'u') {
-                _ = switch (first_c == 'i') {
-                    true => .signed,
-                    false => .unsigned,
-                };
-                if (ident_name_raw.len >= 3 and ident_name_raw[1] == '0') {
-                    return astgen.failTok(
-                        test_name_token,
-                        "primitive integer type '{s}' has leading zero",
-                        .{ident_name_raw},
-                    );
-                }
-                _ = parseBitCount(ident_name_raw[1..]) catch |err| switch (err) {
-                    error.Overflow => return astgen.failTok(
-                        test_name_token,
-                        "primitive integer type '{s}' exceeds maximum bit width of 65535",
-                        .{ident_name_raw},
-                    ),
-                    error.InvalidCharacter => break :integer,
-                };
-                return astgen.failTok(test_name_token, "cannot test a primitive", .{});
-            }
-        }
+        if (isPrimitive(ident_name_raw)) return astgen.failTok(test_name_token, "cannot test a primitive", .{});
     }
 
     // Local variables, including function parameters.
@@ -7108,7 +7084,7 @@ fn identifier(
 
     // if not @"" syntax, just use raw token slice
     if (ident_name_raw[0] != '@') {
-        if (primitives.get(ident_name_raw)) |zir_const_ref| {
+        if (primitive_instrs.get(ident_name_raw)) |zir_const_ref| {
             return rvalue(gz, ri, zir_const_ref, ident);
         }
 
@@ -8751,7 +8727,7 @@ fn calleeExpr(
     }
 }
 
-const primitives = std.ComptimeStringMap(Zir.Inst.Ref, .{
+const primitive_instrs = std.ComptimeStringMap(Zir.Inst.Ref, .{
     .{ "anyerror", .anyerror_type },
     .{ "anyframe", .anyframe_type },
     .{ "anyopaque", .anyopaque_type },
@@ -8795,6 +8771,21 @@ const primitives = std.ComptimeStringMap(Zir.Inst.Ref, .{
     .{ "void", .void_type },
 });
 
+comptime {
+    // These checks ensure that std.zig.primitives stays in synce with the primitive->Zir map.
+    const primitives = std.zig.primitives;
+    for (primitive_instrs.kvs) |kv| {
+        if (!primitives.isPrimitive(kv.key)) {
+            @compileError("std.zig.isPrimitive() is not aware of Zir instr '" ++ @tagName(kv.value) ++ "'");
+        }
+    }
+    for (primitives.names.kvs) |kv| {
+        if (primitive_instrs.get(kv.key) == null) {
+            @compileError("std.zig.primitives entry '" ++ kv.key ++ "' does not have a corresponding Zir instr");
+        }
+    }
+}
+
 fn nodeMayNeedMemoryLocation(tree: *const Ast, start_node: Ast.Node.Index, have_res_ty: bool) bool {
     const node_tags = tree.nodes.items(.tag);
     const node_datas = tree.nodes.items(.data);
@@ -9458,7 +9449,7 @@ fn nodeImpliesMoreThanOnePossibleValue(tree: *const Ast, start_node: Ast.Node.In
         .identifier => {
             const main_tokens = tree.nodes.items(.main_token);
             const ident_bytes = tree.tokenSlice(main_tokens[node]);
-            if (primitives.get(ident_bytes)) |primitive| switch (primitive) {
+            if (primitive_instrs.get(ident_bytes)) |primitive| switch (primitive) {
                 .anyerror_type,
                 .anyframe_type,
                 .anyopaque_type,
@@ -9702,7 +9693,7 @@ fn nodeImpliesComptimeOnly(tree: *const Ast, start_node: Ast.Node.Index) bool {
         .identifier => {
             const main_tokens = tree.nodes.items(.main_token);
             const ident_bytes = tree.tokenSlice(main_tokens[node]);
-            if (primitives.get(ident_bytes)) |primitive| switch (primitive) {
+            if (primitive_instrs.get(ident_bytes)) |primitive| switch (primitive) {
                 .anyerror_type,
                 .anyframe_type,
                 .anyopaque_type,
@@ -12045,19 +12036,6 @@ fn nullTerminatedString(astgen: AstGen, index: usize) [*:0]const u8 {
     return @ptrCast([*:0]const u8, astgen.string_bytes.items.ptr) + index;
 }
 
-pub fn isPrimitive(name: []const u8) bool {
-    if (primitives.get(name) != null) return true;
-    if (name.len < 2) return false;
-    const first_c = name[0];
-    if (first_c != 'i' and first_c != 'u') return false;
-    if (parseBitCount(name[1..])) |_| {
-        return true;
-    } else |err| switch (err) {
-        error.Overflow => return true,
-        error.InvalidCharacter => return false,
-    }
-}
-
 /// Local variables shadowing detection, including function parameters.
 fn detectLocalShadowing(
     astgen: *AstGen,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
Stage1ZirInst *value = nullptr;
|
||||||
|
if (buf_eql_str(variable_name, "null")) {
|
||||||
|
value = ir_build_const_null(ag, scope, node);
|
||||||
|
} else if (buf_eql_str(variable_name, "true")) {
|
||||||
|
value = ir_build_const_bool(ag, scope, node, true);
|
||||||
|
} else if (buf_eql_str(variable_name, "false")) {
|
||||||
|
value = ir_build_const_bool(ag, scope, node, false);
|
||||||
|
} else if (buf_eql_str(variable_name, "undefined")) {
|
||||||
|
value = ir_build_const_undefined(ag, scope, node);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value != nullptr) {
|
||||||
|
if (lval == LValPtr || lval == LValAssign) {
|
||||||
|
return ir_build_ref_src(ag, scope, node, value);
|
||||||
|
} else {
|
||||||
|
return ir_expr_wrap(ag, scope, value, result_loc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
ZigType *primitive_type;
|
ZigType *primitive_type;
|
||||||
if ((err = get_primitive_type(ag->codegen, variable_name, &primitive_type))) {
|
if ((err = get_primitive_type(ag->codegen, variable_name, &primitive_type))) {
|
||||||
if (err == ErrorOverflow) {
|
if (err == ErrorOverflow) {
|
||||||
|
|
|
||||||
|
|
@@ -1617,11 +1617,7 @@ static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
 //      / INTEGER
 //      / KEYWORD_comptime TypeExpr
 //      / KEYWORD_error DOT IDENTIFIER
-//      / KEYWORD_false
-//      / KEYWORD_null
 //      / KEYWORD_promise
-//      / KEYWORD_true
-//      / KEYWORD_undefined
 //      / KEYWORD_unreachable
 //      / STRINGLITERAL
 //      / SwitchExpr
@@ -827,7 +827,7 @@ const Context = struct {
     }
 
     fn addIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex {
-        if (@import("../AstGen.zig").isPrimitive(bytes))
+        if (std.zig.primitives.isPrimitive(bytes))
             return c.addTokenFmt(.identifier, "@\"{s}\"", .{bytes});
         return c.addTokenFmt(.identifier, "{s}", .{std.zig.fmtId(bytes)});
     }