tools: Unbreak many tools

Many tools were broken after the recent hash-table refactorings; fix
them and ensure they won't silently break again.
LemonBoy 2021-06-13 20:36:51 +02:00 committed by Andrew Kelley
parent 37f36da391
commit ff79b87fa0
5 changed files with 46 additions and 30 deletions
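
The breakage traces back to the std hash-table refactor: map iterators no longer yield entries with `key`/`value` fields but with `key_ptr`/`value_ptr` pointers, `getPtr` gives direct pointer access to a stored value, and custom hash/eql functions moved into context types, as the hunks below show. A minimal sketch of the new entry API (illustrative only, not part of this commit):

    const std = @import("std");

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        var map = std.AutoHashMap(u32, u32).init(&gpa.allocator);
        defer map.deinit();

        try map.put(1, 100);

        var it = map.iterator();
        while (it.next()) |kv| {
            // Previously: kv.key and kv.value; entries now hold pointers.
            std.debug.print("{d} -> {d}\n", .{ kv.key_ptr.*, kv.value_ptr.* });
        }

        // getPtr returns ?*V, allowing in-place mutation.
        if (map.getPtr(1)) |value| value.* += 1;
    }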

test/standalone.zig

@@ -32,4 +32,12 @@ pub fn addCases(cases: *tests.StandaloneContext) void {
         cases.addBuildFile("test/stage1/c_abi/build.zig", .{});
     }
     cases.addBuildFile("test/standalone/c_compiler/build.zig", .{ .build_modes = true, .cross_targets = true });
+
+    // Ensure the development tools are buildable.
+    cases.add("tools/gen_spirv_spec.zig");
+    cases.add("tools/gen_stubs.zig");
+    cases.add("tools/update_clang_options.zig");
+    cases.add("tools/update_cpu_features.zig");
+    cases.add("tools/update_glibc.zig");
+    cases.add("tools/update_spirv_features.zig");
 }
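
With the tools registered as standalone cases, the test harness compiles each of them on every run (presumably via the `test-standalone` build step), so the next std API churn fails CI loudly instead of silently bit-rotting the tools.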

tools/gen_spirv_spec.zig

@@ -14,6 +14,9 @@ pub fn main() !void {
     const spec_path = args[1];
     const spec = try std.fs.cwd().readFileAlloc(allocator, spec_path, std.math.maxInt(usize));
 
+    // Required for json parsing.
+    @setEvalBranchQuota(10000);
+
     var tokens = std.json.TokenStream.init(spec);
     var registry = try std.json.parse(g.Registry, &tokens, .{ .allocator = allocator });
 
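
The quota bump is needed because std.json.parse instantiates its parsing logic for the target type at compile time, and a type as large as g.Registry exceeds the default quota of 1000 backward branches. A minimal sketch of the same pattern, with a hypothetical stand-in Config type:

    const std = @import("std");

    const Config = struct {
        name: []const u8,
        version: u32,
    };

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        const allocator = &gpa.allocator;

        // Without this, a sufficiently complex target type fails with
        // "evaluation exceeded 1000 backwards branches".
        @setEvalBranchQuota(10000);
        var tokens = std.json.TokenStream.init(
            \\{"name": "example", "version": 1}
        );
        const config = try std.json.parse(Config, &tokens, .{ .allocator = allocator });
        defer std.json.parseFree(Config, config, .{ .allocator = allocator });

        std.debug.print("{s} v{d}\n", .{ config.name, config.version });
    }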

tools/update_glibc.zig

@@ -234,17 +234,19 @@ const DestTarget = struct {
     os: OsTag,
     abi: Abi,
 
-    fn hash(a: DestTarget) u32 {
-        return @enumToInt(a.arch) +%
-            (@enumToInt(a.os) *% @as(u32, 4202347608)) +%
-            (@enumToInt(a.abi) *% @as(u32, 4082223418));
-    }
+    const HashContext = struct {
+        pub fn hash(self: @This(), a: DestTarget) u32 {
+            return @enumToInt(a.arch) +%
+                (@enumToInt(a.os) *% @as(u32, 4202347608)) +%
+                (@enumToInt(a.abi) *% @as(u32, 4082223418));
+        }
 
-    fn eql(a: DestTarget, b: DestTarget) bool {
-        return a.arch.eql(b.arch) and
-            a.os == b.os and
-            a.abi == b.abi;
-    }
+        pub fn eql(self: @This(), a: DestTarget, b: DestTarget) bool {
+            return a.arch.eql(b.arch) and
+                a.os == b.os and
+                a.abi == b.abi;
+        }
+    };
 };
 
 const Contents = struct {
@@ -259,7 +261,7 @@ const Contents = struct {
 };
 
 const HashToContents = std.StringHashMap(Contents);
-const TargetToHash = std.ArrayHashMap(DestTarget, []const u8, DestTarget.hash, DestTarget.eql, true);
+const TargetToHash = std.ArrayHashMap(DestTarget, []const u8, DestTarget.HashContext, true);
 const PathTable = std.StringHashMap(*TargetToHash);
 
 const LibCVendor = enum {
@@ -423,9 +425,9 @@ pub fn main() !void {
     while (path_it.next()) |path_kv| {
         var contents_list = std.ArrayList(*Contents).init(allocator);
         {
-            var hash_it = path_kv.value.*.iterator();
+            var hash_it = path_kv.value_ptr.*.iterator();
             while (hash_it.next()) |hash_kv| {
-                const contents = hash_to_contents.get(hash_kv.value.*).?;
+                const contents = hash_to_contents.getPtr(hash_kv.value_ptr.*).?;
                 try contents_list.append(contents);
             }
         }
@@ -433,7 +435,7 @@ pub fn main() !void {
         const best_contents = contents_list.popOrNull().?;
         if (best_contents.hit_count > 1) {
             // worth it to make it generic
-            const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, generic_name, path_kv.key.* });
+            const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, generic_name, path_kv.key_ptr.* });
             try std.fs.cwd().makePath(std.fs.path.dirname(full_path).?);
             try std.fs.cwd().writeFile(full_path, best_contents.bytes);
             best_contents.is_generic = true;
@@ -443,17 +445,17 @@ pub fn main() !void {
                     missed_opportunity_bytes += this_missed_bytes;
                     std.debug.warn("Missed opportunity ({:2}): {s}\n", .{
                         std.fmt.fmtIntSizeDec(this_missed_bytes),
-                        path_kv.key.*,
+                        path_kv.key_ptr.*,
                     });
                 } else break;
             }
         }
 
-        var hash_it = path_kv.value.*.iterator();
+        var hash_it = path_kv.value_ptr.*.iterator();
         while (hash_it.next()) |hash_kv| {
-            const contents = hash_to_contents.get(hash_kv.value.*).?;
+            const contents = hash_to_contents.get(hash_kv.value_ptr.*).?;
             if (contents.is_generic) continue;
-            const dest_target = hash_kv.key.*;
+            const dest_target = hash_kv.key_ptr.*;
             const arch_name = switch (dest_target.arch) {
                 .specific => |a| @tagName(a),
                 else => @tagName(dest_target.arch),
@@ -463,7 +465,7 @@ pub fn main() !void {
                 @tagName(dest_target.os),
                 @tagName(dest_target.abi),
             });
-            const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, out_subpath, path_kv.key.* });
+            const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, out_subpath, path_kv.key_ptr.* });
             try std.fs.cwd().makePath(std.fs.path.dirname(full_path).?);
             try std.fs.cwd().writeFile(full_path, contents.bytes);
         }
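
The TargetToHash change above reflects the other half of the refactor: std.ArrayHashMap now takes a single context type supplying `hash` (returning u32 for array hash maps) and `eql` as methods, rather than two bare functions. A sketch of the pattern with an illustrative Point key:

    const std = @import("std");

    const Point = struct { x: i32, y: i32 };

    const PointContext = struct {
        pub fn hash(self: @This(), p: Point) u32 {
            _ = self;
            return @bitCast(u32, p.x) ^ (@bitCast(u32, p.y) *% 31);
        }
        pub fn eql(self: @This(), a: Point, b: Point) bool {
            _ = self;
            return a.x == b.x and a.y == b.y;
        }
    };

    // The trailing `true` stores each entry's hash, as TargetToHash does.
    const PointMap = std.ArrayHashMap(Point, []const u8, PointContext, true);

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        var map = PointMap.init(&gpa.allocator);
        defer map.deinit();

        try map.put(.{ .x = 1, .y = 2 }, "one-two");
        std.debug.print("{s}\n", .{map.get(.{ .x = 1, .y = 2 }).?});
    }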

tools/update_cpu_features.zig

@@ -902,19 +902,19 @@ fn processOneTarget(job: Job) anyerror!void {
     {
         var it = root_map.iterator();
         root_it: while (it.next()) |kv| {
-            if (kv.key.len == 0) continue;
-            if (kv.key.*[0] == '!') continue;
-            if (kv.value.* != .Object) continue;
-            if (hasSuperclass(&kv.value.Object, "SubtargetFeature")) {
-                const llvm_name = kv.value.Object.get("Name").?.String;
+            if (kv.key_ptr.len == 0) continue;
+            if (kv.key_ptr.*[0] == '!') continue;
+            if (kv.value_ptr.* != .Object) continue;
+            if (hasSuperclass(&kv.value_ptr.Object, "SubtargetFeature")) {
+                const llvm_name = kv.value_ptr.Object.get("Name").?.String;
                 if (llvm_name.len == 0) continue;
 
                 var zig_name = try llvmNameToZigName(arena, llvm_name);
-                var desc = kv.value.Object.get("Desc").?.String;
+                var desc = kv.value_ptr.Object.get("Desc").?.String;
                 var deps = std.ArrayList([]const u8).init(arena);
                 var omit = false;
                 var flatten = false;
-                const implies = kv.value.Object.get("Implies").?.Array;
+                const implies = kv.value_ptr.Object.get("Implies").?.Array;
                 for (implies.items) |imply| {
                     const other_key = imply.Object.get("def").?.String;
                     const other_obj = &root_map.getPtr(other_key).?.Object;
@@ -960,13 +960,13 @@ fn processOneTarget(job: Job) anyerror!void {
                     try all_features.append(feature);
                 }
             }
-            if (hasSuperclass(&kv.value.Object, "Processor")) {
-                const llvm_name = kv.value.Object.get("Name").?.String;
+            if (hasSuperclass(&kv.value_ptr.Object, "Processor")) {
+                const llvm_name = kv.value_ptr.Object.get("Name").?.String;
                 if (llvm_name.len == 0) continue;
 
                 var zig_name = try llvmNameToZigName(arena, llvm_name);
                 var deps = std.ArrayList([]const u8).init(arena);
-                const features = kv.value.Object.get("Features").?.Array;
+                const features = kv.value_ptr.Object.get("Features").?.Array;
                 for (features.items) |feature| {
                     const feature_key = feature.Object.get("def").?.String;
                     const feature_obj = &root_map.getPtr(feature_key).?.Object;
@@ -979,7 +979,7 @@ fn processOneTarget(job: Job) anyerror!void {
                     )) orelse continue;
                     try deps.append(feature_zig_name);
                 }
-                const tune_features = kv.value.Object.get("TuneFeatures").?.Array;
+                const tune_features = kv.value_ptr.Object.get("TuneFeatures").?.Array;
                 for (tune_features.items) |feature| {
                     const feature_key = feature.Object.get("def").?.String;
                     const feature_obj = &root_map.getPtr(feature_key).?.Object;
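
These hunks all apply the same entry-pointer migration: `kv.value_ptr.Object` dereferences the entry's value pointer into the json Value union, so hasSuperclass receives a pointer into the map rather than a copy. A self-contained sketch of the iteration pattern, with hypothetical JSON input:

    const std = @import("std");

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        var parser = std.json.Parser.init(&gpa.allocator, false);
        defer parser.deinit();

        var tree = try parser.parse(
            \\{"Feat": {"Name": "feat"}, "!skip": {}}
        );
        defer tree.deinit();

        const root_map = &tree.root.Object;
        var it = root_map.iterator();
        while (it.next()) |kv| {
            if (kv.key_ptr.*[0] == '!') continue;
            if (kv.value_ptr.* != .Object) continue;
            // Take the payload by pointer, mirroring &kv.value_ptr.Object above.
            const obj = &kv.value_ptr.Object;
            std.debug.print("{s}: {s}\n", .{ kv.key_ptr.*, obj.get("Name").?.String });
        }
    }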

tools/update_spirv_features.zig

@@ -68,6 +68,9 @@ pub fn main() !void {
         usageAndExit(std.io.getStdErr(), args[0], 1);
     }
 
+    // Required for json parsing.
+    @setEvalBranchQuota(10000);
+
     const registry_path = try fs.path.join(allocator, &.{ spirv_headers_root, "include", "spirv", "unified1", "spirv.core.grammar.json" });
     const registry_json = try std.fs.cwd().readFileAlloc(allocator, registry_path, std.math.maxInt(usize));
     var tokens = std.json.TokenStream.init(registry_json);