author    LemonBoy <thatlemon@gmail.com>  2021-06-13 20:36:51 +0200
committer Andrew Kelley <andrew@ziglang.org>  2021-06-13 15:25:18 -0400
commit    ff79b87fa062d602722a6ec17f74550cda42a624 (patch)
tree      a08a33c1f2160724fc5436ad6f527fa1c4264aad
parent    37f36da391570739e639f857bef3de1170eaac50 (diff)
tools: Unbreak many tools
Many tools were broken after the recent hash-table refactorings; fix them and ensure they won't silently break again.
-rw-r--r--  test/standalone.zig              8
-rw-r--r--  tools/gen_spirv_spec.zig         3
-rw-r--r--  tools/process_headers.zig       40
-rw-r--r--  tools/update_cpu_features.zig   22
-rw-r--r--  tools/update_spirv_features.zig  3
5 files changed, 46 insertions, 30 deletions
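
For context before the diff: the hash-table refactoring the commit message refers to replaced the separate hash/eql function parameters of std.ArrayHashMap with a single context type whose hash/eql are methods, and changed iterator entries to expose key_ptr/value_ptr instead of key/value. The following is a minimal sketch of that post-refactor API, written against the std library of this commit's era (mid-2021) and not part of the patch itself; Point, PointContext, and PointMap are illustrative names only.

const std = @import("std");

const Point = struct { x: u32, y: u32 };

// hash/eql are now methods on a context type passed as one comptime parameter,
// replacing the old pair of bare hash/eql function parameters.
const PointContext = struct {
    pub fn hash(self: @This(), p: Point) u32 {
        _ = self;
        return p.x *% 2654435761 +% p.y;
    }
    pub fn eql(self: @This(), a: Point, b: Point) bool {
        _ = self;
        return a.x == b.x and a.y == b.y;
    }
};

// Old form: std.ArrayHashMap(Point, []const u8, hashFn, eqlFn, true)
// New form: key, value, context type, store_hash flag.
const PointMap = std.ArrayHashMap(Point, []const u8, PointContext, true);

pub fn main() !void {
    var map = PointMap.init(std.heap.page_allocator);
    defer map.deinit();

    try map.put(.{ .x = 1, .y = 2 }, "a");

    // Iterator entries now hand out pointers into the map's storage:
    // key_ptr/value_ptr replace the old key/value fields.
    var it = map.iterator();
    while (it.next()) |entry| {
        std.debug.print("({d},{d}) -> {s}\n", .{
            entry.key_ptr.x, entry.key_ptr.y, entry.value_ptr.*,
        });
    }
}

The diff below applies exactly these two changes to each tool: DestTarget grows a HashContext type for the ArrayHashMap in process_headers.zig, and every iterator consumer switches from .key/.value to .key_ptr/.value_ptr.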
diff --git a/test/standalone.zig b/test/standalone.zig
index 77f15db286..4ac046de0c 100644
--- a/test/standalone.zig
+++ b/test/standalone.zig
@@ -32,4 +32,12 @@ pub fn addCases(cases: *tests.StandaloneContext) void {
cases.addBuildFile("test/stage1/c_abi/build.zig", .{});
}
cases.addBuildFile("test/standalone/c_compiler/build.zig", .{ .build_modes = true, .cross_targets = true });
+
+ // Ensure the development tools are buildable.
+ cases.add("tools/gen_spirv_spec.zig");
+ cases.add("tools/gen_stubs.zig");
+ cases.add("tools/update_clang_options.zig");
+ cases.add("tools/update_cpu_features.zig");
+ cases.add("tools/update_glibc.zig");
+ cases.add("tools/update_spirv_features.zig");
}
diff --git a/tools/gen_spirv_spec.zig b/tools/gen_spirv_spec.zig
index f00bd1884b..0480866867 100644
--- a/tools/gen_spirv_spec.zig
+++ b/tools/gen_spirv_spec.zig
@@ -14,6 +14,9 @@ pub fn main() !void {
const spec_path = args[1];
const spec = try std.fs.cwd().readFileAlloc(allocator, spec_path, std.math.maxInt(usize));
+ // Required for json parsing.
+ @setEvalBranchQuota(10000);
+
var tokens = std.json.TokenStream.init(spec);
var registry = try std.json.parse(g.Registry, &tokens, .{ .allocator = allocator });
diff --git a/tools/process_headers.zig b/tools/process_headers.zig
index da7ab11143..c087688732 100644
--- a/tools/process_headers.zig
+++ b/tools/process_headers.zig
@@ -234,17 +234,19 @@ const DestTarget = struct {
os: OsTag,
abi: Abi,
- fn hash(a: DestTarget) u32 {
- return @enumToInt(a.arch) +%
- (@enumToInt(a.os) *% @as(u32, 4202347608)) +%
- (@enumToInt(a.abi) *% @as(u32, 4082223418));
- }
+ const HashContext = struct {
+ pub fn hash(self: @This(), a: DestTarget) u32 {
+ return @enumToInt(a.arch) +%
+ (@enumToInt(a.os) *% @as(u32, 4202347608)) +%
+ (@enumToInt(a.abi) *% @as(u32, 4082223418));
+ }
- fn eql(a: DestTarget, b: DestTarget) bool {
- return a.arch.eql(b.arch) and
- a.os == b.os and
- a.abi == b.abi;
- }
+ pub fn eql(self: @This(), a: DestTarget, b: DestTarget) bool {
+ return a.arch.eql(b.arch) and
+ a.os == b.os and
+ a.abi == b.abi;
+ }
+ };
};
const Contents = struct {
@@ -259,7 +261,7 @@ const Contents = struct {
};
const HashToContents = std.StringHashMap(Contents);
-const TargetToHash = std.ArrayHashMap(DestTarget, []const u8, DestTarget.hash, DestTarget.eql, true);
+const TargetToHash = std.ArrayHashMap(DestTarget, []const u8, DestTarget.HashContext, true);
const PathTable = std.StringHashMap(*TargetToHash);
const LibCVendor = enum {
@@ -423,9 +425,9 @@ pub fn main() !void {
while (path_it.next()) |path_kv| {
var contents_list = std.ArrayList(*Contents).init(allocator);
{
- var hash_it = path_kv.value.*.iterator();
+ var hash_it = path_kv.value_ptr.*.iterator();
while (hash_it.next()) |hash_kv| {
- const contents = hash_to_contents.get(hash_kv.value.*).?;
+ const contents = hash_to_contents.getPtr(hash_kv.value_ptr.*).?;
try contents_list.append(contents);
}
}
@@ -433,7 +435,7 @@ pub fn main() !void {
const best_contents = contents_list.popOrNull().?;
if (best_contents.hit_count > 1) {
// worth it to make it generic
- const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, generic_name, path_kv.key.* });
+ const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, generic_name, path_kv.key_ptr.* });
try std.fs.cwd().makePath(std.fs.path.dirname(full_path).?);
try std.fs.cwd().writeFile(full_path, best_contents.bytes);
best_contents.is_generic = true;
@@ -443,17 +445,17 @@ pub fn main() !void {
missed_opportunity_bytes += this_missed_bytes;
std.debug.warn("Missed opportunity ({:2}): {s}\n", .{
std.fmt.fmtIntSizeDec(this_missed_bytes),
- path_kv.key.*,
+ path_kv.key_ptr.*,
});
} else break;
}
}
- var hash_it = path_kv.value.*.iterator();
+ var hash_it = path_kv.value_ptr.*.iterator();
while (hash_it.next()) |hash_kv| {
- const contents = hash_to_contents.get(hash_kv.value.*).?;
+ const contents = hash_to_contents.get(hash_kv.value_ptr.*).?;
if (contents.is_generic) continue;
- const dest_target = hash_kv.key.*;
+ const dest_target = hash_kv.key_ptr.*;
const arch_name = switch (dest_target.arch) {
.specific => |a| @tagName(a),
else => @tagName(dest_target.arch),
@@ -463,7 +465,7 @@ pub fn main() !void {
@tagName(dest_target.os),
@tagName(dest_target.abi),
});
- const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, out_subpath, path_kv.key.* });
+ const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, out_subpath, path_kv.key_ptr.* });
try std.fs.cwd().makePath(std.fs.path.dirname(full_path).?);
try std.fs.cwd().writeFile(full_path, contents.bytes);
}
diff --git a/tools/update_cpu_features.zig b/tools/update_cpu_features.zig
index 9cda12c63a..c97b7ce02c 100644
--- a/tools/update_cpu_features.zig
+++ b/tools/update_cpu_features.zig
@@ -902,19 +902,19 @@ fn processOneTarget(job: Job) anyerror!void {
{
var it = root_map.iterator();
root_it: while (it.next()) |kv| {
- if (kv.key.len == 0) continue;
- if (kv.key.*[0] == '!') continue;
- if (kv.value.* != .Object) continue;
- if (hasSuperclass(&kv.value.Object, "SubtargetFeature")) {
- const llvm_name = kv.value.Object.get("Name").?.String;
+ if (kv.key_ptr.len == 0) continue;
+ if (kv.key_ptr.*[0] == '!') continue;
+ if (kv.value_ptr.* != .Object) continue;
+ if (hasSuperclass(&kv.value_ptr.Object, "SubtargetFeature")) {
+ const llvm_name = kv.value_ptr.Object.get("Name").?.String;
if (llvm_name.len == 0) continue;
var zig_name = try llvmNameToZigName(arena, llvm_name);
- var desc = kv.value.Object.get("Desc").?.String;
+ var desc = kv.value_ptr.Object.get("Desc").?.String;
var deps = std.ArrayList([]const u8).init(arena);
var omit = false;
var flatten = false;
- const implies = kv.value.Object.get("Implies").?.Array;
+ const implies = kv.value_ptr.Object.get("Implies").?.Array;
for (implies.items) |imply| {
const other_key = imply.Object.get("def").?.String;
const other_obj = &root_map.getPtr(other_key).?.Object;
@@ -960,13 +960,13 @@ fn processOneTarget(job: Job) anyerror!void {
try all_features.append(feature);
}
}
- if (hasSuperclass(&kv.value.Object, "Processor")) {
- const llvm_name = kv.value.Object.get("Name").?.String;
+ if (hasSuperclass(&kv.value_ptr.Object, "Processor")) {
+ const llvm_name = kv.value_ptr.Object.get("Name").?.String;
if (llvm_name.len == 0) continue;
var zig_name = try llvmNameToZigName(arena, llvm_name);
var deps = std.ArrayList([]const u8).init(arena);
- const features = kv.value.Object.get("Features").?.Array;
+ const features = kv.value_ptr.Object.get("Features").?.Array;
for (features.items) |feature| {
const feature_key = feature.Object.get("def").?.String;
const feature_obj = &root_map.getPtr(feature_key).?.Object;
@@ -979,7 +979,7 @@ fn processOneTarget(job: Job) anyerror!void {
)) orelse continue;
try deps.append(feature_zig_name);
}
- const tune_features = kv.value.Object.get("TuneFeatures").?.Array;
+ const tune_features = kv.value_ptr.Object.get("TuneFeatures").?.Array;
for (tune_features.items) |feature| {
const feature_key = feature.Object.get("def").?.String;
const feature_obj = &root_map.getPtr(feature_key).?.Object;
diff --git a/tools/update_spirv_features.zig b/tools/update_spirv_features.zig
index da1cefcd9c..5f2bab6c28 100644
--- a/tools/update_spirv_features.zig
+++ b/tools/update_spirv_features.zig
@@ -68,6 +68,9 @@ pub fn main() !void {
usageAndExit(std.io.getStdErr(), args[0], 1);
}
+ // Required for json parsing.
+ @setEvalBranchQuota(10000);
+
const registry_path = try fs.path.join(allocator, &.{ spirv_headers_root, "include", "spirv", "unified1", "spirv.core.grammar.json" });
const registry_json = try std.fs.cwd().readFileAlloc(allocator, registry_path, std.math.maxInt(usize));
var tokens = std.json.TokenStream.init(registry_json);