path: root/src/Compilation.zig
author     Jacob Young <jacobly0@users.noreply.github.com>    2024-07-10 10:04:33 -0400
committer  Jacob Young <jacobly0@users.noreply.github.com>    2024-07-10 11:20:08 -0400
commit     3d2dfbe8289c2ecb45e1ba1fe79c4d7e21dd26c3 (patch)
tree       921e81c14c12e84cf4970a299b37d2a659c63c2f /src/Compilation.zig
parent     f93a10f664fbbb67aeda031583a790e2a842fb01 (diff)
InternPool: add `FileIndex` to `*File` mapping
Diffstat (limited to 'src/Compilation.zig')
-rw-r--r--  src/Compilation.zig  41
1 file changed, 22 insertions(+), 19 deletions(-)
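
For context on the hunks below: the commit makes `zcu.import_table` hand out `Zcu.File.Index` handles rather than `*Zcu.File` pointers, with the pointer recovered through the new index-to-file mapping (`zcu.fileByIndex`). A minimal before/after sketch of the iteration pattern, using only identifiers that appear in the diff (illustrative, not a standalone program):

    // Before: values() yielded `*Zcu.File`; the File.Index had to be derived
    // from the position in the table.
    for (zcu.import_table.values(), 0..) |file, file_index_usize| {
        const file_index: Zcu.File.Index = @enumFromInt(file_index_usize);
        if (file.mod.isBuiltin()) continue;
        comp.astgen_work_queue.writeItemAssumeCapacity(file_index);
    }

    // After: values() yields `Zcu.File.Index`; the `*Zcu.File` is looked up
    // through the new `FileIndex` to `*File` mapping.
    for (zcu.import_table.values()) |file_index| {
        if (zcu.fileByIndex(file_index).mod.isBuiltin()) continue;
        comp.astgen_work_queue.writeItemAssumeCapacity(file_index);
    }
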
diff --git a/src/Compilation.zig b/src/Compilation.zig
index 118e325ed7..a474d1955a 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -2119,12 +2119,14 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
     }
 
     if (comp.module) |zcu| {
+        const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = .main };
+
         zcu.compile_log_text.shrinkAndFree(gpa, 0);
 
         // Make sure std.zig is inside the import_table. We unconditionally need
         // it for start.zig.
         const std_mod = zcu.std_mod;
-        _ = try zcu.importPkg(std_mod);
+        _ = try pt.importPkg(std_mod);
 
         // Normally we rely on importing std to in turn import the root source file
         // in the start code, but when using the stage1 backend that won't happen,
@@ -2133,20 +2135,19 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
         // Likewise, in the case of `zig test`, the test runner is the root source file,
         // and so there is nothing to import the main file.
         if (comp.config.is_test) {
-            _ = try zcu.importPkg(zcu.main_mod);
+            _ = try pt.importPkg(zcu.main_mod);
         }
 
         if (zcu.root_mod.deps.get("compiler_rt")) |compiler_rt_mod| {
-            _ = try zcu.importPkg(compiler_rt_mod);
+            _ = try pt.importPkg(compiler_rt_mod);
         }
 
         // Put a work item in for every known source file to detect if
         // it changed, and, if so, re-compute ZIR and then queue the job
         // to update it.
         try comp.astgen_work_queue.ensureUnusedCapacity(zcu.import_table.count());
-        for (zcu.import_table.values(), 0..) |file, file_index_usize| {
-            const file_index: Zcu.File.Index = @enumFromInt(file_index_usize);
-            if (file.mod.isBuiltin()) continue;
+        for (zcu.import_table.values()) |file_index| {
+            if (zcu.fileByIndex(file_index).mod.isBuiltin()) continue;
             comp.astgen_work_queue.writeItemAssumeCapacity(file_index);
         }
@@ -2641,7 +2642,8 @@ fn resolveEmitLoc(
     return slice.ptr;
 }
 
-fn reportMultiModuleErrors(zcu: *Zcu) !void {
+fn reportMultiModuleErrors(pt: Zcu.PerThread) !void {
+    const zcu = pt.zcu;
     const gpa = zcu.gpa;
     const ip = &zcu.intern_pool;
     // Some cases can give you a whole bunch of multi-module errors, which it's not helpful to
@@ -2651,14 +2653,13 @@ fn reportMultiModuleErrors(zcu: *Zcu) !void {
     // Attach the "some omitted" note to the final error message
     var last_err: ?*Zcu.ErrorMsg = null;
 
-    for (zcu.import_table.values(), 0..) |file, file_index_usize| {
+    for (zcu.import_table.values()) |file_index| {
+        const file = zcu.fileByIndex(file_index);
         if (!file.multi_pkg) continue;
         num_errors += 1;
         if (num_errors > max_errors) continue;
 
-        const file_index: Zcu.File.Index = @enumFromInt(file_index_usize);
-
         const err = err_blk: {
             // Like with errors, let's cap the number of notes to prevent a huge error spew.
             const max_notes = 5;
@@ -2749,8 +2750,9 @@ fn reportMultiModuleErrors(zcu: *Zcu) !void {
     // to add this flag after reporting the errors however, as otherwise
     // we'd get an error for every single downstream file, which wouldn't be
     // very useful.
-    for (zcu.import_table.values()) |file| {
-        if (file.multi_pkg) file.recursiveMarkMultiPkg(zcu);
+    for (zcu.import_table.values()) |file_index| {
+        const file = zcu.fileByIndex(file_index);
+        if (file.multi_pkg) file.recursiveMarkMultiPkg(pt);
     }
 }
@@ -3443,11 +3445,12 @@ fn performAllTheWorkInner(
         }
     }
 
-    if (comp.module) |mod| {
-        try reportMultiModuleErrors(mod);
-        try mod.flushRetryableFailures();
-        mod.sema_prog_node = main_progress_node.start("Semantic Analysis", 0);
-        mod.codegen_prog_node = main_progress_node.start("Code Generation", 0);
+    if (comp.module) |zcu| {
+        const pt: Zcu.PerThread = .{ .zcu = comp.module.?, .tid = .main };
+        try reportMultiModuleErrors(pt);
+        try zcu.flushRetryableFailures();
+        zcu.sema_prog_node = main_progress_node.start("Semantic Analysis", 0);
+        zcu.codegen_prog_node = main_progress_node.start("Code Generation", 0);
     }
 
     if (!InternPool.single_threaded) comp.thread_pool.spawnWgId(&comp.work_queue_wait_group, codegenThread, .{comp});
@@ -4189,9 +4192,9 @@ fn workerAstGenFile(
                 comp.mutex.lock();
                 defer comp.mutex.unlock();
 
-                const res = pt.zcu.importFile(file, import_path) catch continue;
+                const res = pt.importFile(file, import_path) catch continue;
                 if (!res.is_pkg) {
-                    res.file.addReference(pt.zcu.*, .{ .import = .{
+                    res.file.addReference(pt.zcu, .{ .import = .{
                         .file = file_index,
                         .token = item.data.token,
                     } }) catch continue;