author    mlugg <mlugg@mlugg.co.uk>  2025-04-20 02:01:46 +0100
committer mlugg <mlugg@mlugg.co.uk>  2025-04-20 18:11:53 +0100
commit    6561a98a61bb54c9d6c868f788da3eaa6f48d2c3 (patch)
tree      429a38f476c8c0ecc1bf211df83b5596f8824ba6 /src
parent    f01833e03eea786a05635cfbe142f581e9281b51 (diff)
incremental: correctly handle dead exporters
Resolves: #23604
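
The fix hinges on one rule: during an incremental update, an export is only processed if the analysis unit that declared it (the exporter) is still referenced according to `resolveReferences`; an export from a dead exporter is skipped and, because it may already have been sent to the linker by an earlier update, deleted from the linker. Below is a minimal sketch of that rule using hypothetical simplified types (`AnalUnit`, `Action`, `classifyExport`) in place of the real `Zcu` and linker APIs; it illustrates the idea rather than the compiler's actual implementation.

const std = @import("std");

// Hypothetical stand-in: the real code keys on InternPool.AnalUnit and uses
// the set returned by `zcu.resolveReferences()`.
const AnalUnit = u32;

const Action = enum { keep, delete_from_linker };

/// An export from a dead exporter must not be re-processed; since it may
/// already have reached the linker on a previous update, it has to be
/// deleted there (the patch calls `lf.deleteExport(exp.exported, exp.opts.name)`).
fn classifyExport(referenced: *const std.AutoHashMap(AnalUnit, void), exporter: AnalUnit) Action {
    return if (referenced.contains(exporter)) .keep else .delete_from_linker;
}

test "export from a dead exporter is deleted rather than re-processed" {
    var referenced = std.AutoHashMap(AnalUnit, void).init(std.testing.allocator);
    defer referenced.deinit();
    try referenced.put(1, {}); // unit 1 is still referenced; unit 2 is dead

    try std.testing.expectEqual(Action.keep, classifyExport(&referenced, 1));
    try std.testing.expectEqual(Action.delete_from_linker, classifyExport(&referenced, 2));
}

In the patch itself, the same check guards both the `single_exports` and `multi_exports` loops, so every export of a dead exporter is either deleted from the linker or skipped before the nav/uav export maps are built.
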
Diffstat (limited to 'src')
-rw-r--r--  src/Zcu/PerThread.zig  32
1 file changed, 29 insertions(+), 3 deletions(-)
diff --git a/src/Zcu/PerThread.zig b/src/Zcu/PerThread.zig
index 095cd6c1cb..9084e8b0ca 100644
--- a/src/Zcu/PerThread.zig
+++ b/src/Zcu/PerThread.zig
@@ -2837,6 +2837,11 @@ pub fn processExports(pt: Zcu.PerThread) !void {
     const zcu = pt.zcu;
     const gpa = zcu.gpa;
 
+    if (zcu.single_exports.count() == 0 and zcu.multi_exports.count() == 0) {
+        // We can avoid a call to `resolveReferences` in this case.
+        return;
+    }
+
     // First, construct a mapping of every exported value and Nav to the indices of all its different exports.
     var nav_exports: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, std.ArrayListUnmanaged(Zcu.Export.Index)) = .empty;
     var uav_exports: std.AutoArrayHashMapUnmanaged(InternPool.Index, std.ArrayListUnmanaged(Zcu.Export.Index)) = .empty;
@@ -2857,8 +2862,18 @@ pub fn processExports(pt: Zcu.PerThread) !void {
     // So, this ensureTotalCapacity serves as a reasonable (albeit very approximate) optimization.
     try nav_exports.ensureTotalCapacity(gpa, zcu.single_exports.count() + zcu.multi_exports.count());
 
-    for (zcu.single_exports.values()) |export_idx| {
+    const unit_references = try zcu.resolveReferences();
+
+    for (zcu.single_exports.keys(), zcu.single_exports.values()) |exporter, export_idx| {
         const exp = export_idx.ptr(zcu);
+        if (!unit_references.contains(exporter)) {
+            // This export might already have been sent to the linker on a previous update, in which case we need to delete it.
+            // The linker export API should be modified to eliminate this call. #23616
+            if (zcu.comp.bin_file) |lf| {
+                lf.deleteExport(exp.exported, exp.opts.name);
+            }
+            continue;
+        }
         const value_ptr, const found_existing = switch (exp.exported) {
             .nav => |nav| gop: {
                 const gop = try nav_exports.getOrPut(gpa, nav);
@@ -2873,8 +2888,19 @@ pub fn processExports(pt: Zcu.PerThread) !void {
         try value_ptr.append(gpa, export_idx);
     }
 
-    for (zcu.multi_exports.values()) |info| {
-        for (zcu.all_exports.items[info.index..][0..info.len], info.index..) |exp, export_idx| {
+    for (zcu.multi_exports.keys(), zcu.multi_exports.values()) |exporter, info| {
+        const exports = zcu.all_exports.items[info.index..][0..info.len];
+        if (!unit_references.contains(exporter)) {
+            // This export might already have been sent to the linker on a previous update, in which case we need to delete it.
+            // The linker export API should be modified to eliminate this loop. #23616
+            if (zcu.comp.bin_file) |lf| {
+                for (exports) |exp| {
+                    lf.deleteExport(exp.exported, exp.opts.name);
+                }
+            }
+            continue;
+        }
+        for (exports, info.index..) |exp, export_idx| {
             const value_ptr, const found_existing = switch (exp.exported) {
                 .nav => |nav| gop: {
                     const gop = try nav_exports.getOrPut(gpa, nav);