| field | value | date |
|---|---|---|
| author | Matthew Lugg <mlugg@mlugg.co.uk> | 2024-12-25 02:58:27 +0000 |
| committer | GitHub <noreply@github.com> | 2024-12-25 02:58:27 +0000 |
| commit | 497592c9b45a94fb7b6028bf45b80f183e395a9b (patch) | |
| tree | 467873c408750cb4223f3ccf31775e42ec9fbd5c /src/Compilation.zig | |
| parent | af5e731729592af4a5716edd3b1e03264d66ea46 (diff) | |
| parent | 3afda4322c34dedc2319701fdfac3505c8d311e9 (diff) | |
Merge pull request #22303 from mlugg/131-new
compiler: analyze type and value of global declarations separately
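As a rough illustration of what analyzing type and value "separately" enables (this sketch is not from the PR; the file name and the `expensiveInit` helper are hypothetical), the annotated type of a global can be resolved without evaluating its initializer, which is what lets type-only consumers hang off the new `nav_ty` dependencies instead of `nav_val`:

```zig
// counter.zig (hypothetical). The type of `counter` is known from the
// annotation alone; the comptime initializer does not have to run for a
// consumer that only needs `@TypeOf(counter)` or `&counter`. With this
// change, such a consumer can record a `nav_ty` dependency rather than
// a `nav_val` dependency on `counter`.
pub var counter: u64 = expensiveInit();

fn expensiveInit() u64 {
    // Stand-in for a costly comptime computation.
    var total: u64 = 0;
    var i: u64 = 0;
    while (i < 100) : (i += 1) total += i;
    return total;
}
```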
Diffstat (limited to 'src/Compilation.zig')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/Compilation.zig | 81 |

1 file changed, 52 insertions(+), 29 deletions(-)
```diff
diff --git a/src/Compilation.zig b/src/Compilation.zig
index 8b158390b6..28c5efab6c 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -348,12 +348,15 @@ const Job = union(enum) {
     /// Corresponds to the task in `link.Task`.
     /// Only needed for backends that haven't yet been updated to not race against Sema.
     codegen_type: InternPool.Index,
-    /// The `Cau` must be semantically analyzed (and possibly export itself).
+    /// The `AnalUnit`, which is *not* a `func`, must be semantically analyzed.
+    /// This may be its first time being analyzed, or it may be outdated.
+    /// If the unit is a function, a `codegen_func` job will then be queued.
+    analyze_comptime_unit: InternPool.AnalUnit,
+    /// This function must be semantically analyzed.
     /// This may be its first time being analyzed, or it may be outdated.
-    analyze_cau: InternPool.Cau.Index,
-    /// Analyze the body of a runtime function.
     /// After analysis, a `codegen_func` job will be queued.
     /// These must be separate jobs to ensure any needed type resolution occurs *before* codegen.
+    /// This job is separate from `analyze_comptime_unit` because it has a different priority.
     analyze_func: InternPool.Index,
     /// The main source file for the module needs to be analyzed.
     analyze_mod: *Package.Module,
@@ -2903,6 +2906,7 @@ const Header = extern struct {
     file_deps_len: u32,
     src_hash_deps_len: u32,
     nav_val_deps_len: u32,
+    nav_ty_deps_len: u32,
     namespace_deps_len: u32,
     namespace_name_deps_len: u32,
     first_dependency_len: u32,
@@ -2946,6 +2950,7 @@ pub fn saveState(comp: *Compilation) !void {
         .file_deps_len = @intCast(ip.file_deps.count()),
         .src_hash_deps_len = @intCast(ip.src_hash_deps.count()),
         .nav_val_deps_len = @intCast(ip.nav_val_deps.count()),
+        .nav_ty_deps_len = @intCast(ip.nav_ty_deps.count()),
         .namespace_deps_len = @intCast(ip.namespace_deps.count()),
         .namespace_name_deps_len = @intCast(ip.namespace_name_deps.count()),
         .first_dependency_len = @intCast(ip.first_dependency.count()),
@@ -2976,6 +2981,8 @@ pub fn saveState(comp: *Compilation) !void {
     addBuf(&bufs, mem.sliceAsBytes(ip.src_hash_deps.values()));
     addBuf(&bufs, mem.sliceAsBytes(ip.nav_val_deps.keys()));
     addBuf(&bufs, mem.sliceAsBytes(ip.nav_val_deps.values()));
+    addBuf(&bufs, mem.sliceAsBytes(ip.nav_ty_deps.keys()));
+    addBuf(&bufs, mem.sliceAsBytes(ip.nav_ty_deps.values()));
     addBuf(&bufs, mem.sliceAsBytes(ip.namespace_deps.keys()));
     addBuf(&bufs, mem.sliceAsBytes(ip.namespace_deps.values()));
     addBuf(&bufs, mem.sliceAsBytes(ip.namespace_name_deps.keys()));
@@ -3141,8 +3148,10 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
         }
 
         const file_index = switch (anal_unit.unwrap()) {
-            .cau => |cau| zcu.namespacePtr(ip.getCau(cau).namespace).file_scope,
-            .func => |ip_index| (zcu.funcInfo(ip_index).zir_body_inst.resolveFull(ip) orelse continue).file,
+            .@"comptime" => |cu| ip.getComptimeUnit(cu).zir_index.resolveFile(ip),
+            .nav_val, .nav_ty => |nav| ip.getNav(nav).analysis.?.zir_index.resolveFile(ip),
+            .type => |ty| Type.fromInterned(ty).typeDeclInst(zcu).?.resolveFile(ip),
+            .func => |ip_index| zcu.funcInfo(ip_index).zir_body_inst.resolveFile(ip),
         };
 
         // Skip errors for AnalUnits within files that had a parse failure.
@@ -3374,11 +3383,9 @@ pub fn addModuleErrorMsg(
             const rt_file_path = try src.file_scope.fullPath(gpa);
             defer gpa.free(rt_file_path);
             const name = switch (ref.referencer.unwrap()) {
-                .cau => |cau| switch (ip.getCau(cau).owner.unwrap()) {
-                    .nav => |nav| ip.getNav(nav).name.toSlice(ip),
-                    .type => |ty| Type.fromInterned(ty).containerTypeName(ip).toSlice(ip),
-                    .none => "comptime",
-                },
+                .@"comptime" => "comptime",
+                .nav_val, .nav_ty => |nav| ip.getNav(nav).name.toSlice(ip),
+                .type => |ty| Type.fromInterned(ty).containerTypeName(ip).toSlice(ip),
                 .func => |f| ip.getNav(zcu.funcInfo(f).owner_nav).name.toSlice(ip),
             };
             try ref_traces.append(gpa, .{
@@ -3641,10 +3648,14 @@ fn performAllTheWorkInner(
             // If there's no work queued, check if there's anything outdated
             // which we need to work on, and queue it if so.
             if (try zcu.findOutdatedToAnalyze()) |outdated| {
-                switch (outdated.unwrap()) {
-                    .cau => |cau| try comp.queueJob(.{ .analyze_cau = cau }),
-                    .func => |func| try comp.queueJob(.{ .analyze_func = func }),
-                }
+                try comp.queueJob(switch (outdated.unwrap()) {
+                    .func => |f| .{ .analyze_func = f },
+                    .@"comptime",
+                    .nav_ty,
+                    .nav_val,
+                    .type,
+                    => .{ .analyze_comptime_unit = outdated },
+                });
                 continue;
             }
         }
@@ -3667,13 +3678,13 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
         .codegen_nav => |nav_index| {
             const zcu = comp.zcu.?;
             const nav = zcu.intern_pool.getNav(nav_index);
-            if (nav.analysis_owner.unwrap()) |cau| {
-                const unit = InternPool.AnalUnit.wrap(.{ .cau = cau });
+            if (nav.analysis != null) {
+                const unit: InternPool.AnalUnit = .wrap(.{ .nav_val = nav_index });
                 if (zcu.failed_analysis.contains(unit) or zcu.transitive_failed_analysis.contains(unit)) {
                     return;
                 }
             }
-            assert(nav.status == .resolved);
+            assert(nav.status == .fully_resolved);
             comp.dispatchCodegenTask(tid, .{ .codegen_nav = nav_index });
         },
         .codegen_func => |func| {
@@ -3688,36 +3699,48 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
 
             const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
             defer pt.deactivate();
-            pt.ensureFuncBodyAnalyzed(func) catch |err| switch (err) {
-                error.OutOfMemory => return error.OutOfMemory,
+
+            pt.ensureFuncBodyUpToDate(func) catch |err| switch (err) {
+                error.OutOfMemory => |e| return e,
                 error.AnalysisFail => return,
             };
         },
-        .analyze_cau => |cau_index| {
+        .analyze_comptime_unit => |unit| {
+            const named_frame = tracy.namedFrame("analyze_comptime_unit");
+            defer named_frame.end();
+
             const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
             defer pt.deactivate();
-            pt.ensureCauAnalyzed(cau_index) catch |err| switch (err) {
-                error.OutOfMemory => return error.OutOfMemory,
+
+            const maybe_err: Zcu.SemaError!void = switch (unit.unwrap()) {
+                .@"comptime" => |cu| pt.ensureComptimeUnitUpToDate(cu),
+                .nav_ty => |nav| pt.ensureNavTypeUpToDate(nav),
+                .nav_val => |nav| pt.ensureNavValUpToDate(nav),
+                .type => |ty| if (pt.ensureTypeUpToDate(ty)) |_| {} else |err| err,
+                .func => unreachable,
+            };
+            maybe_err catch |err| switch (err) {
+                error.OutOfMemory => |e| return e,
                 error.AnalysisFail => return,
             };
+
             queue_test_analysis: {
                 if (!comp.config.is_test) break :queue_test_analysis;
+                const nav = switch (unit.unwrap()) {
+                    .nav_val => |nav| nav,
+                    else => break :queue_test_analysis,
+                };
 
                 // Check if this is a test function.
                 const ip = &pt.zcu.intern_pool;
-                const cau = ip.getCau(cau_index);
-                const nav_index = switch (cau.owner.unwrap()) {
-                    .none, .type => break :queue_test_analysis,
-                    .nav => |nav| nav,
-                };
-                if (!pt.zcu.test_functions.contains(nav_index)) {
+                if (!pt.zcu.test_functions.contains(nav)) {
                     break :queue_test_analysis;
                 }
 
                 // Tests are always emitted in test binaries. The decl_refs are created by
                 // Zcu.populateTestFunctions, but this will not queue body analysis, so do
                 // that now.
-                try pt.zcu.ensureFuncBodyAnalysisQueued(ip.getNav(nav_index).status.resolved.val);
+                try pt.zcu.ensureFuncBodyAnalysisQueued(ip.getNav(nav).status.fully_resolved.val);
             }
         },
         .resolve_type_fully => |ty| {
```
