| author | Andrew Kelley <andrew@ziglang.org> | 2023-05-29 23:41:40 -0700 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-05-29 23:41:40 -0700 |
| commit | 22b0457dd4b73d3cce2e52b77b3b7c9f857c0409 (patch) | |
| tree | 111e73c005fa6fcdbc7157f0133b69cbf790a4bf /src/Module.zig | |
| parent | ac9f72d87e8eeb4a9d0dead80b61420485279ddd (diff) | |
| parent | 0f58d34ef7935dd9877f4969ed6ec7d582cd104c (diff) | |
| download | zig-22b0457dd4b73d3cce2e52b77b3b7c9f857c0409.tar.gz zig-22b0457dd4b73d3cce2e52b77b3b7c9f857c0409.zip | |
Merge pull request #15891 from mlugg/fix/dont-emit-fn-called-at-comptime
Prevent analysis of functions only referenced at comptime
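To make the behavior concrete, here is a minimal, hypothetical Zig snippet (not taken from the commit): `lookupTableSize` is referenced only at comptime, as an array length, so with this change the compiler no longer analyzes or emits a runtime body for it.

```zig
const std = @import("std");

// Referenced only at comptime (as an array length below); there is no
// potential runtime call, so no runtime body needs to be emitted for it.
fn lookupTableSize() usize {
    return 256;
}

// The array length is evaluated entirely at compile time; this is the
// function's only reference.
var table: [lookupTableSize()]u8 = undefined;

pub fn main() void {
    table[0] = 1;
    std.debug.print("table has {d} entries\n", .{table.len});
}
```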
Diffstat (limited to 'src/Module.zig')
| -rw-r--r-- | src/Module.zig | 74 |
1 file changed, 59 insertions(+), 15 deletions(-)
```diff
diff --git a/src/Module.zig b/src/Module.zig
index 61843f5a8f..59ee21d8cf 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -1638,6 +1638,10 @@ pub const Fn = struct {
     inferred_error_sets: InferredErrorSetList = .{},
 
     pub const Analysis = enum {
+        /// This function has not yet undergone analysis, because we have not
+        /// seen a potential runtime call. It may be analyzed in future.
+        none,
+        /// Analysis for this function has been queued, but not yet completed.
         queued,
         /// This function intentionally only has ZIR generated because it is marked
         /// inline, which means no runtime version of the function will be generated.
@@ -4323,7 +4327,7 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
         .complete, .codegen_failure_retryable => {
             switch (func.state) {
                 .sema_failure, .dependency_failure => return error.AnalysisFail,
-                .queued => {},
+                .none, .queued => {},
                 .in_progress => unreachable,
                 .inline_only => unreachable, // don't queue work for this
                 .success => return,
@@ -4426,6 +4430,60 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
     }
 }
 
+/// Ensure this function's body is or will be analyzed and emitted. This should
+/// be called whenever a potential runtime call of a function is seen.
+///
+/// The caller is responsible for ensuring the function decl itself is already
+/// analyzed, and for ensuring it can exist at runtime (see
+/// `sema.fnHasRuntimeBits`). This function does *not* guarantee that the body
+/// will be analyzed when it returns: for that, see `ensureFuncBodyAnalyzed`.
+pub fn ensureFuncBodyAnalysisQueued(mod: *Module, func: *Fn) !void {
+    const decl_index = func.owner_decl;
+    const decl = mod.declPtr(decl_index);
+
+    switch (decl.analysis) {
+        .unreferenced => unreachable,
+        .in_progress => unreachable,
+        .outdated => unreachable,
+
+        .file_failure,
+        .sema_failure,
+        .liveness_failure,
+        .codegen_failure,
+        .dependency_failure,
+        .sema_failure_retryable,
+        .codegen_failure_retryable,
+        // The function analysis failed, but we've already emitted an error for
+        // that. The callee doesn't need the function to be analyzed right now,
+        // so its analysis can safely continue.
+        => return,
+
+        .complete => {},
+    }
+
+    assert(decl.has_tv);
+
+    switch (func.state) {
+        .none => {},
+        .queued => return,
+        // As above, we don't need to forward errors here.
+        .sema_failure, .dependency_failure => return,
+        .in_progress => return,
+        .inline_only => unreachable, // don't queue work for this
+        .success => return,
+    }
+
+    // Decl itself is safely analyzed, and body analysis is not yet queued
+
+    try mod.comp.work_queue.writeItem(.{ .codegen_func = func });
+    if (mod.emit_h != null) {
+        // TODO: we ideally only want to do this if the function's type changed
+        // since the last update
+        try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl_index });
+    }
+    func.state = .queued;
+}
+
 pub fn updateEmbedFile(mod: *Module, embed_file: *EmbedFile) SemaError!void {
     const tracy = trace(@src());
     defer tracy.end();
@@ -4733,20 +4791,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !bool {
             decl.analysis = .complete;
             decl.generation = mod.generation;
 
-            const has_runtime_bits = try sema.fnHasRuntimeBits(decl.ty);
-
-            if (has_runtime_bits) {
-                // We don't fully codegen the decl until later, but we do need to reserve a global
-                // offset table index for it. This allows us to codegen decls out of dependency
-                // order, increasing how many computations can be done in parallel.
-                try mod.comp.work_queue.writeItem(.{ .codegen_func = func });
-                if (type_changed and mod.emit_h != null) {
-                    try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl_index });
-                }
-            } else if (!prev_is_inline and prev_type_has_bits) {
-                mod.comp.bin_file.freeDecl(decl_index);
-            }
-
             const is_inline = decl.ty.fnCallingConvention() == .Inline;
             if (decl.is_exported) {
                 const export_src: LazySrcLoc = .{ .token_offset = @boolToInt(decl.is_pub) };
```
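The doc comment above spells out the caller's responsibilities before invoking the new entry point. Below is a sketch of how a call site might look, assuming it lives alongside src/Module.zig (for example in src/Sema.zig); the wrapper function and the `ensureDeclAnalyzed` call are assumptions for illustration, while `declPtr`, `fnHasRuntimeBits`, and `ensureFuncBodyAnalysisQueued` come from the code shown in the diff.

```zig
const Module = @import("Module.zig");
const Sema = @import("Sema.zig");

/// Hypothetical call site: invoked when semantic analysis sees a potential
/// runtime call of `func`. Not part of this commit.
fn onPotentialRuntimeCall(sema: *Sema, func: *Module.Fn) !void {
    const mod = sema.mod;
    const decl_index = func.owner_decl;

    // Caller responsibility 1: the owner decl itself must already be analyzed
    // (`ensureDeclAnalyzed` is assumed here) so that `decl.ty` below is valid.
    try mod.ensureDeclAnalyzed(decl_index);
    const decl = mod.declPtr(decl_index);

    // Caller responsibility 2: the function must be able to exist at runtime.
    const has_runtime_bits = try sema.fnHasRuntimeBits(decl.ty);
    if (!has_runtime_bits) return;

    // Safe to queue body analysis and codegen. Repeated calls are cheap:
    // the `.queued`, failure, and `.success` states all return early.
    try mod.ensureFuncBodyAnalysisQueued(func);
}
```

Because `ensureFuncBodyAnalysisQueued` bails out early for already-queued and terminal states, a call site like this does not need to deduplicate its own calls.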
