author    Andrew Kelley <andrew@ziglang.org>    2021-01-02 22:01:51 -0800
committer GitHub <noreply@github.com>           2021-01-02 22:01:51 -0800
commit    d8f3f14532c4b5d65377efaef015c3855137dccf (patch)
tree      d0927df77323d64bff52501b50ef8543a077d4d8 /src
parent    3d151fbfc8db71f87ee84dd33c49910584708a04 (diff)
parent    654832253a7857e78aab85e28ed09fb16b632dd2 (diff)
Merge pull request #7647 from ziglang/stage2-comptime-fn-call
stage2: comptime function calls and inline function calls
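At the language level, the behavior this merge implements in stage2 looks like the
following user code (illustrative only, not part of the diff):

    // An inline function never gets a runtime body; every call site is
    // expanded during semantic analysis (the new `inline_only` state below).
    inline fn square(x: u32) u32 {
        return x * x;
    }

    // A comptime call evaluates the callee's ZIR at compile time.
    const nine: u32 = comptime square(3);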
Diffstat (limited to 'src')
-rw-r--r--  src/Compilation.zig               15
-rw-r--r--  src/Module.zig                   264
-rw-r--r--  src/astgen.zig                    38
-rw-r--r--  src/codegen.zig                   10
-rw-r--r--  src/codegen/c.zig                  2
-rw-r--r--  src/codegen/wasm.zig               2
-rw-r--r--  src/config.zig.in                  1
-rw-r--r--  src/link/Elf.zig                  10
-rw-r--r--  src/link/MachO/DebugSymbols.zig   10
-rw-r--r--  src/llvm_backend.zig               2
-rw-r--r--  src/main.zig                       2
-rw-r--r--  src/value.zig                     13
-rw-r--r--  src/zir.zig                      361
-rw-r--r--  src/zir_sema.zig                 277
14 files changed, 737 insertions, 270 deletions
diff --git a/src/Compilation.zig b/src/Compilation.zig
index de115b9b40..9a06aee561 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -1459,24 +1459,29 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
const module = self.bin_file.options.module.?;
if (decl.typed_value.most_recent.typed_value.val.castTag(.function)) |payload| {
const func = payload.data;
- switch (func.analysis) {
+ switch (func.state) {
.queued => module.analyzeFnBody(decl, func) catch |err| switch (err) {
error.AnalysisFail => {
- assert(func.analysis != .in_progress);
+ assert(func.state != .in_progress);
continue;
},
error.OutOfMemory => return error.OutOfMemory,
},
.in_progress => unreachable,
+ .inline_only => unreachable, // don't queue work for this
.sema_failure, .dependency_failure => continue,
.success => {},
}
- // Here we tack on additional allocations to the Decl's arena. The allocations are
- // lifetime annotations in the ZIR.
+ // Here we tack on additional allocations to the Decl's arena. The allocations
+ // are lifetime annotations in the ZIR.
var decl_arena = decl.typed_value.most_recent.arena.?.promote(module.gpa);
defer decl.typed_value.most_recent.arena.?.* = decl_arena.state;
log.debug("analyze liveness of {s}\n", .{decl.name});
- try liveness.analyze(module.gpa, &decl_arena.allocator, func.analysis.success);
+ try liveness.analyze(module.gpa, &decl_arena.allocator, func.body);
+
+ if (std.builtin.mode == .Debug and self.verbose_ir) {
+ func.dump(module.*);
+ }
}
assert(decl.typed_value.most_recent.typed_value.ty.hasCodeGenBits());
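Note that `std.builtin.mode == .Debug` is comptime-known, so the guarded dump is
compiled out of release builds entirely. The pattern in isolation (a minimal
sketch, not from the diff):

    const std = @import("std");

    fn debugOnlyDiagnostics() void {
        // The condition is known at compile time; in release modes the
        // whole branch, including its callees, generates no code.
        if (std.builtin.mode == .Debug) {
            std.debug.print("debug-only diagnostics\n", .{});
        }
    }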
diff --git a/src/Module.zig b/src/Module.zig
index d1719059c4..24ea48043b 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -268,6 +268,11 @@ pub const Decl = struct {
}
}
+ /// Asserts that the `Decl` is part of AST and not ZIRModule.
+ pub fn getFileScope(self: *Decl) *Scope.File {
+ return self.scope.cast(Scope.Container).?.file_scope;
+ }
+
fn removeDependant(self: *Decl, other: *Decl) void {
self.dependants.removeAssertDiscard(other);
}
@@ -281,46 +286,32 @@ pub const Decl = struct {
/// Extern functions do not have this data structure; they are represented by
/// the `Decl` only, with a `Value` tag of `extern_fn`.
pub const Fn = struct {
- /// This memory owned by the Decl's TypedValue.Managed arena allocator.
- analysis: union(enum) {
- queued: *ZIR,
+ owner_decl: *Decl,
+ /// Contains un-analyzed ZIR instructions generated from Zig source AST.
+ /// Even after we finish analysis, the ZIR is kept in memory, so that
+ /// comptime and inline function calls can happen.
+ zir: zir.Module.Body,
+ /// undefined unless analysis state is `success`.
+ body: Body,
+ state: Analysis,
+
+ pub const Analysis = enum {
+ queued,
+ /// This function intentionally only has ZIR generated because it is marked
+ /// inline, which means no runtime version of the function will be generated.
+ inline_only,
in_progress,
/// There will be a corresponding ErrorMsg in Module.failed_decls
sema_failure,
- /// This Fn might be OK but it depends on another Decl which did not successfully complete
- /// semantic analysis.
+ /// This Fn might be OK but it depends on another Decl which did not
+ /// successfully complete semantic analysis.
dependency_failure,
- success: Body,
- },
- owner_decl: *Decl,
-
- /// This memory is temporary and points to stack memory for the duration
- /// of Fn analysis.
- pub const Analysis = struct {
- inner_block: Scope.Block,
- };
-
- /// Contains un-analyzed ZIR instructions generated from Zig source AST.
- pub const ZIR = struct {
- body: zir.Module.Body,
- arena: std.heap.ArenaAllocator.State,
+ success,
};
/// For debugging purposes.
pub fn dump(self: *Fn, mod: Module) void {
- std.debug.print("Module.Function(name={s}) ", .{self.owner_decl.name});
- switch (self.analysis) {
- .queued => {
- std.debug.print("queued\n", .{});
- },
- .in_progress => {
- std.debug.print("in_progress\n", .{});
- },
- else => {
- std.debug.print("\n", .{});
- zir.dumpFn(mod, self);
- },
- }
+ zir.dumpFn(mod, self);
}
};
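The tagged `analysis` union is replaced here by a plain `state` enum with sibling
`zir` and `body` fields, so `body` is only meaningful once `state == .success`.
A minimal sketch of the access discipline this layout demands (hypothetical
stand-in types, not from the diff):

    const std = @import("std");

    const State = enum { queued, inline_only, in_progress, sema_failure, dependency_failure, success };

    const Func = struct {
        state: State,
        body: []const u8, // stand-in for the TZIR body; undefined unless state == .success

        // Callers must check `state` first, exactly as performAllTheWork
        // does above before running liveness analysis on `func.body`.
        fn getBody(self: *const Func) []const u8 {
            std.debug.assert(self.state == .success);
            return self.body;
        }
    };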
@@ -761,21 +752,60 @@ pub const Scope = struct {
/// during semantic analysis of the block.
pub const Block = struct {
pub const base_tag: Tag = .block;
+
base: Scope = Scope{ .tag = base_tag },
parent: ?*Block,
+ /// Maps ZIR to TZIR. Shared with sub-blocks.
+ inst_table: *InstTable,
func: ?*Fn,
decl: *Decl,
instructions: ArrayListUnmanaged(*Inst),
/// Points to the arena allocator of DeclAnalysis
arena: *Allocator,
label: ?Label = null,
+ inlining: ?*Inlining,
is_comptime: bool,
+ pub const InstTable = std.AutoHashMap(*zir.Inst, *Inst);
+
+ /// This `Block` maps a block ZIR instruction to the corresponding
+ /// TZIR instruction for break instruction analysis.
pub const Label = struct {
zir_block: *zir.Inst.Block,
+ merges: Merges,
+ };
+
+ /// This `Block` indicates that an inline function call is happening
+ /// and return instructions should be analyzed as a break instruction
+ /// to this TZIR block instruction.
+ /// It is shared among all the blocks in an inline or comptime called
+ /// function.
+ pub const Inlining = struct {
+ /// Shared state among the entire inline/comptime call stack.
+ shared: *Shared,
+ /// We use this to count from 0 so that arg instructions know
+ /// which parameter index they are, without having to store
+ /// a parameter index with each arg instruction.
+ param_index: usize,
+ casted_args: []*Inst,
+ merges: Merges,
+
+ pub const Shared = struct {
+ caller: ?*Fn,
+ branch_count: u64,
+ branch_quota: u64,
+ };
+ };
+
+ pub const Merges = struct {
results: ArrayListUnmanaged(*Inst),
block_inst: *Inst.Block,
};
+
+ /// For debugging purposes.
+ pub fn dump(self: *Block, mod: Module) void {
+ zir.dumpBlock(mod, self);
+ }
};
/// This is a temporary structure, references to it are valid only
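Every child block constructed in this diff passes `.inst_table = parent_block.inst_table`,
so one ZIR-to-TZIR map serves an entire function analysis, while each concurrent
analysis of the same ZIR (e.g. an inline call) gets its own fresh table. A
compilable sketch of that sharing, with simplified key/value types:

    const std = @import("std");

    const Block = struct {
        parent: ?*Block,
        // A pointer, not a value: sub-blocks alias the root block's table.
        inst_table: *std.AutoHashMap(u32, u32),
    };

    test "sub-blocks share one inst_table" {
        var table = std.AutoHashMap(u32, u32).init(std.testing.allocator);
        defer table.deinit();

        var root = Block{ .parent = null, .inst_table = &table };
        var child = Block{ .parent = &root, .inst_table = root.inst_table };

        // A mapping recorded while analyzing the child block is visible
        // to later instructions analyzed in the root block.
        try child.inst_table.put(1, 10);
        std.debug.assert(root.inst_table.get(1).? == 10);
    }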
@@ -992,11 +1022,11 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
defer tracy.end();
const container_scope = decl.scope.cast(Scope.Container).?;
- const tree = try self.getAstTree(container_scope);
+ const tree = try self.getAstTree(container_scope.file_scope);
const ast_node = tree.root_node.decls()[decl.src_index];
switch (ast_node.tag) {
.FnProto => {
- const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", ast_node);
+ const fn_proto = ast_node.castTag(.FnProto).?;
decl.analysis = .in_progress;
@@ -1062,7 +1092,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.param_types = param_types,
}, .{});
- if (self.comp.verbose_ir) {
+ if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
}
@@ -1071,12 +1101,17 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
errdefer decl_arena.deinit();
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+ var inst_table = Scope.Block.InstTable.init(self.gpa);
+ defer inst_table.deinit();
+
var block_scope: Scope.Block = .{
.parent = null,
+ .inst_table = &inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &decl_arena.allocator,
+ .inlining = null,
.is_comptime = false,
};
defer block_scope.instructions.deinit(self.gpa);
@@ -1113,14 +1148,11 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const new_func = try decl_arena.allocator.create(Fn);
const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
- const fn_zir = blk: {
- // This scope's arena memory is discarded after the ZIR generation
- // pass completes, and semantic analysis of it completes.
- var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- errdefer gen_scope_arena.deinit();
+ const fn_zir: zir.Module.Body = blk: {
+ // We put the ZIR inside the Decl arena.
var gen_scope: Scope.GenZIR = .{
.decl = decl,
- .arena = &gen_scope_arena.allocator,
+ .arena = &decl_arena.allocator,
.parent = decl.scope,
};
defer gen_scope.instructions.deinit(self.gpa);
@@ -1131,8 +1163,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
for (fn_proto.params()) |param, i| {
const name_token = param.name_token.?;
const src = tree.token_locs[name_token].start;
- const param_name = tree.tokenSlice(name_token); // TODO: call identifierTokenString
- const arg = try gen_scope_arena.allocator.create(zir.Inst.Arg);
+ const param_name = try self.identifierTokenString(&gen_scope.base, name_token);
+ const arg = try decl_arena.allocator.create(zir.Inst.Arg);
arg.* = .{
.base = .{
.tag = .arg,
@@ -1144,7 +1176,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.kw_args = .{},
};
gen_scope.instructions.items[i] = &arg.base;
- const sub_scope = try gen_scope_arena.allocator.create(Scope.LocalVal);
+ const sub_scope = try decl_arena.allocator.create(Scope.LocalVal);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &gen_scope,
@@ -1165,22 +1197,29 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
_ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid);
}
- if (self.comp.verbose_ir) {
+ if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
}
- const fn_zir = try gen_scope_arena.allocator.create(Fn.ZIR);
- fn_zir.* = .{
- .body = .{
- .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
- },
- .arena = gen_scope_arena.state,
+ break :blk .{
+ .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
};
- break :blk fn_zir;
};
+ const is_inline = blk: {
+ if (fn_proto.getExternExportInlineToken()) |maybe_inline_token| {
+ if (tree.token_ids[maybe_inline_token] == .Keyword_inline) {
+ break :blk true;
+ }
+ }
+ break :blk false;
+ };
+ const anal_state = ([2]Fn.Analysis{ .queued, .inline_only })[@boolToInt(is_inline)];
+
new_func.* = .{
- .analysis = .{ .queued = fn_zir },
+ .state = anal_state,
+ .zir = fn_zir,
+ .body = undefined,
.owner_decl = decl,
};
fn_payload.* = .{
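The `anal_state` line above selects between two enum values by indexing a
two-element array with `@boolToInt`; it is a branchless spelling of an `if`
expression (possibly sidestepping a stage1 quirk). The conventional equivalent
would be:

    const anal_state: Fn.Analysis = if (is_inline) .inline_only else .queued;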
@@ -1189,11 +1228,16 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
};
var prev_type_has_bits = false;
+ var prev_is_inline = false;
var type_changed = true;
if (decl.typedValueManaged()) |tvm| {
prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits();
type_changed = !tvm.typed_value.ty.eql(fn_type);
+ if (tvm.typed_value.val.castTag(.function)) |payload| {
+ const prev_func = payload.data;
+ prev_is_inline = prev_func.state == .inline_only;
+ }
tvm.deinit(self.gpa);
}
@@ -1211,18 +1255,26 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
decl.analysis = .complete;
decl.generation = self.generation;
- if (fn_type.hasCodeGenBits()) {
+ if (!is_inline and fn_type.hasCodeGenBits()) {
// We don't fully codegen the decl until later, but we do need to reserve a global
// offset table index for it. This allows us to codegen decls out of dependency order,
// increasing how many computations can be done in parallel.
try self.comp.bin_file.allocateDeclIndexes(decl);
try self.comp.work_queue.writeItem(.{ .codegen_decl = decl });
- } else if (prev_type_has_bits) {
+ } else if (!prev_is_inline and prev_type_has_bits) {
self.comp.bin_file.freeDecl(decl);
}
if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
if (tree.token_ids[maybe_export_token] == .Keyword_export) {
+ if (is_inline) {
+ return self.failTok(
+ &block_scope.base,
+ maybe_export_token,
+ "export of inline function",
+ .{},
+ );
+ }
const export_src = tree.token_locs[maybe_export_token].start;
const name_loc = tree.token_locs[fn_proto.getNameToken().?];
const name = tree.tokenSliceLoc(name_loc);
@@ -1230,7 +1282,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
try self.analyzeExport(&block_scope.base, export_src, name, decl);
}
}
- return type_changed;
+ return type_changed or is_inline != prev_is_inline;
},
.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", ast_node);
@@ -1242,12 +1294,17 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
errdefer decl_arena.deinit();
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+ var decl_inst_table = Scope.Block.InstTable.init(self.gpa);
+ defer decl_inst_table.deinit();
+
var block_scope: Scope.Block = .{
.parent = null,
+ .inst_table = &decl_inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &decl_arena.allocator,
+ .inlining = null,
.is_comptime = true,
};
defer block_scope.instructions.deinit(self.gpa);
@@ -1303,23 +1360,30 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const src = tree.token_locs[init_node.firstToken()].start;
const init_inst = try astgen.expr(self, &gen_scope.base, init_result_loc, init_node);
- if (self.comp.verbose_ir) {
+ if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {};
}
+ var var_inst_table = Scope.Block.InstTable.init(self.gpa);
+ defer var_inst_table.deinit();
+
var inner_block: Scope.Block = .{
.parent = null,
+ .inst_table = &var_inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &gen_scope_arena.allocator,
+ .inlining = null,
.is_comptime = true,
};
defer inner_block.instructions.deinit(self.gpa);
- try zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items });
+ try zir_sema.analyzeBody(self, &inner_block, .{
+ .instructions = gen_scope.instructions.items,
+ });
// The result location guarantees the type coercion.
- const analyzed_init_inst = init_inst.analyzed_inst.?;
+ const analyzed_init_inst = var_inst_table.get(init_inst).?;
// The is_comptime in the Scope.Block guarantees the result is comptime-known.
const val = analyzed_init_inst.value().?;
@@ -1347,7 +1411,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.val = Value.initTag(.type_type),
});
const var_type = try astgen.expr(self, &type_scope.base, .{ .ty = type_type }, type_node);
- if (self.comp.verbose_ir) {
+ if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {};
}
@@ -1423,21 +1487,26 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
defer gen_scope.instructions.deinit(self.gpa);
_ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr);
- if (self.comp.verbose_ir) {
+ if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
zir.dumpZir(self.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
}
+ var inst_table = Scope.Block.InstTable.init(self.gpa);
+ defer inst_table.deinit();
+
var block_scope: Scope.Block = .{
.parent = null,
+ .inst_table = &inst_table,
.func = null,
.decl = decl,
.instructions = .{},
.arena = &analysis_arena.allocator,
+ .inlining = null,
.is_comptime = true,
};
defer block_scope.instructions.deinit(self.gpa);
- _ = try zir_sema.analyzeBody(self, &block_scope.base, .{
+ _ = try zir_sema.analyzeBody(self, &block_scope, .{
.instructions = gen_scope.instructions.items,
});
@@ -1496,12 +1565,10 @@ fn getSrcModule(self: *Module, root_scope: *Scope.ZIRModule) !*zir.Module {
}
}
-fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree {
+pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
const tracy = trace(@src());
defer tracy.end();
- const root_scope = container_scope.file_scope;
-
switch (root_scope.status) {
.never_loaded, .unloaded_success => {
try self.failed_files.ensureCapacity(self.gpa, self.failed_files.items().len + 1);
@@ -1549,7 +1616,7 @@ pub fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void
// We may be analyzing it for the first time, or this may be
// an incremental update. This code handles both cases.
- const tree = try self.getAstTree(container_scope);
+ const tree = try self.getAstTree(container_scope.file_scope);
const decls = tree.root_node.decls();
try self.comp.work_queue.ensureUnusedCapacity(decls.len);
@@ -1806,25 +1873,28 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
// Use the Decl's arena for function memory.
var arena = decl.typed_value.most_recent.arena.?.promote(self.gpa);
defer decl.typed_value.most_recent.arena.?.* = arena.state;
+ var inst_table = Scope.Block.InstTable.init(self.gpa);
+ defer inst_table.deinit();
var inner_block: Scope.Block = .{
.parent = null,
+ .inst_table = &inst_table,
.func = func,
.decl = decl,
.instructions = .{},
.arena = &arena.allocator,
+ .inlining = null,
.is_comptime = false,
};
defer inner_block.instructions.deinit(self.gpa);
- const fn_zir = func.analysis.queued;
- defer fn_zir.arena.promote(self.gpa).deinit();
- func.analysis = .{ .in_progress = {} };
+ func.state = .in_progress;
log.debug("set {s} to in_progress\n", .{decl.name});
- try zir_sema.analyzeBody(self, &inner_block.base, fn_zir.body);
+ try zir_sema.analyzeBody(self, &inner_block, func.zir);
const instructions = try arena.allocator.dupe(*Inst, inner_block.instructions.items);
- func.analysis = .{ .success = .{ .instructions = instructions } };
+ func.state = .success;
+ func.body = .{ .instructions = instructions };
log.debug("set {s} to success\n", .{decl.name});
}
@@ -2321,7 +2391,7 @@ pub fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) Inn
self.ensureDeclAnalyzed(decl) catch |err| {
if (scope.cast(Scope.Block)) |block| {
if (block.func) |func| {
- func.analysis = .dependency_failure;
+ func.state = .dependency_failure;
} else {
block.decl.analysis = .dependency_failure;
}
@@ -3020,11 +3090,20 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Com
},
.block => {
const block = scope.cast(Scope.Block).?;
- if (block.func) |func| {
- func.analysis = .sema_failure;
+ if (block.inlining) |inlining| {
+ if (inlining.shared.caller) |func| {
+ func.state = .sema_failure;
+ } else {
+ block.decl.analysis = .sema_failure;
+ block.decl.generation = self.generation;
+ }
} else {
- block.decl.analysis = .sema_failure;
- block.decl.generation = self.generation;
+ if (block.func) |func| {
+ func.state = .sema_failure;
+ } else {
+ block.decl.analysis = .sema_failure;
+ block.decl.generation = self.generation;
+ }
}
self.failed_decls.putAssumeCapacityNoClobber(block.decl, err_msg);
},
@@ -3380,10 +3459,12 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
var fail_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
+ .inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer fail_block.instructions.deinit(mod.gpa);
@@ -3427,3 +3508,34 @@ pub fn validateVarType(mod: *Module, scope: *Scope, src: usize, ty: Type) !void
return mod.fail(scope, src, "variable of type '{}' must be const or comptime", .{ty});
}
}
+
+/// Identifier token -> String (allocated in scope.arena())
+pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
+ const tree = scope.tree();
+
+ const ident_name = tree.tokenSlice(token);
+ if (mem.startsWith(u8, ident_name, "@")) {
+ const raw_string = ident_name[1..];
+ var bad_index: usize = undefined;
+ return std.zig.parseStringLiteral(scope.arena(), raw_string, &bad_index) catch |err| switch (err) {
+ error.InvalidCharacter => {
+ const bad_byte = raw_string[bad_index];
+ const src = tree.token_locs[token].start;
+ return mod.fail(scope, src + 1 + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
+ },
+ else => |e| return e,
+ };
+ }
+ return ident_name;
+}
+
+pub fn emitBackwardBranch(mod: *Module, block: *Scope.Block, src: usize) !void {
+ const shared = block.inlining.?.shared;
+ shared.branch_count += 1;
+ if (shared.branch_count > shared.branch_quota) {
+ // TODO show the "called from here" stack
+ return mod.fail(&block.base, src, "evaluation exceeded {d} backwards branches", .{
+ shared.branch_quota,
+ });
+ }
+}
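The shared `branch_quota` of 1000 initialized at the call site in zir_sema.zig
mirrors stage1's default comptime branch limit. In user code the limit surfaces
as follows; whether stage2 wires up `@setEvalBranchQuota` at this point in
history is not shown in this diff, so treat this as stage1 semantics:

    fn countdown(n: u32) u32 {
        var i: u32 = n;
        while (i != 0) : (i -= 1) {}
        return i;
    }

    comptime {
        // 5000 backward branches would exceed the default quota of 1000,
        // so raise it before evaluating.
        @setEvalBranchQuota(10_000);
        _ = countdown(5000);
    }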
diff --git a/src/astgen.zig b/src/astgen.zig
index a50fa026ca..7e4e9e2271 100644
--- a/src/astgen.zig
+++ b/src/astgen.zig
@@ -384,7 +384,7 @@ fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpr
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
else => if (node.getLabel()) |break_label| {
- const label_name = try identifierTokenString(mod, parent_scope, break_label);
+ const label_name = try mod.identifierTokenString(parent_scope, break_label);
return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name});
} else {
return mod.failTok(parent_scope, src, "break expression outside loop", .{});
@@ -426,7 +426,7 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
else => if (node.getLabel()) |break_label| {
- const label_name = try identifierTokenString(mod, parent_scope, break_label);
+ const label_name = try mod.identifierTokenString(parent_scope, break_label);
return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name});
} else {
return mod.failTok(parent_scope, src, "continue expression outside loop", .{});
@@ -551,7 +551,7 @@ fn varDecl(
}
const tree = scope.tree();
const name_src = tree.token_locs[node.name_token].start;
- const ident_name = try identifierTokenString(mod, scope, node.name_token);
+ const ident_name = try mod.identifierTokenString(scope, node.name_token);
// Local variables shadowing detection, including function parameters.
{
@@ -843,7 +843,7 @@ fn typeInixOp(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp, op_ins
fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.EnumLiteral) !*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.name].start;
- const name = try identifierTokenString(mod, scope, node.name);
+ const name = try mod.identifierTokenString(scope, node.name);
return addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{});
}
@@ -864,7 +864,7 @@ fn errorSetDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Erro
for (decls) |decl, i| {
const tag = decl.castTag(.ErrorTag).?;
- fields[i] = try identifierTokenString(mod, scope, tag.name_token);
+ fields[i] = try mod.identifierTokenString(scope, tag.name_token);
}
// analyzing the error set results in a decl ref, so we might need to dereference it
@@ -988,36 +988,16 @@ fn orelseCatchExpr(
/// Return whether the identifier names of two tokens are equal. Resolves @"" tokens without allocating.
/// OK in theory it could do it without allocating. This implementation allocates when the @"" form is used.
fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: ast.TokenIndex) !bool {
- const ident_name_1 = try identifierTokenString(mod, scope, token1);
- const ident_name_2 = try identifierTokenString(mod, scope, token2);
+ const ident_name_1 = try mod.identifierTokenString(scope, token1);
+ const ident_name_2 = try mod.identifierTokenString(scope, token2);
return mem.eql(u8, ident_name_1, ident_name_2);
}
-/// Identifier token -> String (allocated in scope.arena())
-fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
- const tree = scope.tree();
-
- const ident_name = tree.tokenSlice(token);
- if (mem.startsWith(u8, ident_name, "@")) {
- const raw_string = ident_name[1..];
- var bad_index: usize = undefined;
- return std.zig.parseStringLiteral(scope.arena(), raw_string, &bad_index) catch |err| switch (err) {
- error.InvalidCharacter => {
- const bad_byte = raw_string[bad_index];
- const src = tree.token_locs[token].start;
- return mod.fail(scope, src + 1 + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
- },
- else => |e| return e,
- };
- }
- return ident_name;
-}
-
pub fn identifierStringInst(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.token].start;
- const ident_name = try identifierTokenString(mod, scope, node.token);
+ const ident_name = try mod.identifierTokenString(scope, node.token);
return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = ident_name }, .{});
}
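identifierTokenString, now a public method on Module shared by all of these
call sites, is what turns an @"..." token back into its string contents. A
user-level illustration of the quoted-identifier form it handles:

    // Quoted identifiers may contain arbitrary string-literal characters;
    // the compiler parses the string literal to recover the real name.
    const @"two words" = 42;

    fn @"1st"() u32 {
        return @"two words";
    }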
@@ -1936,7 +1916,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
defer tracy.end();
const tree = scope.tree();
- const ident_name = try identifierTokenString(mod, scope, ident.token);
+ const ident_name = try mod.identifierTokenString(scope, ident.token);
const src = tree.token_locs[ident.token].start;
if (mem.eql(u8, ident_name, "_")) {
return mod.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});
diff --git a/src/codegen.zig b/src/codegen.zig
index 6530b687e5..58be73a31c 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -532,7 +532,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
self.code.items.len += 4;
try self.dbgSetPrologueEnd();
- try self.genBody(self.mod_fn.analysis.success);
+ try self.genBody(self.mod_fn.body);
const stack_end = self.max_end_stack;
if (stack_end > math.maxInt(i32))
@@ -576,7 +576,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
});
} else {
try self.dbgSetPrologueEnd();
- try self.genBody(self.mod_fn.analysis.success);
+ try self.genBody(self.mod_fn.body);
try self.dbgSetEpilogueBegin();
}
},
@@ -593,7 +593,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
try self.dbgSetPrologueEnd();
- try self.genBody(self.mod_fn.analysis.success);
+ try self.genBody(self.mod_fn.body);
// Backpatch stack offset
const stack_end = self.max_end_stack;
@@ -638,13 +638,13 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.pop(.al, .{ .fp, .pc }).toU32());
} else {
try self.dbgSetPrologueEnd();
- try self.genBody(self.mod_fn.analysis.success);
+ try self.genBody(self.mod_fn.body);
try self.dbgSetEpilogueBegin();
}
},
else => {
try self.dbgSetPrologueEnd();
- try self.genBody(self.mod_fn.analysis.success);
+ try self.genBody(self.mod_fn.body);
try self.dbgSetEpilogueBegin();
},
}
diff --git a/src/codegen/c.zig b/src/codegen/c.zig
index 684a03eb79..1a89e22d48 100644
--- a/src/codegen/c.zig
+++ b/src/codegen/c.zig
@@ -275,7 +275,7 @@ pub fn generate(file: *C, module: *Module, decl: *Decl) !void {
try writer.writeAll(" {");
const func: *Module.Fn = func_payload.data;
- const instructions = func.analysis.success.instructions;
+ const instructions = func.body.instructions;
if (instructions.len > 0) {
try writer.writeAll("\n");
for (instructions) |inst| {
diff --git a/src/codegen/wasm.zig b/src/codegen/wasm.zig
index c7ad59f5d1..036243dcca 100644
--- a/src/codegen/wasm.zig
+++ b/src/codegen/wasm.zig
@@ -63,7 +63,7 @@ pub fn genCode(buf: *ArrayList(u8), decl: *Decl) !void {
// TODO: check for and handle death of instructions
const tv = decl.typed_value.most_recent.typed_value;
const mod_fn = tv.val.castTag(.function).?.data;
- for (mod_fn.analysis.success.instructions) |inst| try genInst(buf, decl, inst);
+ for (mod_fn.body.instructions) |inst| try genInst(buf, decl, inst);
// Write 'end' opcode
try writer.writeByte(0x0B);
diff --git a/src/config.zig.in b/src/config.zig.in
index 9d16cf3824..0dbd3f3c91 100644
--- a/src/config.zig.in
+++ b/src/config.zig.in
@@ -2,7 +2,6 @@ pub const have_llvm = true;
pub const version: [:0]const u8 = "@ZIG_VERSION@";
pub const semver = try @import("std").SemanticVersion.parse(version);
pub const log_scopes: []const []const u8 = &[_][]const u8{};
-pub const zir_dumps: []const []const u8 = &[_][]const u8{};
pub const enable_tracy = false;
pub const is_stage1 = true;
pub const skip_non_native = false;
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index 116d7c9859..d74236f8c1 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -2178,16 +2178,6 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
else => false,
};
if (is_fn) {
- const zir_dumps = if (std.builtin.is_test) &[0][]const u8{} else build_options.zir_dumps;
- if (zir_dumps.len != 0) {
- for (zir_dumps) |fn_name| {
- if (mem.eql(u8, mem.spanZ(decl.name), fn_name)) {
- std.debug.print("\n{s}\n", .{decl.name});
- typed_value.val.castTag(.function).?.data.dump(module.*);
- }
- }
- }
-
// For functions we need to add a prologue to the debug line program.
try dbg_line_buffer.ensureCapacity(26);
diff --git a/src/link/MachO/DebugSymbols.zig b/src/link/MachO/DebugSymbols.zig
index c70fcc5825..11f87d5495 100644
--- a/src/link/MachO/DebugSymbols.zig
+++ b/src/link/MachO/DebugSymbols.zig
@@ -936,16 +936,6 @@ pub fn initDeclDebugBuffers(
const typed_value = decl.typed_value.most_recent.typed_value;
switch (typed_value.ty.zigTypeTag()) {
.Fn => {
- const zir_dumps = if (std.builtin.is_test) &[0][]const u8{} else build_options.zir_dumps;
- if (zir_dumps.len != 0) {
- for (zir_dumps) |fn_name| {
- if (mem.eql(u8, mem.spanZ(decl.name), fn_name)) {
- std.debug.print("\n{}\n", .{decl.name});
- typed_value.val.cast(Value.Payload.Function).?.func.dump(module.*);
- }
- }
- }
-
// For functions we need to add a prologue to the debug line program.
try dbg_line_buffer.ensureCapacity(26);
diff --git a/src/llvm_backend.zig b/src/llvm_backend.zig
index 51d1a0840e..97406797b6 100644
--- a/src/llvm_backend.zig
+++ b/src/llvm_backend.zig
@@ -294,7 +294,7 @@ pub const LLVMIRModule = struct {
const entry_block = llvm_func.appendBasicBlock("Entry");
self.builder.positionBuilderAtEnd(entry_block);
- const instructions = func.analysis.success.instructions;
+ const instructions = func.body.instructions;
for (instructions) |inst| {
switch (inst.tag) {
.breakpoint => try self.genBreakpoint(inst.castTag(.breakpoint).?),
diff --git a/src/main.zig b/src/main.zig
index 519671bccf..155dcc74de 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -1818,7 +1818,7 @@ fn buildOutputType(
};
updateModule(gpa, comp, zir_out_path, hook) catch |err| switch (err) {
- error.SemanticAnalyzeFail => process.exit(1),
+ error.SemanticAnalyzeFail => if (!watch) process.exit(1),
else => |e| return e,
};
try comp.makeBinFileExecutable();
diff --git a/src/value.zig b/src/value.zig
index 10f58fa44f..11c385b446 100644
--- a/src/value.zig
+++ b/src/value.zig
@@ -330,11 +330,14 @@ pub const Value = extern union {
.int_type => return self.copyPayloadShallow(allocator, Payload.IntType),
.int_u64 => return self.copyPayloadShallow(allocator, Payload.U64),
.int_i64 => return self.copyPayloadShallow(allocator, Payload.I64),
- .int_big_positive => {
- @panic("TODO implement copying of big ints");
- },
- .int_big_negative => {
- @panic("TODO implement copying of big ints");
+ .int_big_positive, .int_big_negative => {
+ const old_payload = self.cast(Payload.BigInt).?;
+ const new_payload = try allocator.create(Payload.BigInt);
+ new_payload.* = .{
+ .base = .{ .tag = self.ptr_otherwise.tag },
+ .data = try allocator.dupe(std.math.big.Limb, old_payload.data),
+ };
+ return Value{ .ptr_otherwise = &new_payload.base };
},
.function => return self.copyPayloadShallow(allocator, Payload.Function),
.extern_fn => return self.copyPayloadShallow(allocator, Payload.Decl),
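The new branch replaces the panic with a real deep copy: big-int payloads own a
slice of limbs, so copying the Value must duplicate the limbs into the
destination allocator instead of aliasing them. The core of the idea in
isolation (hypothetical helper, era-appropriate *Allocator style):

    const std = @import("std");
    const Limb = std.math.big.Limb;

    /// Duplicate a limbs slice so the copied value owns its own storage;
    /// aliasing would mean a double free when both values are deinitialized.
    fn dupeLimbs(allocator: *std.mem.Allocator, limbs: []const Limb) ![]Limb {
        return allocator.dupe(Limb, limbs);
    }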
diff --git a/src/zir.zig b/src/zir.zig
index 0593cbd8fd..3fd2ac7c80 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -25,12 +25,13 @@ pub const Decl = struct {
/// These are instructions that correspond to the ZIR text format. See `ir.Inst` for
/// in-memory, analyzed instructions with types and values.
+/// We use a table to map these instructions to their respective semantically analyzed
+/// instructions because it is possible to have multiple analyses of the same ZIR
+/// happening at the same time.
pub const Inst = struct {
tag: Tag,
/// Byte offset into the source.
src: usize,
- /// Pre-allocated field for mapping ZIR text instructions to post-analysis instructions.
- analyzed_inst: ?*ir.Inst = null,
/// These names are used directly as the instruction names in the text format.
pub const Tag = enum {
@@ -793,7 +794,9 @@ pub const Inst = struct {
fn_type: *Inst,
body: Module.Body,
},
- kw_args: struct {},
+ kw_args: struct {
+ is_inline: bool = false,
+ },
};
pub const FnType = struct {
@@ -1847,44 +1850,325 @@ pub fn emit(allocator: *Allocator, old_module: *IrModule) !Module {
/// For debugging purposes, prints a function representation to stderr.
pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {
const allocator = old_module.gpa;
- var ctx: EmitZIR = .{
+ var ctx: DumpTzir = .{
.allocator = allocator,
- .decls = .{},
.arena = std.heap.ArenaAllocator.init(allocator),
.old_module = &old_module,
- .next_auto_name = 0,
- .names = std.StringArrayHashMap(void).init(allocator),
- .primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator),
- .indent = 0,
- .block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator),
- .loop_table = std.AutoHashMap(*ir.Inst.Loop, *Inst.Loop).init(allocator),
- .metadata = std.AutoHashMap(*Inst, Module.MetaData).init(allocator),
- .body_metadata = std.AutoHashMap(*Module.Body, Module.BodyMetaData).init(allocator),
+ .module_fn = module_fn,
+ .indent = 2,
+ .inst_table = DumpTzir.InstTable.init(allocator),
+ .partial_inst_table = DumpTzir.InstTable.init(allocator),
+ .const_table = DumpTzir.InstTable.init(allocator),
};
- defer ctx.metadata.deinit();
- defer ctx.body_metadata.deinit();
- defer ctx.block_table.deinit();
- defer ctx.loop_table.deinit();
- defer ctx.decls.deinit(allocator);
- defer ctx.names.deinit();
- defer ctx.primitive_table.deinit();
+ defer ctx.inst_table.deinit();
+ defer ctx.partial_inst_table.deinit();
+ defer ctx.const_table.deinit();
defer ctx.arena.deinit();
- const fn_ty = module_fn.owner_decl.typed_value.most_recent.typed_value.ty;
- _ = ctx.emitFn(module_fn, 0, fn_ty) catch |err| {
- std.debug.print("unable to dump function: {s}\n", .{@errorName(err)});
- return;
- };
- var module = Module{
- .decls = ctx.decls.items,
- .arena = ctx.arena,
- .metadata = ctx.metadata,
- .body_metadata = ctx.body_metadata,
- };
-
- module.dump();
+ switch (module_fn.state) {
+ .queued => std.debug.print("(queued)", .{}),
+ .inline_only => std.debug.print("(inline_only)", .{}),
+ .in_progress => std.debug.print("(in_progress)", .{}),
+ .sema_failure => std.debug.print("(sema_failure)", .{}),
+ .dependency_failure => std.debug.print("(dependency_failure)", .{}),
+ .success => {
+ const writer = std.io.getStdErr().writer();
+ ctx.dump(module_fn.body, writer) catch @panic("failed to dump TZIR");
+ },
+ }
}
+const DumpTzir = struct {
+ allocator: *Allocator,
+ arena: std.heap.ArenaAllocator,
+ old_module: *const IrModule,
+ module_fn: *IrModule.Fn,
+ indent: usize,
+ inst_table: InstTable,
+ partial_inst_table: InstTable,
+ const_table: InstTable,
+ next_index: usize = 0,
+ next_partial_index: usize = 0,
+ next_const_index: usize = 0,
+
+ const InstTable = std.AutoArrayHashMap(*ir.Inst, usize);
+
+ fn dump(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) !void {
+ // First pass to pre-populate the table so that we can show even invalid references.
+ // Must iterate the same order we iterate the second time.
+ // We also look for constants and put them in the const_table.
+ for (body.instructions) |inst| {
+ try dtz.inst_table.put(inst, dtz.next_index);
+ dtz.next_index += 1;
+ switch (inst.tag) {
+ .alloc,
+ .retvoid,
+ .unreach,
+ .breakpoint,
+ .dbg_stmt,
+ => {},
+
+ .ref,
+ .ret,
+ .bitcast,
+ .not,
+ .isnonnull,
+ .isnull,
+ .iserr,
+ .ptrtoint,
+ .floatcast,
+ .intcast,
+ .load,
+ .unwrap_optional,
+ .wrap_optional,
+ => {
+ const un_op = inst.cast(ir.Inst.UnOp).?;
+ try dtz.findConst(un_op.operand);
+ },
+
+ .add,
+ .sub,
+ .cmp_lt,
+ .cmp_lte,
+ .cmp_eq,
+ .cmp_gte,
+ .cmp_gt,
+ .cmp_neq,
+ .store,
+ .booland,
+ .boolor,
+ .bitand,
+ .bitor,
+ .xor,
+ => {
+ const bin_op = inst.cast(ir.Inst.BinOp).?;
+ try dtz.findConst(bin_op.lhs);
+ try dtz.findConst(bin_op.rhs);
+ },
+
+ .arg => {},
+
+ .br => {
+ const br = inst.castTag(.br).?;
+ try dtz.findConst(&br.block.base);
+ try dtz.findConst(br.operand);
+ },
+
+ .brvoid => {
+ const brvoid = inst.castTag(.brvoid).?;
+ try dtz.findConst(&brvoid.block.base);
+ },
+
+ // TODO fill out this debug printing
+ .assembly,
+ .block,
+ .call,
+ .condbr,
+ .constant,
+ .loop,
+ .varptr,
+ .switchbr,
+ => {},
+ }
+ }
+
+ std.debug.print("Module.Function(name={s}):\n", .{dtz.module_fn.owner_decl.name});
+
+ for (dtz.const_table.items()) |entry| {
+ const constant = entry.key.castTag(.constant).?;
+ try writer.print(" @{d}: {} = {};\n", .{
+ entry.value, constant.base.ty, constant.val,
+ });
+ }
+
+ return dtz.dumpBody(body, writer);
+ }
+
+ fn dumpBody(dtz: *DumpTzir, body: ir.Body, writer: std.fs.File.Writer) !void {
+ for (body.instructions) |inst| {
+ const my_index = dtz.next_partial_index;
+ try dtz.partial_inst_table.put(inst, my_index);
+ dtz.next_partial_index += 1;
+
+ try writer.writeByteNTimes(' ', dtz.indent);
+ try writer.print("%{d}: {} = {s}(", .{
+ my_index, inst.ty, @tagName(inst.tag),
+ });
+ switch (inst.tag) {
+ .alloc,
+ .retvoid,
+ .unreach,
+ .breakpoint,
+ .dbg_stmt,
+ => try writer.writeAll(")\n"),
+
+ .ref,
+ .ret,
+ .bitcast,
+ .not,
+ .isnonnull,
+ .isnull,
+ .iserr,
+ .ptrtoint,
+ .floatcast,
+ .intcast,
+ .load,
+ .unwrap_optional,
+ .wrap_optional,
+ => {
+ const un_op = inst.cast(ir.Inst.UnOp).?;
+ if (dtz.partial_inst_table.get(un_op.operand)) |operand_index| {
+ try writer.print("%{d})\n", .{operand_index});
+ } else if (dtz.const_table.get(un_op.operand)) |operand_index| {
+ try writer.print("@{d})\n", .{operand_index});
+ } else if (dtz.inst_table.get(un_op.operand)) |operand_index| {
+ try writer.print("%{d}) // Instruction does not dominate all uses!\n", .{
+ operand_index,
+ });
+ } else {
+ try writer.writeAll("!BADREF!)\n");
+ }
+ },
+
+ .add,
+ .sub,
+ .cmp_lt,
+ .cmp_lte,
+ .cmp_eq,
+ .cmp_gte,
+ .cmp_gt,
+ .cmp_neq,
+ .store,
+ .booland,
+ .boolor,
+ .bitand,
+ .bitor,
+ .xor,
+ => {
+ var lhs_kinky: ?usize = null;
+ var rhs_kinky: ?usize = null;
+
+ const bin_op = inst.cast(ir.Inst.BinOp).?;
+ if (dtz.partial_inst_table.get(bin_op.lhs)) |operand_index| {
+ try writer.print("%{d}, ", .{operand_index});
+ } else if (dtz.const_table.get(bin_op.lhs)) |operand_index| {
+ try writer.print("@{d}, ", .{operand_index});
+ } else if (dtz.inst_table.get(bin_op.lhs)) |operand_index| {
+ lhs_kinky = operand_index;
+ try writer.print("%{d}, ", .{operand_index});
+ } else {
+ try writer.writeAll("!BADREF!, ");
+ }
+ if (dtz.partial_inst_table.get(bin_op.rhs)) |operand_index| {
+ try writer.print("%{d}", .{operand_index});
+ } else if (dtz.const_table.get(bin_op.rhs)) |operand_index| {
+ try writer.print("@{d}", .{operand_index});
+ } else if (dtz.inst_table.get(bin_op.rhs)) |operand_index| {
+ rhs_kinky = operand_index;
+ try writer.print("%{d}", .{operand_index});
+ } else {
+ try writer.writeAll("!BADREF!");
+ }
+ if (lhs_kinky != null or rhs_kinky != null) {
+ try writer.writeAll(") // Instruction does not dominate all uses!");
+ if (lhs_kinky) |lhs| {
+ try writer.print(" %{d}", .{lhs});
+ }
+ if (rhs_kinky) |rhs| {
+ try writer.print(" %{d}", .{rhs});
+ }
+ try writer.writeAll("\n");
+ } else {
+ try writer.writeAll(")\n");
+ }
+ },
+
+ .arg => {
+ const arg = inst.castTag(.arg).?;
+ try writer.print("{s})\n", .{arg.name});
+ },
+
+ .br => {
+ const br = inst.castTag(.br).?;
+
+ var lhs_kinky: ?usize = null;
+ var rhs_kinky: ?usize = null;
+
+ if (dtz.partial_inst_table.get(&br.block.base)) |operand_index| {
+ try writer.print("%{d}, ", .{operand_index});
+ } else if (dtz.const_table.get(&br.block.base)) |operand_index| {
+ try writer.print("@{d}, ", .{operand_index});
+ } else if (dtz.inst_table.get(&br.block.base)) |operand_index| {
+ lhs_kinky = operand_index;
+ try writer.print("%{d}, ", .{operand_index});
+ } else {
+ try writer.writeAll("!BADREF!, ");
+ }
+
+ if (dtz.partial_inst_table.get(br.operand)) |operand_index| {
+ try writer.print("%{d}", .{operand_index});
+ } else if (dtz.const_table.get(br.operand)) |operand_index| {
+ try writer.print("@{d}", .{operand_index});
+ } else if (dtz.inst_table.get(br.operand)) |operand_index| {
+ rhs_kinky = operand_index;
+ try writer.print("%{d}", .{operand_index});
+ } else {
+ try writer.writeAll("!BADREF!");
+ }
+
+ if (lhs_kinky != null or rhs_kinky != null) {
+ try writer.writeAll(") // Instruction does not dominate all uses!");
+ if (lhs_kinky) |lhs| {
+ try writer.print(" %{d}", .{lhs});
+ }
+ if (rhs_kinky) |rhs| {
+ try writer.print(" %{d}", .{rhs});
+ }
+ try writer.writeAll("\n");
+ } else {
+ try writer.writeAll(")\n");
+ }
+ },
+
+ .brvoid => {
+ const brvoid = inst.castTag(.brvoid).?;
+ if (dtz.partial_inst_table.get(&brvoid.block.base)) |operand_index| {
+ try writer.print("%{d})\n", .{operand_index});
+ } else if (dtz.const_table.get(&brvoid.block.base)) |operand_index| {
+ try writer.print("@{d})\n", .{operand_index});
+ } else if (dtz.inst_table.get(&brvoid.block.base)) |operand_index| {
+ try writer.print("%{d}) // Instruction does not dominate all uses!\n", .{
+ operand_index,
+ });
+ } else {
+ try writer.writeAll("!BADREF!)\n");
+ }
+ },
+
+ // TODO fill out this debug printing
+ .assembly,
+ .block,
+ .call,
+ .condbr,
+ .constant,
+ .loop,
+ .varptr,
+ .switchbr,
+ => {
+ try writer.writeAll("!TODO!)\n");
+ },
+ }
+ }
+ }
+
+ fn findConst(dtz: *DumpTzir, operand: *ir.Inst) !void {
+ if (operand.tag == .constant) {
+ try dtz.const_table.put(operand, dtz.next_const_index);
+ dtz.next_const_index += 1;
+ }
+ }
+};
+
const EmitZIR = struct {
allocator: *Allocator,
arena: std.heap.ArenaAllocator,
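For reference, a hypothetical dump produced by this printer for a tiny function
(names and types invented) would look like:

    Module.Function(name=add):
      @0: u32 = 1;
      %0: u32 = arg(x)
      %1: u32 = add(%0, @0)
      %2: noreturn = ret(%1)

Constants found by the pre-pass are listed first with @-indices from
const_table; body instructions are numbered separately with %-indices, and
operands that fail the dominance lookups are flagged inline rather than
crashing the dump.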
@@ -2072,11 +2356,12 @@ const EmitZIR = struct {
var instructions = std.ArrayList(*Inst).init(self.allocator);
defer instructions.deinit();
- switch (module_fn.analysis) {
+ switch (module_fn.state) {
.queued => unreachable,
.in_progress => unreachable,
- .success => |body| {
- try self.emitBody(body, &inst_table, &instructions);
+ .inline_only => unreachable,
+ .success => {
+ try self.emitBody(module_fn.body, &inst_table, &instructions);
},
.sema_failure => {
const err_msg = self.old_module.failed_decls.get(module_fn.owner_decl).?;
@@ -2154,7 +2439,9 @@ const EmitZIR = struct {
.fn_type = fn_type.inst,
.body = .{ .instructions = arena_instrs },
},
- .kw_args = .{},
+ .kw_args = .{
+ .is_inline = module_fn.state == .inline_only,
+ },
};
return self.emitUnnamedDecl(&fn_inst.base);
}
diff --git a/src/zir_sema.zig b/src/zir_sema.zig
index f9cd0e1a3d..a5627933e1 100644
--- a/src/zir_sema.zig
+++ b/src/zir_sema.zig
@@ -159,16 +159,11 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
}
}
-pub fn analyzeBody(mod: *Module, scope: *Scope, body: zir.Module.Body) !void {
- for (body.instructions) |src_inst, i| {
- const analyzed_inst = try analyzeInst(mod, scope, src_inst);
- src_inst.analyzed_inst = analyzed_inst;
+pub fn analyzeBody(mod: *Module, block: *Scope.Block, body: zir.Module.Body) !void {
+ for (body.instructions) |src_inst| {
+ const analyzed_inst = try analyzeInst(mod, &block.base, src_inst);
+ try block.inst_table.putNoClobber(src_inst, analyzed_inst);
if (analyzed_inst.ty.zigTypeTag() == .NoReturn) {
- for (body.instructions[i..]) |unreachable_inst| {
- if (unreachable_inst.castTag(.dbg_stmt)) |dbg_stmt| {
- return mod.fail(scope, dbg_stmt.base.src, "unreachable code", .{});
- }
- }
break;
}
}
@@ -180,8 +175,8 @@ pub fn analyzeBodyValueAsType(
zir_result_inst: *zir.Inst,
body: zir.Module.Body,
) !Type {
- try analyzeBody(mod, &block_scope.base, body);
- const result_inst = zir_result_inst.analyzed_inst.?;
+ try analyzeBody(mod, block_scope, body);
+ const result_inst = block_scope.inst_table.get(zir_result_inst).?;
const val = try mod.resolveConstValue(&block_scope.base, result_inst);
return val.toType(block_scope.base.arena());
}
@@ -264,30 +259,9 @@ fn resolveCompleteZirDecl(mod: *Module, scope: *Scope, src_decl: *zir.Decl) Inne
return decl;
}
-/// TODO Look into removing this function. The body is only needed for .zir files, not .zig files.
-pub fn resolveInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*Inst {
- if (old_inst.analyzed_inst) |inst| return inst;
-
- // If this assert trips, the instruction that was referenced did not get properly
- // analyzed before it was referenced.
- const zir_module = scope.namespace().cast(Scope.ZIRModule).?;
- const entry = if (old_inst.cast(zir.Inst.DeclVal)) |declval| blk: {
- const decl_name = declval.positionals.name;
- const entry = zir_module.contents.module.findDecl(decl_name) orelse
- return mod.fail(scope, old_inst.src, "decl '{s}' not found", .{decl_name});
- break :blk entry;
- } else blk: {
- // If this assert trips, the instruction that was referenced did not get
- // properly analyzed by a previous instruction analysis before it was
- // referenced by the current one.
- break :blk zir_module.contents.module.findInstDecl(old_inst).?;
- };
- const decl = try resolveCompleteZirDecl(mod, scope, entry.decl);
- const decl_ref = try mod.analyzeDeclRef(scope, old_inst.src, decl);
- // Note: it would be tempting here to store the result into old_inst.analyzed_inst field,
- // but this would prevent the analyzeDeclRef from happening, which is needed to properly
- // detect Decl dependencies and dependency failures on updates.
- return mod.analyzeDeref(scope, old_inst.src, decl_ref, old_inst.src);
+pub fn resolveInst(mod: *Module, scope: *Scope, zir_inst: *zir.Inst) InnerError!*Inst {
+ const block = scope.cast(Scope.Block).?;
+ return block.inst_table.get(zir_inst).?; // Instruction does not dominate all uses!
}
fn resolveConstString(mod: *Module, scope: *Scope, old_inst: *zir.Inst) ![]u8 {
@@ -575,7 +549,12 @@ fn analyzeInstCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) In
}
fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
- const b = try mod.requireRuntimeBlock(scope, inst.base.src);
+ const b = try mod.requireFunctionBlock(scope, inst.base.src);
+ if (b.inlining) |inlining| {
+ const param_index = inlining.param_index;
+ inlining.param_index += 1;
+ return inlining.casted_args[param_index];
+ }
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const param_index = b.instructions.items.len;
const param_count = fn_ty.fnParamLen();
@@ -608,15 +587,17 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
var child_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
+ .inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer child_block.instructions.deinit(mod.gpa);
- try analyzeBody(mod, &child_block.base, inst.positionals.body);
+ try analyzeBody(mod, &child_block, inst.positionals.body);
// Loop repetition is implied so the last instruction may or may not be a noreturn instruction.
@@ -630,16 +611,18 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
var child_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
.label = null,
+ .inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime or is_comptime,
};
defer child_block.instructions.deinit(mod.gpa);
- try analyzeBody(mod, &child_block.base, inst.positionals.body);
+ try analyzeBody(mod, &child_block, inst.positionals.body);
try parent_block.instructions.appendSlice(mod.gpa, child_block.instructions.items);
@@ -668,6 +651,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
var child_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
@@ -675,38 +659,53 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
// TODO @as here is working around a stage1 miscompilation bug :(
.label = @as(?Scope.Block.Label, Scope.Block.Label{
.zir_block = inst,
- .results = .{},
- .block_inst = block_inst,
+ .merges = .{
+ .results = .{},
+ .block_inst = block_inst,
+ },
}),
+ .inlining = parent_block.inlining,
.is_comptime = is_comptime or parent_block.is_comptime,
};
- const label = &child_block.label.?;
+ const merges = &child_block.label.?.merges;
defer child_block.instructions.deinit(mod.gpa);
- defer label.results.deinit(mod.gpa);
+ defer merges.results.deinit(mod.gpa);
- try analyzeBody(mod, &child_block.base, inst.positionals.body);
+ try analyzeBody(mod, &child_block, inst.positionals.body);
+
+ return analyzeBlockBody(mod, scope, &child_block, merges);
+}
+
+fn analyzeBlockBody(
+ mod: *Module,
+ scope: *Scope,
+ child_block: *Scope.Block,
+ merges: *Scope.Block.Merges,
+) InnerError!*Inst {
+ const parent_block = scope.cast(Scope.Block).?;
// Blocks must terminate with noreturn instruction.
assert(child_block.instructions.items.len != 0);
assert(child_block.instructions.items[child_block.instructions.items.len - 1].ty.isNoReturn());
- if (label.results.items.len == 0) {
- // No need for a block instruction. We can put the new instructions directly into the parent block.
+ if (merges.results.items.len == 0) {
+ // No need for a block instruction. We can put the new instructions
+ // directly into the parent block.
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
return copied_instructions[copied_instructions.len - 1];
}
- if (label.results.items.len == 1) {
+ if (merges.results.items.len == 1) {
const last_inst_index = child_block.instructions.items.len - 1;
const last_inst = child_block.instructions.items[last_inst_index];
if (last_inst.breakBlock()) |br_block| {
- if (br_block == block_inst) {
+ if (br_block == merges.block_inst) {
// No need for a block instruction. We can put the new instructions directly into the parent block.
// Here we omit the break instruction.
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items[0..last_inst_index]);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
- return label.results.items[0];
+ return merges.results.items[0];
}
}
}
@@ -715,10 +714,10 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
// Need to set the type and emit the Block instruction. This allows machine code generation
// to emit a jump instruction to after the block when it encounters the break.
- try parent_block.instructions.append(mod.gpa, &block_inst.base);
- block_inst.base.ty = try mod.resolvePeerTypes(scope, label.results.items);
- block_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
- return &block_inst.base;
+ try parent_block.instructions.append(mod.gpa, &merges.block_inst.base);
+ merges.block_inst.base.ty = try mod.resolvePeerTypes(scope, merges.results.items);
+ merges.block_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
+ return &merges.block_inst.base;
}
fn analyzeInstBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
@@ -826,28 +825,108 @@ fn analyzeInstCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError
const ret_type = func.ty.fnReturnType();
- const b = try mod.requireRuntimeBlock(scope, inst.base.src);
+ const b = try mod.requireFunctionBlock(scope, inst.base.src);
+ const is_comptime_call = b.is_comptime or inst.kw_args.modifier == .compile_time;
+ const is_inline_call = is_comptime_call or inst.kw_args.modifier == .always_inline or blk: {
+ // This logic will get simplified by
+ // https://github.com/ziglang/zig/issues/6429
+ if (try mod.resolveDefinedValue(scope, func)) |func_val| {
+ const module_fn = switch (func_val.tag()) {
+ .function => func_val.castTag(.function).?.data,
+ else => break :blk false,
+ };
+ break :blk module_fn.state == .inline_only;
+ }
+ break :blk false;
+ };
+ if (is_inline_call) {
+ const func_val = try mod.resolveConstValue(scope, func);
+ const module_fn = switch (func_val.tag()) {
+ .function => func_val.castTag(.function).?.data,
+ .extern_fn => return mod.fail(scope, inst.base.src, "{s} call of extern function", .{
+ @as([]const u8, if (is_comptime_call) "comptime" else "inline"),
+ }),
+ else => unreachable,
+ };
+
+ // Analyze the ZIR. The same ZIR gets analyzed into a runtime function
+ // or an inlined call depending on what union tag the `label` field is
+ // set to in the `Scope.Block`.
+ // This block instruction will be used to capture the return value from the
+ // inlined function.
+ const block_inst = try scope.arena().create(Inst.Block);
+ block_inst.* = .{
+ .base = .{
+ .tag = Inst.Block.base_tag,
+ .ty = ret_type,
+ .src = inst.base.src,
+ },
+ .body = undefined,
+ };
+ // If this is the top of the inline/comptime call stack, we use this data.
+ // Otherwise we pass on the shared data from the parent scope.
+ var shared_inlining = Scope.Block.Inlining.Shared{
+ .branch_count = 0,
+ .branch_quota = 1000,
+ .caller = b.func,
+ };
+ // This one is shared among sub-blocks within the same callee, but not
+ // shared among the entire inline/comptime call stack.
+ var inlining = Scope.Block.Inlining{
+ .shared = if (b.inlining) |inlining| inlining.shared else &shared_inlining,
+ .param_index = 0,
+ .casted_args = casted_args,
+ .merges = .{
+ .results = .{},
+ .block_inst = block_inst,
+ },
+ };
+ var inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer inst_table.deinit();
+
+ var child_block: Scope.Block = .{
+ .parent = null,
+ .inst_table = &inst_table,
+ .func = module_fn,
+ // Note that we pass the caller's Decl, not the callee. This causes
+ // compile errors to be attached (correctly) to the caller's Decl.
+ .decl = scope.decl().?,
+ .instructions = .{},
+ .arena = scope.arena(),
+ .label = null,
+ .inlining = &inlining,
+ .is_comptime = is_comptime_call,
+ };
+ const merges = &child_block.inlining.?.merges;
+
+ defer child_block.instructions.deinit(mod.gpa);
+ defer merges.results.deinit(mod.gpa);
+
+ try mod.emitBackwardBranch(&child_block, inst.base.src);
+
+ // This will have return instructions analyzed as break instructions to
+ // the block_inst above.
+ try analyzeBody(mod, &child_block, module_fn.zir);
+
+ const result = try analyzeBlockBody(mod, scope, &child_block, merges);
+ if (result.castTag(.constant)) |constant| {
+ log.debug("inline call resulted in {}", .{constant.val});
+ } else {
+ log.debug("inline call resulted in {}", .{result});
+ }
+ return result;
+ }
+
return mod.addCall(b, inst.base.src, ret_type, func, casted_args);
}
fn analyzeInstFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!*Inst {
const fn_type = try resolveType(mod, scope, fn_inst.positionals.fn_type);
- const fn_zir = blk: {
- var fn_arena = std.heap.ArenaAllocator.init(mod.gpa);
- errdefer fn_arena.deinit();
-
- const fn_zir = try scope.arena().create(Module.Fn.ZIR);
- fn_zir.* = .{
- .body = .{
- .instructions = fn_inst.positionals.body.instructions,
- },
- .arena = fn_arena.state,
- };
- break :blk fn_zir;
- };
const new_func = try scope.arena().create(Module.Fn);
new_func.* = .{
- .analysis = .{ .queued = fn_zir },
+ .state = if (fn_inst.kw_args.is_inline) .inline_only else .queued,
+ .zir = fn_inst.positionals.body,
+ .body = undefined,
.owner_decl = scope.decl().?,
};
return mod.constInst(scope, fn_inst.base.src, .{
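Taken together, analyzeInstCall above and the ret/retvoid handlers further down
implement inlining by re-analyzing the callee's ZIR with `inlining` set, so its
returns become breaks targeting a block wrapped around the call site. In
source-level terms the transformation is roughly (illustrative, not actual
compiler output):

    inline fn addOne(x: u32) u32 {
        return x + 1;
    }

    // A call site `const r = addOne(y);` is analyzed as if it were:
    //
    //     const r = blk: {
    //         // casted_args feeds the arg instructions directly
    //         const x = y;
    //         break :blk x + 1; // the `return` rewritten as a break
    //     };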
@@ -1312,17 +1391,17 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
const item = try mod.resolveConstValue(scope, casted);
if (target_val.eql(item)) {
- try analyzeBody(mod, scope, case.body);
+ try analyzeBody(mod, scope.cast(Scope.Block).?, case.body);
return mod.constNoReturn(scope, inst.base.src);
}
}
- try analyzeBody(mod, scope, inst.positionals.else_body);
+ try analyzeBody(mod, scope.cast(Scope.Block).?, inst.positionals.else_body);
return mod.constNoReturn(scope, inst.base.src);
}
if (inst.positionals.cases.len == 0) {
// no cases just analyze else_branch
- try analyzeBody(mod, scope, inst.positionals.else_body);
+ try analyzeBody(mod, scope.cast(Scope.Block).?, inst.positionals.else_body);
return mod.constNoReturn(scope, inst.base.src);
}
@@ -1331,10 +1410,12 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
var case_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
+ .inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer case_block.instructions.deinit(mod.gpa);
@@ -1347,7 +1428,7 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
const casted = try mod.coerce(scope, target.ty, resolved);
const item = try mod.resolveConstValue(scope, casted);
- try analyzeBody(mod, &case_block.base, case.body);
+ try analyzeBody(mod, &case_block, case.body);
cases[i] = .{
.item = item,
@@ -1356,7 +1437,7 @@ fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) In
}
case_block.instructions.items.len = 0;
- try analyzeBody(mod, &case_block.base, inst.positionals.else_body);
+ try analyzeBody(mod, &case_block, inst.positionals.else_body);
const else_body: ir.Body = .{
.instructions = try parent_block.arena.dupe(*Inst, case_block.instructions.items),
@@ -1509,7 +1590,7 @@ fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerErr
return mod.fail(scope, inst.base.src, "unable to find '{s}'", .{operand});
},
else => {
- // TODO user friendly error to string
+ // TODO: make sure this gets retried and not cached
return mod.fail(scope, inst.base.src, "unable to open '{s}': {s}", .{ operand, @errorName(err) });
},
};
@@ -1674,24 +1755,26 @@ fn analyzeInstComptimeOp(mod: *Module, scope: *Scope, res_type: Type, inst: *zir
}
const is_int = res_type.isInt() or res_type.zigTypeTag() == .ComptimeInt;
- const value = try switch (inst.base.tag) {
+ const value = switch (inst.base.tag) {
.add => blk: {
const val = if (is_int)
- Module.intAdd(scope.arena(), lhs_val, rhs_val)
+ try Module.intAdd(scope.arena(), lhs_val, rhs_val)
else
- mod.floatAdd(scope, res_type, inst.base.src, lhs_val, rhs_val);
+ try mod.floatAdd(scope, res_type, inst.base.src, lhs_val, rhs_val);
break :blk val;
},
.sub => blk: {
const val = if (is_int)
- Module.intSub(scope.arena(), lhs_val, rhs_val)
+ try Module.intSub(scope.arena(), lhs_val, rhs_val)
else
- mod.floatSub(scope, res_type, inst.base.src, lhs_val, rhs_val);
+ try mod.floatSub(scope, res_type, inst.base.src, lhs_val, rhs_val);
break :blk val;
},
else => return mod.fail(scope, inst.base.src, "TODO Implement arithmetic operand '{s}'", .{@tagName(inst.base.tag)}),
};
+ log.debug("{s}({}, {}) result: {}", .{ @tagName(inst.base.tag), lhs_val, rhs_val, value });
+
return mod.constInst(scope, inst.base.src, .{
.ty = res_type,
.val = value,
@@ -1860,35 +1943,39 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
const uncasted_cond = try resolveInst(mod, scope, inst.positionals.condition);
const cond = try mod.coerce(scope, Type.initTag(.bool), uncasted_cond);
+ const parent_block = scope.cast(Scope.Block).?;
+
if (try mod.resolveDefinedValue(scope, cond)) |cond_val| {
const body = if (cond_val.toBool()) &inst.positionals.then_body else &inst.positionals.else_body;
- try analyzeBody(mod, scope, body.*);
+ try analyzeBody(mod, parent_block, body.*);
return mod.constNoReturn(scope, inst.base.src);
}
- const parent_block = try mod.requireRuntimeBlock(scope, inst.base.src);
-
var true_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
+ .inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer true_block.instructions.deinit(mod.gpa);
- try analyzeBody(mod, &true_block.base, inst.positionals.then_body);
+ try analyzeBody(mod, &true_block, inst.positionals.then_body);
var false_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
+ .inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
};
defer false_block.instructions.deinit(mod.gpa);
- try analyzeBody(mod, &false_block.base, inst.positionals.else_body);
+ try analyzeBody(mod, &false_block, inst.positionals.else_body);
const then_body: ir.Body = .{ .instructions = try scope.arena().dupe(*Inst, true_block.instructions.items) };
const else_body: ir.Body = .{ .instructions = try scope.arena().dupe(*Inst, false_block.instructions.items) };
@@ -1912,12 +1999,26 @@ fn analyzeInstUnreachable(
fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const operand = try resolveInst(mod, scope, inst.positionals.operand);
- const b = try mod.requireRuntimeBlock(scope, inst.base.src);
+ const b = try mod.requireFunctionBlock(scope, inst.base.src);
+
+ if (b.inlining) |inlining| {
+ // We are inlining a function call; rewrite the `ret` as a `break`.
+ try inlining.merges.results.append(mod.gpa, operand);
+ return mod.addBr(b, inst.base.src, inlining.merges.block_inst, operand);
+ }
+
return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, operand);
}
fn analyzeInstRetVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
- const b = try mod.requireRuntimeBlock(scope, inst.base.src);
+ const b = try mod.requireFunctionBlock(scope, inst.base.src);
+ if (b.inlining) |inlining| {
+ // We are inlining a function call; rewrite the `retvoid` as a `breakvoid`.
+ const void_inst = try mod.constVoid(scope, inst.base.src);
+ try inlining.merges.results.append(mod.gpa, void_inst);
+ return mod.addBr(b, inst.base.src, inlining.merges.block_inst, void_inst);
+ }
+
if (b.func) |func| {
// Need to emit a compile error if returning void is not allowed.
const void_inst = try mod.constVoid(scope, inst.base.src);
@@ -1949,9 +2050,9 @@ fn analyzeBreak(
while (opt_block) |block| {
if (block.label) |*label| {
if (label.zir_block == zir_block) {
- try label.results.append(mod.gpa, operand);
- const b = try mod.requireRuntimeBlock(scope, src);
- return mod.addBr(b, src, label.block_inst, operand);
+ try label.merges.results.append(mod.gpa, operand);
+ const b = try mod.requireFunctionBlock(scope, src);
+ return mod.addBr(b, src, label.merges.block_inst, operand);
}
}
opt_block = block.parent;