author    Andrew Kelley <andrew@ziglang.org>  2023-04-07 11:12:44 -0400
committer GitHub <noreply@github.com>         2023-04-07 11:12:44 -0400
commit    48f98494fda57c5eca7b4ac899046c1dd285e471 (patch)
tree      fd96476ec302a30c51f24ac91fc5ea5e2b6637d9 /src/codegen
parent    086639630800fc52bd727163e85d174ec1ac1103 (diff)
parent    a7f674d6c1ba3d72a324aa918929c5beb36a8306 (diff)
Merge pull request #15195 from mlugg/fix/liveness-loop-defer-deaths
Liveness: defer deaths of externally-scoped instructions in loop bodies
Diffstat (limited to 'src/codegen')
 src/codegen/c.zig | 253
 1 file changed, 122 insertions(+), 131 deletions(-)
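
What the merge fixes, as a hypothetical Zig-level illustration (the function below is invented for the sketch): `x` is defined before the loop and its last use sits inside the body. If Liveness recorded that death at the use site, the C backend could recycle the local backing `x` after the first iteration, even though every later iteration still reads it. With this change, such deaths are attached to the `loop` instruction itself, and the backend frees the local only after the body has been emitted (see `airLoop` further down).

    // Hypothetical input program, invented for illustration only.
    fn sum(n: u32) u32 {
        const x: u32 = 7; // defined outside the loop
        var total: u32 = 0;
        var i: u32 = 0;
        while (i < n) : (i += 1) {
            total += x; // last use of `x`: naively, `x` would die here
        }
        return total; // after this change, `x` dies only once the loop exits
    }
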
diff --git a/src/codegen/c.zig b/src/codegen/c.zig
index 6577089806..f659827f09 100644
--- a/src/codegen/c.zig
+++ b/src/codegen/c.zig
@@ -55,7 +55,7 @@ const BlockData = struct {
result: CValue,
};
-pub const CValueMap = std.AutoHashMap(Air.Inst.Ref, CValue);
+pub const CValueMap = std.AutoHashMap(Air.Inst.Index, CValue);
pub const LazyFnKey = union(enum) {
tag_name: Decl.Index,
@@ -77,9 +77,8 @@ pub const LazyFnMap = std.AutoArrayHashMapUnmanaged(LazyFnKey, LazyFnValue);
const LoopDepth = u16;
const Local = struct {
cty_idx: CType.Index,
- /// How many loops the last definition was nested in.
- loop_depth: LoopDepth,
alignas: CType.AlignAs,
+ is_in_clone: bool,
pub fn getType(local: Local) LocalType {
return .{ .cty_idx = local.cty_idx, .alignas = local.alignas };
@@ -90,7 +89,6 @@ const LocalIndex = u16;
const LocalType = struct { cty_idx: CType.Index, alignas: CType.AlignAs };
const LocalsList = std.AutoArrayHashMapUnmanaged(LocalIndex, void);
const LocalsMap = std.AutoArrayHashMapUnmanaged(LocalType, LocalsList);
-const LocalsStack = std.ArrayListUnmanaged(LocalsMap);
const ValueRenderLocation = enum {
FunctionArgument,
@@ -279,41 +277,43 @@ pub const Function = struct {
/// Which locals are available for reuse, based on Type.
- /// Only locals in the last stack entry are available for reuse,
- /// other entries will become available on loop exit.
- free_locals_stack: LocalsStack = .{},
- free_locals_clone_depth: LoopDepth = 0,
+ free_locals_map: LocalsMap = .{},
+ is_in_clone: bool = false,
/// Locals which will not be freed by Liveness. This is used after a
- /// Function body is lowered in order to make `free_locals_stack` have
+ /// Function body is lowered in order to make `free_locals_map` have
/// 100% of the locals within so that it can be used to render the block
/// of variable declarations at the top of a function, sorted descending
/// by type alignment.
/// The value is whether the alloc is static or not.
allocs: std.AutoArrayHashMapUnmanaged(LocalIndex, bool) = .{},
- /// Needed for memory used by the keys of free_locals_stack entries.
+ /// Needed for memory used by the keys of free_locals_map entries.
arena: std.heap.ArenaAllocator,
- fn resolveInst(f: *Function, inst: Air.Inst.Ref) !CValue {
- const gop = try f.value_map.getOrPut(inst);
- if (gop.found_existing) return gop.value_ptr.*;
-
- const val = f.air.value(inst).?;
- const ty = f.air.typeOf(inst);
-
- const result: CValue = if (lowersToArray(ty, f.object.dg.module.getTarget())) result: {
- const writer = f.object.code_header.writer();
- const alignment = 0;
- const decl_c_value = try f.allocLocalValue(ty, alignment);
- const gpa = f.object.dg.gpa;
- try f.allocs.put(gpa, decl_c_value.new_local, true);
- try writer.writeAll("static ");
- try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, Const, alignment, .complete);
- try writer.writeAll(" = ");
- try f.object.dg.renderValue(writer, ty, val, .StaticInitializer);
- try writer.writeAll(";\n ");
- break :result decl_c_value;
- } else .{ .constant = inst };
+ fn resolveInst(f: *Function, ref: Air.Inst.Ref) !CValue {
+ if (Air.refToIndex(ref)) |inst| {
+ const gop = try f.value_map.getOrPut(inst);
+ if (gop.found_existing) return gop.value_ptr.*;
- gop.value_ptr.* = result;
- return result;
+ const val = f.air.value(ref).?;
+ const ty = f.air.typeOf(ref);
+
+ const result: CValue = if (lowersToArray(ty, f.object.dg.module.getTarget())) result: {
+ const writer = f.object.code_header.writer();
+ const alignment = 0;
+ const decl_c_value = try f.allocLocalValue(ty, alignment);
+ const gpa = f.object.dg.gpa;
+ try f.allocs.put(gpa, decl_c_value.new_local, true);
+ try writer.writeAll("static ");
+ try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, Const, alignment, .complete);
+ try writer.writeAll(" = ");
+ try f.object.dg.renderValue(writer, ty, val, .StaticInitializer);
+ try writer.writeAll(";\n ");
+ break :result decl_c_value;
+ } else .{ .constant = ref };
+
+ gop.value_ptr.* = result;
+ return result;
+ } else return .{ .constant = ref };
}
fn wantSafety(f: *Function) bool {
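
Context for the hunk above: an `Air.Inst.Ref` can name either an interned constant or a runtime instruction, while an `Air.Inst.Index` names only the latter. Rekeying `value_map` by `Air.Inst.Index` means constants no longer need map entries; `resolveInst` branches on `Air.refToIndex`, which yields an index for runtime instructions and null for constants. A minimal sketch of that split, assuming the reserved-low-values scheme the compiler used at the time (the enum and its two reserved values below are invented stand-ins):

    const std = @import("std");

    const Ref = enum(u32) { zero_usize, one_usize, _ }; // invented reserved refs
    const ref_start_index: u32 = @typeInfo(Ref).Enum.fields.len;

    fn refToIndex(ref: Ref) ?u32 {
        const raw = @enumToInt(ref);
        // values at or above the threshold map 1:1 onto instruction indices;
        // anything below names an interned constant with no instruction
        return if (raw >= ref_start_index) raw - ref_start_index else null;
    }

    test "constants have no index" {
        try std.testing.expect(refToIndex(.zero_usize) == null);
        try std.testing.expectEqual(@as(?u32, 0), refToIndex(@intToEnum(Ref, 2)));
    }
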
@@ -323,18 +323,14 @@ pub const Function = struct {
};
}
- fn getFreeLocals(f: *Function) *LocalsMap {
- return &f.free_locals_stack.items[f.free_locals_stack.items.len - 1];
- }
-
/// Skips the reuse logic.
fn allocLocalValue(f: *Function, ty: Type, alignment: u32) !CValue {
const gpa = f.object.dg.gpa;
const target = f.object.dg.module.getTarget();
try f.locals.append(gpa, .{
.cty_idx = try f.typeToIndex(ty, .complete),
- .loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1),
.alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target)),
+ .is_in_clone = f.is_in_clone,
});
return .{ .new_local = @intCast(LocalIndex, f.locals.items.len - 1) };
}
@@ -348,13 +344,11 @@ pub const Function = struct {
/// Only allocates the local; does not print anything.
fn allocAlignedLocal(f: *Function, ty: Type, _: CQualifiers, alignment: u32) !CValue {
const target = f.object.dg.module.getTarget();
- if (f.getFreeLocals().getPtr(.{
+ if (f.free_locals_map.getPtr(.{
.cty_idx = try f.typeToIndex(ty, .complete),
.alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target)),
})) |locals_list| {
if (locals_list.popOrNull()) |local_entry| {
- const local = &f.locals.items[local_entry.key];
- local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
return .{ .new_local = local_entry.key };
}
}
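
For reference, the reuse path above now consults a single flat map instead of the per-loop-depth stack. A standalone sketch of that structure, with simplified stand-ins for `CType.Index` and `CType.AlignAs` (everything here is invented except the map shapes, which mirror `LocalsMap`/`LocalsList` above):

    const std = @import("std");

    const LocalType = struct { cty_idx: u32, alignas: u32 }; // simplified stand-in
    const LocalsList = std.AutoArrayHashMapUnmanaged(u16, void);
    const LocalsMap = std.AutoArrayHashMapUnmanaged(LocalType, LocalsList);

    // Pop a previously freed local of exactly this type, or mint a new index.
    fn takeOrMint(map: *LocalsMap, ty: LocalType, next_index: *u16) u16 {
        if (map.getPtr(ty)) |list| {
            if (list.popOrNull()) |entry| return entry.key; // reuse a freed local
        }
        const i = next_index.*;
        next_index.* += 1;
        return i;
    }
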
@@ -485,10 +479,7 @@ pub const Function = struct {
const gpa = f.object.dg.gpa;
f.allocs.deinit(gpa);
f.locals.deinit(gpa);
- for (f.free_locals_stack.items) |*free_locals| {
- deinitFreeLocalsMap(gpa, free_locals);
- }
- f.free_locals_stack.deinit(gpa);
+ deinitFreeLocalsMap(gpa, &f.free_locals_map);
f.blocks.deinit(gpa);
f.value_map.deinit();
f.lazy_fns.deinit(gpa);
@@ -2592,8 +2583,7 @@ pub fn genFunc(f: *Function) !void {
o.code_header.appendSliceAssumeCapacity("{\n ");
const empty_header_len = o.code_header.items.len;
- f.free_locals_stack.clearRetainingCapacity();
- try f.free_locals_stack.append(gpa, .{});
+ f.free_locals_map.clearRetainingCapacity();
const main_body = f.air.getMainBody();
try genBody(f, main_body);
@@ -2605,7 +2595,8 @@ pub fn genFunc(f: *Function) !void {
// Liveness analysis, however, locals from alloc instructions will be
// missing. These are added now to complete the map. Then we can sort by
// alignment, descending.
- const free_locals = f.getFreeLocals();
+ const free_locals = &f.free_locals_map;
+ assert(f.value_map.count() == 0); // there must not be any unfreed locals
for (f.allocs.keys(), f.allocs.values()) |local_index, value| {
if (value) continue; // static
const local = f.locals.items[local_index];
@@ -3007,7 +2998,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
if (result_value == .new_local) {
log.debug("map %{d} to t{d}", .{ inst, result_value.new_local });
}
- try f.value_map.putNoClobber(Air.indexToRef(inst), switch (result_value) {
+ try f.value_map.putNoClobber(inst, switch (result_value) {
.none => continue,
.new_local => |i| .{ .local = i },
else => result_value,
@@ -3093,17 +3084,21 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const child_ty = ptr_ty.childType();
const ptr = try f.resolveInst(bin_op.lhs);
- if (!child_ty.hasRuntimeBitsIgnoreComptime()) {
- if (f.liveness.operandDies(inst, 1)) try die(f, inst, bin_op.rhs);
- return ptr;
- }
const index = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const writer = f.object.writer();
const local = try f.allocLocal(inst, f.air.typeOfIndex(inst));
try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = (");
+ try writer.writeAll(" = ");
+
+ if (!child_ty.hasRuntimeBitsIgnoreComptime()) {
+ try f.writeCValue(writer, ptr, .Initializer);
+ try writer.writeAll(";\n");
+ return local;
+ }
+
+ try writer.writeByte('(');
try f.renderType(writer, inst_ty);
try writer.writeAll(")&(");
if (ptr_ty.ptrSize() == .One) {
@@ -3229,12 +3224,11 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
}
fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
- const inst_ty = f.air.typeOfIndex(inst);
+ if (f.liveness.isUnused(inst)) return .none;
+ const inst_ty = f.air.typeOfIndex(inst);
const elem_type = inst_ty.elemType();
- if (!elem_type.isFnOrHasRuntimeBitsIgnoreComptime()) {
- return .{ .undef = inst_ty };
- }
+ if (!elem_type.isFnOrHasRuntimeBitsIgnoreComptime()) return .{ .undef = inst_ty };
const target = f.object.dg.module.getTarget();
const local = try f.allocAlignedLocal(
@@ -3249,12 +3243,11 @@ fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
}
fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
- const inst_ty = f.air.typeOfIndex(inst);
+ if (f.liveness.isUnused(inst)) return .none;
+ const inst_ty = f.air.typeOfIndex(inst);
const elem_ty = inst_ty.elemType();
- if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime()) {
- return .{ .undef = inst_ty };
- }
+ if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime()) return .{ .undef = inst_ty };
const target = f.object.dg.module.getTarget();
const local = try f.allocAlignedLocal(
@@ -3274,10 +3267,22 @@ fn airArg(f: *Function, inst: Air.Inst.Index) !CValue {
const i = f.next_arg_index;
f.next_arg_index += 1;
- return if (inst_cty != try f.typeToIndex(inst_ty, .complete))
+ const result: CValue = if (inst_cty != try f.typeToIndex(inst_ty, .complete))
.{ .arg_array = i }
else
.{ .arg = i };
+
+ if (f.liveness.isUnused(inst)) {
+ const writer = f.object.writer();
+ try writer.writeByte('(');
+ try f.renderType(writer, Type.void);
+ try writer.writeByte(')');
+ try f.writeCValue(writer, result, .Other);
+ try writer.writeAll(";\n");
+ return .none;
+ }
+
+ return result;
}
fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
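
The `airArg` change above means an unused argument is still mentioned exactly once in the emitted C, cast to void, so its death stays well-defined and the C compiler does not warn about an unused parameter. A sketch of the emission, assuming a plain writer and the backend's `aN` argument naming (the helper itself is invented; the real code goes through `renderType`/`writeCValue`):

    fn discardArg(writer: anytype, arg_index: usize) !void {
        try writer.print("(void)a{d};\n", .{arg_index});
    }
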
@@ -4191,21 +4196,23 @@ fn airCall(
var lowered_ret_buf: LowerFnRetTyBuffer = undefined;
const lowered_ret_ty = lowerFnRetTy(ret_ty, &lowered_ret_buf, target);
- const result_local = if (modifier == .always_tail) r: {
- try writer.writeAll("zig_always_tail return ");
- break :r .none;
- } else if (!lowered_ret_ty.hasRuntimeBitsIgnoreComptime())
- .none
- else if (f.liveness.isUnused(inst)) r: {
- try writer.writeByte('(');
- try f.renderType(writer, Type.void);
- try writer.writeByte(')');
- break :r .none;
- } else r: {
- const local = try f.allocLocal(inst, try lowered_ret_ty.copy(f.arena.allocator()));
- try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = ");
- break :r local;
+ const result_local = result: {
+ if (modifier == .always_tail) {
+ try writer.writeAll("zig_always_tail return ");
+ break :result .none;
+ } else if (!lowered_ret_ty.hasRuntimeBitsIgnoreComptime()) {
+ break :result .none;
+ } else if (f.liveness.isUnused(inst)) {
+ try writer.writeByte('(');
+ try f.renderType(writer, Type.void);
+ try writer.writeByte(')');
+ break :result .none;
+ } else {
+ const local = try f.allocLocal(inst, try lowered_ret_ty.copy(f.arena.allocator()));
+ try f.writeCValue(writer, local, .Other);
+ try writer.writeAll(" = ");
+ break :result local;
+ }
};
callee: {
@@ -4250,9 +4257,9 @@ fn airCall(
}
try writer.writeAll(");\n");
- const result = r: {
+ const result = result: {
if (result_local == .none or !lowersToArray(ret_ty, target))
- break :r result_local;
+ break :result result_local;
const array_local = try f.allocLocal(inst, ret_ty);
try writer.writeAll("memcpy(");
@@ -4263,7 +4270,7 @@ fn airCall(
try f.renderType(writer, ret_ty);
try writer.writeAll("));\n");
try freeLocal(f, inst, result_local.new_local, 0);
- break :r array_local;
+ break :result array_local;
};
return result;
@@ -4480,7 +4487,7 @@ fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
{
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = ");
- try f.writeCValue(writer, operand, .Other);
+ try f.writeCValue(writer, operand, .Initializer);
try writer.writeAll(";\n");
return local;
}
@@ -4630,30 +4637,16 @@ fn airLoop(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const loop = f.air.extraData(Air.Block, ty_pl.payload);
const body = f.air.extra[loop.end..][0..loop.data.body_len];
+ const liveness_loop = f.liveness.getLoop(inst);
const writer = f.object.writer();
- const gpa = f.object.dg.gpa;
- try f.free_locals_stack.insert(gpa, f.free_locals_stack.items.len - 1, .{});
-
try writer.writeAll("for (;;) ");
try genBody(f, body);
try writer.writeByte('\n');
- var old_free_locals = f.free_locals_stack.pop();
- defer deinitFreeLocalsMap(gpa, &old_free_locals);
- const new_free_locals = f.getFreeLocals();
- var it = new_free_locals.iterator();
- while (it.next()) |entry| {
- const gop = try old_free_locals.getOrPut(gpa, entry.key_ptr.*);
- if (gop.found_existing) {
- try gop.value_ptr.ensureUnusedCapacity(gpa, entry.value_ptr.count());
- for (entry.value_ptr.keys()) |local_index| {
- gop.value_ptr.putAssumeCapacityNoClobber(local_index, {});
- }
- } else gop.value_ptr.* = entry.value_ptr.move();
+ for (liveness_loop.deaths) |operand| {
+ try die(f, inst, Air.indexToRef(operand));
}
- deinitFreeLocalsMap(gpa, new_free_locals);
- new_free_locals.* = old_free_locals.move();
return .none;
}
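
This hunk is the heart of the PR: the per-loop free-locals stack, and the merge-back dance on loop exit, is gone. Liveness now computes per loop which externally-defined operands make their final death somewhere in the body and exposes them via `getLoop(inst).deaths`; the backend emits the body first and only then retires those operands, so a local can never be recycled while a later iteration might still read it. A runnable toy of that ordering, where everything (`Lowerer`, the death list) is invented for the sketch:

    const std = @import("std");

    const Lowerer = struct {
        out: std.ArrayList(u8),

        fn emit(l: *Lowerer, s: []const u8) !void {
            try l.out.appendSlice(s);
        }
    };

    // Body first, loop-level deaths second -- the ordering airLoop now uses.
    fn lowerLoop(l: *Lowerer, body: []const u8, deaths: []const u32) !void {
        try l.emit("for (;;) ");
        try l.emit(body); // locals dying "in" the loop must still be live here
        for (deaths) |operand| {
            try l.out.writer().print(" /* %{d} dies */", .{operand});
        }
    }

    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        defer _ = gpa.deinit();
        var l = Lowerer{ .out = std.ArrayList(u8).init(gpa.allocator()) };
        defer l.out.deinit();
        try lowerLoop(&l, "{ total += x; }", &.{7});
        std.debug.print("{s}\n", .{l.out.items});
    }
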
@@ -4673,7 +4666,7 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
const gpa = f.object.dg.gpa;
var cloned_map = try f.value_map.clone();
defer cloned_map.deinit();
- var cloned_frees = try cloneFreeLocalsMap(gpa, f.getFreeLocals());
+ var cloned_frees = try cloneFreeLocalsMap(gpa, &f.free_locals_map);
defer deinitFreeLocalsMap(gpa, &cloned_frees);
// Remember how many locals there were before entering the then branch so
@@ -4684,8 +4677,8 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
// that we can notice and make sure not to use them in the else branch.
// Any new allocs must be removed from the free list.
const pre_allocs_len = @intCast(LocalIndex, f.allocs.count());
- const pre_clone_depth = f.free_locals_clone_depth;
- f.free_locals_clone_depth = @intCast(LoopDepth, f.free_locals_stack.items.len);
+ const was_in_clone = f.is_in_clone;
+ f.is_in_clone = true;
for (liveness_condbr.then_deaths) |operand| {
try die(f, inst, Air.indexToRef(operand));
@@ -4706,10 +4699,10 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
f.value_map.deinit();
f.value_map = cloned_map.move();
- const free_locals = f.getFreeLocals();
+ const free_locals = &f.free_locals_map;
deinitFreeLocalsMap(gpa, free_locals);
free_locals.* = cloned_frees.move();
- f.free_locals_clone_depth = pre_clone_depth;
+ f.is_in_clone = was_in_clone;
for (liveness_condbr.else_deaths) |operand| {
try die(f, inst, Air.indexToRef(operand));
}
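
The `is_in_clone` boolean replaces the old `free_locals_clone_depth`/`loop_depth` comparison. While lowering one arm of a branch, the backend works against cloned copies of `value_map` and `free_locals_map` that are discarded afterwards, so a free recorded in the clone for a local allocated outside it would simply vanish on restore. Each local therefore remembers the cloned-ness it was allocated under, and `freeLocal` (further down) bails out when it does not match the current one. A minimal sketch of the guard, with invented stand-ins:

    const std = @import("std");

    const Local = struct { is_in_clone: bool };

    // A local may only return to the free list from the same cloned-ness it
    // was allocated under; otherwise the free would be lost when the cloned
    // maps are thrown away.
    fn mayFree(current_is_in_clone: bool, local: Local) bool {
        return current_is_in_clone == local.is_in_clone;
    }

    test "clone guard" {
        try std.testing.expect(mayFree(true, .{ .is_in_clone = true }));
        try std.testing.expect(!mayFree(true, .{ .is_in_clone = false }));
    }
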
@@ -4781,7 +4774,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
if (case_i != last_case_i) {
const old_value_map = f.value_map;
f.value_map = try old_value_map.clone();
- var free_locals = f.getFreeLocals();
+ var free_locals = &f.free_locals_map;
const old_free_locals = free_locals.*;
free_locals.* = try cloneFreeLocalsMap(gpa, free_locals);
@@ -4793,14 +4786,13 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
// we can notice and make sure not to use them in subsequent branches.
// Any new allocs must be removed from the free list.
const pre_allocs_len = @intCast(LocalIndex, f.allocs.count());
- const pre_clone_depth = f.free_locals_clone_depth;
- f.free_locals_clone_depth = @intCast(LoopDepth, f.free_locals_stack.items.len);
+ const was_in_clone = f.is_in_clone;
+ f.is_in_clone = true;
{
defer {
- f.free_locals_clone_depth = pre_clone_depth;
+ f.is_in_clone = was_in_clone;
f.value_map.deinit();
- free_locals = f.getFreeLocals();
deinitFreeLocalsMap(gpa, free_locals);
f.value_map = old_value_map;
free_locals.* = old_free_locals;
@@ -4862,8 +4854,8 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const inputs = @ptrCast([]const Air.Inst.Ref, f.air.extra[extra_i..][0..extra.data.inputs_len]);
extra_i += inputs.len;
- const result = r: {
- if (!is_volatile and f.liveness.isUnused(inst)) break :r .none;
+ const result = result: {
+ if (!is_volatile and f.liveness.isUnused(inst)) break :result .none;
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
@@ -5091,7 +5083,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
}
}
- break :r local;
+ break :result if (f.liveness.isUnused(inst)) .none else local;
};
var bt = iterateBigTomb(f, inst);
@@ -7063,21 +7055,22 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
fn airPrefetch(f: *Function, inst: Air.Inst.Index) !CValue {
const prefetch = f.air.instructions.items(.data)[inst].prefetch;
- switch (prefetch.cache) {
- .data => {},
- // The available prefetch intrinsics do not accept a cache argument; only
- // address, rw, and locality. So unless the cache is data, we do not lower
- // this instruction.
- .instruction => return .none,
- }
+
const ptr = try f.resolveInst(prefetch.ptr);
try reap(f, inst, &.{prefetch.ptr});
+
const writer = f.object.writer();
- try writer.writeAll("zig_prefetch(");
- try f.writeCValue(writer, ptr, .FunctionArgument);
- try writer.print(", {d}, {d});\n", .{
- @enumToInt(prefetch.rw), prefetch.locality,
- });
+ switch (prefetch.cache) {
+ .data => {
+ try writer.writeAll("zig_prefetch(");
+ try f.writeCValue(writer, ptr, .FunctionArgument);
+ try writer.print(", {d}, {d});\n", .{ @enumToInt(prefetch.rw), prefetch.locality });
+ },
+ // The available prefetch intrinsics do not accept a cache argument; only
+ // address, rw, and locality.
+ .instruction => {},
+ }
+
return .none;
}
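
The restructuring above also fixes an operand leak: previously, an `.instruction`-cache prefetch returned before `resolveInst`/`reap` ran, so the pointer operand's death was never processed. Now the operand is always resolved and reaped, and only the emission is conditional. A sketch of the emission side, with an invented enum and helper (`zig_prefetch` is the backend's real C macro):

    const PrefetchCache = enum { data, instruction };

    fn emitPrefetch(writer: anytype, cache: PrefetchCache, ptr: []const u8, rw: u8, locality: u8) !void {
        switch (cache) {
            .data => try writer.print("zig_prefetch({s}, {d}, {d});\n", .{ ptr, rw, locality }),
            // the C-level intrinsics take no cache argument, so nothing is
            // emitted here -- but the operand was still resolved and reaped
            .instruction => {},
        }
    }
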
@@ -7857,8 +7850,8 @@ fn reap(f: *Function, inst: Air.Inst.Index, operands: []const Air.Inst.Ref) !voi
fn die(f: *Function, inst: Air.Inst.Index, ref: Air.Inst.Ref) !void {
const ref_inst = Air.refToIndex(ref) orelse return;
+ const c_value = (f.value_map.fetchRemove(ref_inst) orelse return).value;
if (f.air.instructions.items(.tag)[ref_inst] == .constant) return;
- const c_value = (f.value_map.fetchRemove(ref) orelse return).value;
const local_index = switch (c_value) {
.local, .new_local => |l| l,
else => return,
@@ -7870,8 +7863,8 @@ fn freeLocal(f: *Function, inst: Air.Inst.Index, local_index: LocalIndex, ref_in
const gpa = f.object.dg.gpa;
const local = &f.locals.items[local_index];
log.debug("%{d}: freeing t{d} (operand %{d})", .{ inst, local_index, ref_inst });
- if (local.loop_depth < f.free_locals_clone_depth) return;
- const gop = try f.free_locals_stack.items[local.loop_depth].getOrPut(gpa, local.getType());
+ if (f.is_in_clone != local.is_in_clone) return;
+ const gop = try f.free_locals_map.getOrPut(gpa, local.getType());
if (!gop.found_existing) gop.value_ptr.* = .{};
if (std.debug.runtime_safety) {
// If this trips, an unfreeable allocation was attempted to be freed.
@@ -7935,23 +7928,21 @@ fn noticeBranchFrees(
pre_allocs_len: LocalIndex,
inst: Air.Inst.Index,
) !void {
- const free_locals = f.getFreeLocals();
-
for (f.locals.items[pre_locals_len..], pre_locals_len..) |*local, local_i| {
const local_index = @intCast(LocalIndex, local_i);
if (f.allocs.contains(local_index)) {
if (std.debug.runtime_safety) {
// new allocs are no longer freeable, so make sure they aren't in the free list
- if (free_locals.getPtr(local.getType())) |locals_list| {
+ if (f.free_locals_map.getPtr(local.getType())) |locals_list| {
assert(!locals_list.contains(local_index));
}
}
continue;
}
- // free more deeply nested locals from other branches at current depth
- assert(local.loop_depth >= f.free_locals_stack.items.len - 1);
- local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
+ // free cloned locals from other branches at current cloned-ness
+ std.debug.assert(local.is_in_clone or !f.is_in_clone);
+ local.is_in_clone = f.is_in_clone;
try freeLocal(f, inst, local_index, 0);
}
@@ -7959,6 +7950,6 @@ fn noticeBranchFrees(
const local_index = @intCast(LocalIndex, local_i);
const local = &f.locals.items[local_index];
// new allocs are no longer freeable, so remove them from the free list
- if (free_locals.getPtr(local.getType())) |locals_list| _ = locals_list.swapRemove(local_index);
+ if (f.free_locals_map.getPtr(local.getType())) |locals_list| _ = locals_list.swapRemove(local_index);
}
}