Diffstat (limited to 'src/codegen')
-rw-r--r--  src/codegen/c.zig       136
-rw-r--r--  src/codegen/c/type.zig  135
2 files changed, 145 insertions, 126 deletions
diff --git a/src/codegen/c.zig b/src/codegen/c.zig
index 1af14cb372..e1fc715f8b 100644
--- a/src/codegen/c.zig
+++ b/src/codegen/c.zig
@@ -82,15 +82,20 @@ pub const LazyFnMap = std.AutoArrayHashMapUnmanaged(LazyFnKey, LazyFnValue);
const LoopDepth = u16;
const Local = struct {
- ty: Type,
- alignment: u32,
+ cty_idx: CType.Index,
/// How many loops the last definition was nested in.
loop_depth: LoopDepth,
+ alignas: CType.AlignAs,
+
+ pub fn getType(local: Local) LocalType {
+ return .{ .cty_idx = local.cty_idx, .alignas = local.alignas };
+ }
};
const LocalIndex = u16;
+const LocalType = struct { cty_idx: CType.Index, alignas: CType.AlignAs };
const LocalsList = std.ArrayListUnmanaged(LocalIndex);
-const LocalsMap = std.ArrayHashMapUnmanaged(Type, LocalsList, Type.HashContext32, true);
+const LocalsMap = std.AutoArrayHashMapUnmanaged(LocalType, LocalsList);
const LocalsStack = std.ArrayListUnmanaged(LocalsMap);
const ValueRenderLocation = enum {
@@ -296,10 +301,6 @@ pub const Function = struct {
/// Needed for memory used by the keys of free_locals_stack entries.
arena: std.heap.ArenaAllocator,
- fn tyHashCtx(f: Function) Type.HashContext32 {
- return .{ .mod = f.object.dg.module };
- }
-
fn resolveInst(f: *Function, inst: Air.Inst.Ref) !CValue {
const gop = try f.value_map.getOrPut(inst);
if (gop.found_existing) return gop.value_ptr.*;
@@ -339,10 +340,11 @@ pub const Function = struct {
/// Skips the reuse logic.
fn allocLocalValue(f: *Function, ty: Type, alignment: u32) !CValue {
const gpa = f.object.dg.gpa;
+ const target = f.object.dg.module.getTarget();
try f.locals.append(gpa, .{
- .ty = ty,
- .alignment = alignment,
+ .cty_idx = try f.typeToIndex(ty, .complete),
.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1),
+ .alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target)),
});
return .{ .new_local = @intCast(LocalIndex, f.locals.items.len - 1) };
}
@@ -355,14 +357,15 @@ pub const Function = struct {
/// Only allocates the local; does not print anything.
fn allocAlignedLocal(f: *Function, ty: Type, _: CQualifiers, alignment: u32) !CValue {
- if (f.getFreeLocals().getPtrContext(ty, f.tyHashCtx())) |locals_list| {
- for (locals_list.items, 0..) |local_index, i| {
+ const target = f.object.dg.module.getTarget();
+ if (f.getFreeLocals().getPtr(.{
+ .cty_idx = try f.typeToIndex(ty, .complete),
+ .alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target)),
+ })) |locals_list| {
+ if (locals_list.popOrNull()) |local_index| {
const local = &f.locals.items[local_index];
- if (local.alignment >= alignment) {
- local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
- _ = locals_list.swapRemove(i);
- return .{ .new_local = local_index };
- }
+ local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
+ return .{ .new_local = local_index };
}
}
@@ -1696,21 +1699,33 @@ pub const DeclGen = struct {
alignment: u32,
kind: CType.Kind,
) error{ OutOfMemory, AnalysisFail }!void {
+ const target = dg.module.getTarget();
+ const alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target));
+ try dg.renderCTypeAndName(w, try dg.typeToIndex(ty, kind), name, qualifiers, alignas);
+ }
+
+ fn renderCTypeAndName(
+ dg: *DeclGen,
+ w: anytype,
+ cty_idx: CType.Index,
+ name: CValue,
+ qualifiers: CQualifiers,
+ alignas: CType.AlignAs,
+ ) error{ OutOfMemory, AnalysisFail }!void {
const store = &dg.ctypes.set;
const module = dg.module;
- if (alignment != 0) switch (std.math.order(alignment, ty.abiAlignment(dg.module.getTarget()))) {
- .lt => try w.print("zig_under_align({}) ", .{alignment}),
+ switch (std.math.order(alignas.@"align", alignas.abi)) {
+ .lt => try w.print("zig_under_align({}) ", .{alignas.getAlign()}),
.eq => {},
- .gt => try w.print("zig_align({}) ", .{alignment}),
- };
+ .gt => try w.print("zig_align({}) ", .{alignas.getAlign()}),
+ }
- const idx = try dg.typeToIndex(ty, kind);
const trailing =
- try renderTypePrefix(dg.decl_index, store.*, module, w, idx, .suffix, qualifiers);
+ try renderTypePrefix(dg.decl_index, store.*, module, w, cty_idx, .suffix, qualifiers);
try w.print("{}", .{trailing});
try dg.writeCValue(w, name);
- try renderTypeSuffix(dg.decl_index, store.*, module, w, idx, .suffix, .{});
+ try renderTypeSuffix(dg.decl_index, store.*, module, w, cty_idx, .suffix, .{});
}
fn declIsGlobal(dg: *DeclGen, tv: TypedValue) bool {
@@ -2589,36 +2604,27 @@ pub fn genFunc(f: *Function) !void {
if (value) continue; // static
const local = f.locals.items[local_index];
log.debug("inserting local {d} into free_locals", .{local_index});
- const gop = try free_locals.getOrPutContext(gpa, local.ty, f.tyHashCtx());
+ const gop = try free_locals.getOrPut(gpa, local.getType());
if (!gop.found_existing) gop.value_ptr.* = .{};
try gop.value_ptr.append(gpa, local_index);
}
const SortContext = struct {
- target: std.Target,
- keys: []const Type,
+ keys: []const LocalType,
- pub fn lessThan(ctx: @This(), a_index: usize, b_index: usize) bool {
- const a_ty = ctx.keys[a_index];
- const b_ty = ctx.keys[b_index];
- return b_ty.abiAlignment(ctx.target) < a_ty.abiAlignment(ctx.target);
+ pub fn lessThan(ctx: @This(), lhs_index: usize, rhs_index: usize) bool {
+ const lhs_ty = ctx.keys[lhs_index];
+ const rhs_ty = ctx.keys[rhs_index];
+ return lhs_ty.alignas.getAlign() > rhs_ty.alignas.getAlign();
}
};
- const target = o.dg.module.getTarget();
- free_locals.sort(SortContext{ .target = target, .keys = free_locals.keys() });
+ free_locals.sort(SortContext{ .keys = free_locals.keys() });
const w = o.code_header.writer();
for (free_locals.values()) |list| {
for (list.items) |local_index| {
const local = f.locals.items[local_index];
- try o.dg.renderTypeAndName(
- w,
- local.ty,
- .{ .local = local_index },
- .{},
- local.alignment,
- .complete,
- );
+ try o.dg.renderCTypeAndName(w, local.cty_idx, .{ .local = local_index }, .{}, local.alignas);
try w.writeAll(";\n ");
}
}
@@ -4486,7 +4492,7 @@ fn airLoop(f: *Function, inst: Air.Inst.Index) !CValue {
const new_free_locals = f.getFreeLocals();
var it = new_free_locals.iterator();
while (it.next()) |entry| {
- const gop = try old_free_locals.getOrPutContext(gpa, entry.key_ptr.*, f.tyHashCtx());
+ const gop = try old_free_locals.getOrPut(gpa, entry.key_ptr.*);
if (gop.found_existing) {
try gop.value_ptr.appendSlice(gpa, entry.value_ptr.items);
} else {
@@ -4522,6 +4528,10 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
// that we can notice and use them in the else branch. Any new locals must
// necessarily be free already after the then branch is complete.
const pre_locals_len = @intCast(LocalIndex, f.locals.items.len);
+ // Remember how many allocs there were before entering the then branch so
+ // that we can notice and make sure not to use them in the else branch.
+ // Any new allocs must be removed from the free list.
+ const pre_allocs_len = @intCast(LocalIndex, f.allocs.count());
const pre_clone_depth = f.free_locals_clone_depth;
f.free_locals_clone_depth = @intCast(LoopDepth, f.free_locals_stack.items.len);
@@ -4552,7 +4562,7 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
try die(f, inst, Air.indexToRef(operand));
}
- try noticeBranchFrees(f, pre_locals_len, inst);
+ try noticeBranchFrees(f, pre_locals_len, pre_allocs_len, inst);
if (needs_else) {
try genBody(f, else_body);
@@ -4627,6 +4637,10 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
// we can notice and use them in subsequent branches. Any new locals must
// necessarily be free already after the previous branch is complete.
const pre_locals_len = @intCast(LocalIndex, f.locals.items.len);
+ // Remember how many allocs there were before entering each branch so that
+ // we can notice and make sure not to use them in subsequent branches.
+ // Any new allocs must be removed from the free list.
+ const pre_allocs_len = @intCast(LocalIndex, f.allocs.count());
const pre_clone_depth = f.free_locals_clone_depth;
f.free_locals_clone_depth = @intCast(LoopDepth, f.free_locals_stack.items.len);
@@ -4647,7 +4661,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
try genBody(f, case_body);
}
- try noticeBranchFrees(f, pre_locals_len, inst);
+ try noticeBranchFrees(f, pre_locals_len, pre_allocs_len, inst);
} else {
for (liveness.deaths[case_i]) |operand| {
try die(f, inst, Air.indexToRef(operand));
@@ -7441,11 +7455,7 @@ fn freeLocal(f: *Function, inst: Air.Inst.Index, local_index: LocalIndex, ref_in
const local = &f.locals.items[local_index];
log.debug("%{d}: freeing t{d} (operand %{d})", .{ inst, local_index, ref_inst });
if (local.loop_depth < f.free_locals_clone_depth) return;
- const gop = try f.free_locals_stack.items[local.loop_depth].getOrPutContext(
- gpa,
- local.ty,
- f.tyHashCtx(),
- );
+ const gop = try f.free_locals_stack.items[local.loop_depth].getOrPut(gpa, local.getType());
if (!gop.found_existing) gop.value_ptr.* = .{};
if (std.debug.runtime_safety) {
// If this trips, it means a local is being inserted into the
@@ -7504,14 +7514,40 @@ fn deinitFreeLocalsMap(gpa: mem.Allocator, map: *LocalsMap) void {
map.deinit(gpa);
}
-fn noticeBranchFrees(f: *Function, pre_locals_len: LocalIndex, inst: Air.Inst.Index) !void {
+fn noticeBranchFrees(
+ f: *Function,
+ pre_locals_len: LocalIndex,
+ pre_allocs_len: LocalIndex,
+ inst: Air.Inst.Index,
+) !void {
+ const free_locals = f.getFreeLocals();
+
for (f.locals.items[pre_locals_len..], pre_locals_len..) |*local, local_i| {
const local_index = @intCast(LocalIndex, local_i);
- if (f.allocs.contains(local_index)) continue; // allocs are not freeable
+ if (f.allocs.contains(local_index)) {
+ if (std.debug.runtime_safety) {
+ // new allocs are no longer freeable, so make sure they aren't in the free list
+ if (free_locals.getPtr(local.getType())) |locals_list| {
+ assert(mem.indexOfScalar(LocalIndex, locals_list.items, local_index) == null);
+ }
+ }
+ continue;
+ }
// free more deeply nested locals from other branches at current depth
assert(local.loop_depth >= f.free_locals_stack.items.len - 1);
local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
try freeLocal(f, inst, local_index, 0);
}
+
+ for (f.allocs.keys()[pre_allocs_len..]) |local_i| {
+ const local_index = @intCast(LocalIndex, local_i);
+ const local = &f.locals.items[local_index];
+ // new allocs are no longer freeable, so remove them from the free list
+ if (free_locals.getPtr(local.getType())) |locals_list| {
+ if (mem.indexOfScalar(LocalIndex, locals_list.items, local_index)) |i| {
+ _ = locals_list.swapRemove(i);
+ }
+ }
+ }
}
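
The c.zig side above replaces the Type-keyed free-locals map with one keyed by the rendered C type index plus alignment, so reusing a freed local becomes an exact-match lookup and pop instead of scanning a list for a compatible alignment. Below is a minimal standalone sketch of that pooling scheme; the key fields are simplified (a bare log2 alignment instead of CType.AlignAs) and the freeLocal/reuseLocal helpers are hypothetical, not the compiler's own code:

const std = @import("std");

// Free locals are pooled per exact (C type, alignment) pair.
const LocalType = struct { cty_idx: u32, alignas_log2: u5 };
const LocalIndex = u16;
const LocalsMap = std.AutoArrayHashMapUnmanaged(LocalType, std.ArrayListUnmanaged(LocalIndex));

// Return a local to the pool for its exact type/alignment key.
fn freeLocal(gpa: std.mem.Allocator, map: *LocalsMap, key: LocalType, index: LocalIndex) !void {
    const gop = try map.getOrPut(gpa, key);
    if (!gop.found_existing) gop.value_ptr.* = .{};
    try gop.value_ptr.append(gpa, index);
}

// Reuse any local from the pool that matches the key exactly, if one exists.
fn reuseLocal(map: *LocalsMap, key: LocalType) ?LocalIndex {
    const list = map.getPtr(key) orelse return null;
    return list.popOrNull();
}

test "exact-match reuse of freed locals" {
    const gpa = std.testing.allocator;
    var map: LocalsMap = .{};
    defer {
        for (map.values()) |*list| list.deinit(gpa);
        map.deinit(gpa);
    }
    const key = LocalType{ .cty_idx = 42, .alignas_log2 = 3 };
    try freeLocal(gpa, &map, key, 7);
    try std.testing.expectEqual(@as(?LocalIndex, 7), reuseLocal(&map, key));
    // A different alignment is a different pool; nothing to reuse there.
    try std.testing.expectEqual(@as(?LocalIndex, null), reuseLocal(&map, .{ .cty_idx = 42, .alignas_log2 = 4 }));
}

Keying on the exact pair is what lets allocAlignedLocal in the diff simply popOrNull the matching list, rather than iterating entries and comparing alignments as the removed code did.
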
diff --git a/src/codegen/c/type.zig b/src/codegen/c/type.zig
index bd4b6d9a8d..1f1a220cd2 100644
--- a/src/codegen/c/type.zig
+++ b/src/codegen/c/type.zig
@@ -251,38 +251,6 @@ pub const CType = extern union {
type: Index,
alignas: AlignAs,
};
- pub const AlignAs = struct {
- @"align": std.math.Log2Int(u32),
- abi: std.math.Log2Int(u32),
-
- pub fn init(alignment: u32, abi_alignment: u32) AlignAs {
- assert(std.math.isPowerOfTwo(alignment));
- assert(std.math.isPowerOfTwo(abi_alignment));
- return .{
- .@"align" = std.math.log2_int(u32, alignment),
- .abi = std.math.log2_int(u32, abi_alignment),
- };
- }
- pub fn abiAlign(ty: Type, target: Target) AlignAs {
- const abi_align = ty.abiAlignment(target);
- return init(abi_align, abi_align);
- }
- pub fn fieldAlign(struct_ty: Type, field_i: usize, target: Target) AlignAs {
- return init(
- struct_ty.structFieldAlign(field_i, target),
- struct_ty.structFieldType(field_i).abiAlignment(target),
- );
- }
- pub fn unionPayloadAlign(union_ty: Type, target: Target) AlignAs {
- const union_obj = union_ty.cast(Type.Payload.Union).?.data;
- const union_payload_align = union_obj.abiAlignment(target, false);
- return init(union_payload_align, union_payload_align);
- }
-
- pub fn getAlign(self: AlignAs) u32 {
- return @as(u32, 1) << self.@"align";
- }
- };
};
pub const Unnamed = struct {
@@ -311,13 +279,57 @@ pub const CType = extern union {
};
};
+ pub const AlignAs = struct {
+ @"align": std.math.Log2Int(u32),
+ abi: std.math.Log2Int(u32),
+
+ pub fn init(alignment: u32, abi_alignment: u32) AlignAs {
+ const actual_align = if (alignment != 0) alignment else abi_alignment;
+ assert(std.math.isPowerOfTwo(actual_align));
+ assert(std.math.isPowerOfTwo(abi_alignment));
+ return .{
+ .@"align" = std.math.log2_int(u32, actual_align),
+ .abi = std.math.log2_int(u32, abi_alignment),
+ };
+ }
+ pub fn abiAlign(ty: Type, target: Target) AlignAs {
+ const abi_align = ty.abiAlignment(target);
+ return init(abi_align, abi_align);
+ }
+ pub fn fieldAlign(struct_ty: Type, field_i: usize, target: Target) AlignAs {
+ return init(
+ struct_ty.structFieldAlign(field_i, target),
+ struct_ty.structFieldType(field_i).abiAlignment(target),
+ );
+ }
+ pub fn unionPayloadAlign(union_ty: Type, target: Target) AlignAs {
+ const union_obj = union_ty.cast(Type.Payload.Union).?.data;
+ const union_payload_align = union_obj.abiAlignment(target, false);
+ return init(union_payload_align, union_payload_align);
+ }
+
+ pub fn getAlign(self: AlignAs) u32 {
+ return @as(u32, 1) << self.@"align";
+ }
+ };
+
pub const Index = u32;
pub const Store = struct {
arena: std.heap.ArenaAllocator.State = .{},
set: Set = .{},
pub const Set = struct {
- pub const Map = std.ArrayHashMapUnmanaged(CType, void, HashContext32, true);
+ pub const Map = std.ArrayHashMapUnmanaged(CType, void, HashContext, true);
+ const HashContext = struct {
+ store: *const Set,
+
+ pub fn hash(self: @This(), cty: CType) Map.Hash {
+ return @truncate(Map.Hash, cty.hash(self.store.*));
+ }
+ pub fn eql(_: @This(), lhs: CType, rhs: CType, _: usize) bool {
+ return lhs.eql(rhs);
+ }
+ };
map: Map = .{},
@@ -328,7 +340,7 @@ pub const CType = extern union {
pub fn indexToHash(self: Set, index: Index) Map.Hash {
if (index < Tag.no_payload_count)
- return (HashContext32{ .store = &self }).hash(self.indexToCType(index));
+ return (HashContext{ .store = &self }).hash(self.indexToCType(index));
return self.map.entries.items(.hash)[index - Tag.no_payload_count];
}
@@ -905,7 +917,7 @@ pub const CType = extern union {
self.storage.anon.fields[0] = .{
.name = "array",
.type = array_idx,
- .alignas = Payload.Fields.AlignAs.abiAlign(ty, lookup.getTarget()),
+ .alignas = AlignAs.abiAlign(ty, lookup.getTarget()),
};
self.initAnon(kind, fwd_idx, 1);
} else self.init(switch (kind) {
@@ -1004,12 +1016,12 @@ pub const CType = extern union {
self.storage.anon.fields[0] = .{
.name = "ptr",
.type = ptr_idx,
- .alignas = Payload.Fields.AlignAs.abiAlign(ptr_ty, target),
+ .alignas = AlignAs.abiAlign(ptr_ty, target),
};
self.storage.anon.fields[1] = .{
.name = "len",
.type = Tag.uintptr_t.toIndex(),
- .alignas = Payload.Fields.AlignAs.abiAlign(Type.usize, target),
+ .alignas = AlignAs.abiAlign(Type.usize, target),
};
self.initAnon(kind, fwd_idx, 2);
} else self.init(switch (kind) {
@@ -1125,7 +1137,7 @@ pub const CType = extern union {
self.storage.anon.fields[field_count] = .{
.name = "payload",
.type = payload_idx.?,
- .alignas = Payload.Fields.AlignAs.unionPayloadAlign(ty, target),
+ .alignas = AlignAs.unionPayloadAlign(ty, target),
};
field_count += 1;
}
@@ -1133,7 +1145,7 @@ pub const CType = extern union {
self.storage.anon.fields[field_count] = .{
.name = "tag",
.type = tag_idx.?,
- .alignas = Payload.Fields.AlignAs.abiAlign(tag_ty.?, target),
+ .alignas = AlignAs.abiAlign(tag_ty.?, target),
};
field_count += 1;
}
@@ -1158,11 +1170,7 @@ pub const CType = extern union {
const field_ty = ty.structFieldType(field_i);
if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
- const field_align = Payload.Fields.AlignAs.fieldAlign(
- ty,
- field_i,
- target,
- );
+ const field_align = AlignAs.fieldAlign(ty, field_i, target);
if (field_align.@"align" < field_align.abi) {
is_packed = true;
if (!lookup.isMutable()) break;
@@ -1235,12 +1243,12 @@ pub const CType = extern union {
self.storage.anon.fields[0] = .{
.name = "payload",
.type = payload_idx,
- .alignas = Payload.Fields.AlignAs.abiAlign(payload_ty, target),
+ .alignas = AlignAs.abiAlign(payload_ty, target),
};
self.storage.anon.fields[1] = .{
.name = "is_null",
.type = Tag.bool.toIndex(),
- .alignas = Payload.Fields.AlignAs.abiAlign(Type.bool, target),
+ .alignas = AlignAs.abiAlign(Type.bool, target),
};
self.initAnon(kind, fwd_idx, 2);
} else self.init(switch (kind) {
@@ -1273,12 +1281,12 @@ pub const CType = extern union {
self.storage.anon.fields[0] = .{
.name = "payload",
.type = payload_idx,
- .alignas = Payload.Fields.AlignAs.abiAlign(payload_ty, target),
+ .alignas = AlignAs.abiAlign(payload_ty, target),
};
self.storage.anon.fields[1] = .{
.name = "error",
.type = error_idx,
- .alignas = Payload.Fields.AlignAs.abiAlign(error_ty, target),
+ .alignas = AlignAs.abiAlign(error_ty, target),
};
self.initAnon(kind, fwd_idx, 2);
} else self.init(switch (kind) {
@@ -1551,7 +1559,7 @@ pub const CType = extern union {
.complete, .parameter, .payload => .complete,
.global => .global,
}).?,
- .alignas = Payload.Fields.AlignAs.fieldAlign(ty, field_i, target),
+ .alignas = AlignAs.fieldAlign(ty, field_i, target),
};
}
@@ -1635,28 +1643,6 @@ pub const CType = extern union {
}
}
- pub const HashContext64 = struct {
- store: *const Store.Set,
-
- pub fn hash(self: @This(), cty: CType) u64 {
- return cty.hash(self.store.*);
- }
- pub fn eql(_: @This(), lhs: CType, rhs: CType) bool {
- return lhs.eql(rhs);
- }
- };
-
- pub const HashContext32 = struct {
- store: *const Store.Set,
-
- pub fn hash(self: @This(), cty: CType) u32 {
- return @truncate(u32, cty.hash(self.store.*));
- }
- pub fn eql(_: @This(), lhs: CType, rhs: CType, _: usize) bool {
- return lhs.eql(rhs);
- }
- };
-
pub const TypeAdapter64 = struct {
kind: Kind,
lookup: Convert.Lookup,
@@ -1719,7 +1705,7 @@ pub const CType = extern union {
else => unreachable,
},
mem.span(c_field.name),
- ) or Payload.Fields.AlignAs.fieldAlign(ty, field_i, target).@"align" !=
+ ) or AlignAs.fieldAlign(ty, field_i, target).@"align" !=
c_field.alignas.@"align") return false;
}
return true;
@@ -1840,10 +1826,7 @@ pub const CType = extern union {
.Union => ty.unionFields().keys()[field_i],
else => unreachable,
});
- autoHash(
- hasher,
- Payload.Fields.AlignAs.fieldAlign(ty, field_i, target).@"align",
- );
+ autoHash(hasher, AlignAs.fieldAlign(ty, field_i, target).@"align");
}
},
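
The AlignAs helper hoisted out of Payload.Fields in the type.zig diff stores both the requested and the ABI alignment as log2 exponents, and its new init treats an alignment of 0 as "use the ABI alignment". A standalone sketch of that encoding, re-declared here purely for illustration outside the compiler:

const std = @import("std");

// Re-declared for illustration only; the compiler's version lives in CType.
const AlignAs = struct {
    @"align": std.math.Log2Int(u32),
    abi: std.math.Log2Int(u32),

    fn init(alignment: u32, abi_alignment: u32) AlignAs {
        // An alignment of 0 means "use the ABI alignment".
        const actual_align = if (alignment != 0) alignment else abi_alignment;
        std.debug.assert(std.math.isPowerOfTwo(actual_align));
        std.debug.assert(std.math.isPowerOfTwo(abi_alignment));
        return .{
            .@"align" = std.math.log2_int(u32, actual_align),
            .abi = std.math.log2_int(u32, abi_alignment),
        };
    }

    fn getAlign(self: AlignAs) u32 {
        return @as(u32, 1) << self.@"align";
    }
};

test "alignment 0 folds into the ABI alignment" {
    const a = AlignAs.init(0, 8);
    try std.testing.expectEqual(@as(u32, 8), a.getAlign());
}

test "over-alignment is an exponent comparison" {
    const b = AlignAs.init(16, 8);
    try std.testing.expect(b.@"align" > b.abi);
}

Storing exponents makes the under/over-aligned decision in renderCTypeAndName a plain integer comparison between @"align" and abi, which is also why the 0-means-ABI case can be folded into init instead of being special-cased at every call site.
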