commit c08c0fc6eddf601785abfbc5e5a9ab5c89d7cfbf
Author:    Andrew Kelley <andrew@ziglang.org>  2023-09-24 14:37:36 -0700
Committer: Andrew Kelley <andrew@ziglang.org>  2023-09-24 14:37:36 -0700
Tree:      65cabc6acd2db112c1fa9557c2f34dfd04659113 /src/type.zig
Parent:    a7088fd9a3edb037f0f51bb402a3c557334634f3
revert "compiler: packed structs cache bit offsets"
This is mostly a revert of a7088fd9a3edb037f0f51bb402a3c557334634f3. Measurement revealed that the reverted commit had actually regressed performance.
Diffstat (limited to 'src/type.zig')
 src/type.zig | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/src/type.zig b/src/type.zig
index 83a89b527b..e227487444 100644
--- a/src/type.zig
+++ b/src/type.zig
@@ -3025,8 +3025,16 @@ pub const Type = struct {
         const ip = &mod.intern_pool;
         const struct_type = ip.indexToKey(ty.toIntern()).struct_type;
         assert(struct_type.layout == .Packed);
-        assert(struct_type.haveLayout(ip));
-        return struct_type.offsets.get(ip)[field_index];
+        comptime assert(Type.packed_struct_layout_version == 2);
+
+        var running_bits: u32 = 0;
+        for (struct_type.field_types.get(ip), 0..) |field_ty, i| {
+            if (i == field_index) break;
+            if (!field_ty.toType().hasRuntimeBits(mod)) continue;
+            const field_bits: u32 = @intCast(field_ty.toType().bitSize(mod));
+            running_bits += field_bits;
+        }
+        return running_bits;
     }
 
     pub fn packedStructFieldByteOffset(ty: Type, field_index: usize, mod: *Module) u32 {
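
For readers outside the compiler, here is a minimal standalone sketch of the linear scan this revert restores: a packed struct field's bit offset is the running sum of the bit sizes of the fields that precede it. fieldBitOffset is a hypothetical helper written for illustration, not compiler code; it operates on a concrete type via std.meta rather than on the compiler's InternPool representation.

const std = @import("std");

// Hypothetical helper (illustration only, not compiler code): sum the bit
// sizes of all fields before field_index. Zero-bit fields contribute 0 via
// @bitSizeOf, mirroring the hasRuntimeBits skip in the diff above.
fn fieldBitOffset(comptime T: type, comptime field_index: usize) u32 {
    var running_bits: u32 = 0;
    inline for (std.meta.fields(T), 0..) |field, i| {
        if (i == field_index) break;
        running_bits += @bitSizeOf(field.type);
    }
    return running_bits;
}

test "packed struct field bit offsets" {
    const P = packed struct { a: u3, b: u5, c: u8 };
    try std.testing.expectEqual(@as(u32, 0), fieldBitOffset(P, 0));
    try std.testing.expectEqual(@as(u32, 3), fieldBitOffset(P, 1));
    try std.testing.expectEqual(@as(u32, 8), fieldBitOffset(P, 2));
    // Ordinary user code can get the same answer from the builtin directly.
    try std.testing.expectEqual(fieldBitOffset(P, 2), @bitOffsetOf(P, "c"));
}

The reverted commit cached these running sums in struct_type.offsets; the revert trades that cache for the O(fields) scan above, which measurement showed to be the faster option overall.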