author    mlugg <mlugg@mlugg.co.uk>    2024-03-26 00:05:08 +0000
committer mlugg <mlugg@mlugg.co.uk>    2024-03-26 13:48:06 +0000
commit    884d957b6c291961536c10401f60264da26cba30 (patch)
tree      4138aabd75ae3a13bf0efbb4fe89ec1bb7fd82cb /src/Value.zig
parent    5ec6e3036b2772e6efc08726b22c560dedd556bc (diff)
compiler: eliminate legacy Value representation
Good riddance! Most of these changes are trivial. There's a fix for a minor bug this exposed in `Value.readFromPackedMemory`, but aside from that, it's all just things like changing `intern` calls to `toIntern`.
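
As orientation before the diff: once the legacy payload machinery is gone, a `Value` is nothing more than a wrapper around an `InternPool.Index`. The sketch below is a simplified assumption of that shape, not code lifted from the repository; the `InternPool.Index` stub, the assertion bodies, and the test are stand-ins.

```zig
const std = @import("std");
const assert = std.debug.assert;

// Hypothetical stand-in for the compiler's InternPool; only the Index enum
// matters for this sketch.
const InternPool = struct {
    pub const Index = enum(u32) {
        none = std.math.maxInt(u32),
        _,
    };
};

// After this commit, Value carries only an interned index (no `legacy` union).
const Value = struct {
    ip_index: InternPool.Index,

    pub fn fromInterned(i: InternPool.Index) Value {
        assert(i != .none);
        return .{ .ip_index = i };
    }

    pub fn toIntern(val: Value) InternPool.Index {
        assert(val.ip_index != .none);
        return val.ip_index;
    }
};

test "round-trip an interned index" {
    const idx: InternPool.Index = @enumFromInt(42);
    const val = Value.fromInterned(idx);
    try std.testing.expectEqual(idx, val.toIntern());
}
```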
Diffstat (limited to 'src/Value.zig')
-rw-r--r--  src/Value.zig  783
1 file changed, 177 insertions(+), 606 deletions(-)
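
Most hunks below follow the mechanical pattern the commit message describes: a fallible `intern(ty, mod)` call on an already-interned value becomes the plain `toIntern()` accessor. A representative before/after pair, taken from the `floatNeg` hunk further down (the loop variables are compiler internals shown only for context):

```zig
// Before: interning a legacy value could allocate, hence the extra `try`.
scalar.* = try (try floatNegScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);

// After: the value is already in the InternPool, so fetching its index cannot fail.
scalar.* = (try floatNegScalar(elem_val, scalar_ty, mod)).toIntern();
```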
diff --git a/src/Value.zig b/src/Value.zig
index af24d68d7c..040961fa38 100644
--- a/src/Value.zig
+++ b/src/Value.zig
@@ -13,124 +13,8 @@ const Sema = @import("Sema.zig");
const InternPool = @import("InternPool.zig");
const Value = @This();
-/// We are migrating towards using this for every Value object. However, many
-/// values are still represented the legacy way. This is indicated by using
-/// InternPool.Index.none.
ip_index: InternPool.Index,
-/// This is the raw data, with no bookkeeping, no memory awareness,
-/// no de-duplication, and no type system awareness.
-/// This union takes advantage of the fact that the first page of memory
-/// is unmapped, giving us 4096 possible enum tags that have no payload.
-legacy: extern union {
- ptr_otherwise: *Payload,
-},
-
-// Keep in sync with tools/stage2_pretty_printers_common.py
-pub const Tag = enum(usize) {
- // The first section of this enum are tags that require no payload.
- // After this, the tag requires a payload.
-
- /// When the type is error union:
- /// * If the tag is `.@"error"`, the error union is an error.
- /// * If the tag is `.eu_payload`, the error union is a payload.
- /// A nested error such as `anyerror!(anyerror!T)` in which the outer error union
- /// is non-error, but the inner error union is an error, is represented as
- /// a tag of `.eu_payload`, with a sub-tag of `.@"error"`.
- eu_payload,
- /// When the type is optional:
- /// * If the tag is `.null_value`, the optional is null.
- /// * If the tag is `.opt_payload`, the optional is a payload.
- /// A nested optional such as `??T` in which the outer optional
- /// is non-null, but the inner optional is null, is represented as
- /// a tag of `.opt_payload`, with a sub-tag of `.null_value`.
- opt_payload,
- /// Pointer and length as sub `Value` objects.
- slice,
- /// A slice of u8 whose memory is managed externally.
- bytes,
- /// This value is repeated some number of times. The amount of times to repeat
- /// is stored externally.
- repeated,
- /// An instance of a struct, array, or vector.
- /// Each element/field stored as a `Value`.
- /// In the case of sentinel-terminated arrays, the sentinel value *is* stored,
- /// so the slice length will be one more than the type's array length.
- aggregate,
- /// An instance of a union.
- @"union",
-
- pub fn Type(comptime t: Tag) type {
- return switch (t) {
- .eu_payload,
- .opt_payload,
- .repeated,
- => Payload.SubValue,
- .slice => Payload.Slice,
- .bytes => Payload.Bytes,
- .aggregate => Payload.Aggregate,
- .@"union" => Payload.Union,
- };
- }
-
- pub fn create(comptime t: Tag, ally: Allocator, data: Data(t)) error{OutOfMemory}!Value {
- const ptr = try ally.create(t.Type());
- ptr.* = .{
- .base = .{ .tag = t },
- .data = data,
- };
- return Value{
- .ip_index = .none,
- .legacy = .{ .ptr_otherwise = &ptr.base },
- };
- }
-
- pub fn Data(comptime t: Tag) type {
- return std.meta.fieldInfo(t.Type(), .data).type;
- }
-};
-
-pub fn initPayload(payload: *Payload) Value {
- return Value{
- .ip_index = .none,
- .legacy = .{ .ptr_otherwise = payload },
- };
-}
-
-pub fn tag(self: Value) Tag {
- assert(self.ip_index == .none);
- return self.legacy.ptr_otherwise.tag;
-}
-
-/// Prefer `castTag` to this.
-pub fn cast(self: Value, comptime T: type) ?*T {
- if (self.ip_index != .none) {
- return null;
- }
- if (@hasField(T, "base_tag")) {
- return self.castTag(T.base_tag);
- }
- inline for (@typeInfo(Tag).Enum.fields) |field| {
- const t = @as(Tag, @enumFromInt(field.value));
- if (self.legacy.ptr_otherwise.tag == t) {
- if (T == t.Type()) {
- return @fieldParentPtr(T, "base", self.legacy.ptr_otherwise);
- }
- return null;
- }
- }
- unreachable;
-}
-
-pub fn castTag(self: Value, comptime t: Tag) ?*t.Type() {
- if (self.ip_index != .none) return null;
-
- if (self.legacy.ptr_otherwise.tag == t)
- return @fieldParentPtr(t.Type(), "base", self.legacy.ptr_otherwise);
-
- return null;
-}
-
pub fn format(val: Value, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
_ = val;
_ = fmt;
@@ -148,33 +32,7 @@ pub fn dump(
out_stream: anytype,
) !void {
comptime assert(fmt.len == 0);
- if (start_val.ip_index != .none) {
- try out_stream.print("(interned: {})", .{start_val.toIntern()});
- return;
- }
- var val = start_val;
- while (true) switch (val.tag()) {
- .aggregate => {
- return out_stream.writeAll("(aggregate)");
- },
- .@"union" => {
- return out_stream.writeAll("(union value)");
- },
- .bytes => return out_stream.print("\"{}\"", .{std.zig.fmtEscapes(val.castTag(.bytes).?.data)}),
- .repeated => {
- try out_stream.writeAll("(repeated) ");
- val = val.castTag(.repeated).?.data;
- },
- .eu_payload => {
- try out_stream.writeAll("(eu_payload) ");
- val = val.castTag(.repeated).?.data;
- },
- .opt_payload => {
- try out_stream.writeAll("(opt_payload) ");
- val = val.castTag(.repeated).?.data;
- },
- .slice => return out_stream.writeAll("(slice)"),
- };
+ try out_stream.print("(interned: {})", .{start_val.toIntern()});
}
pub fn fmtDebug(val: Value) std.fmt.Formatter(dump) {
@@ -252,162 +110,9 @@ fn arrayToIpString(val: Value, len_u64: u64, mod: *Module) !InternPool.NullTermi
return ip.getOrPutTrailingString(gpa, len);
}
-pub fn intern2(val: Value, ty: Type, mod: *Module) Allocator.Error!InternPool.Index {
- if (val.ip_index != .none) return val.ip_index;
- return intern(val, ty, mod);
-}
-
-pub fn intern(val: Value, ty: Type, mod: *Module) Allocator.Error!InternPool.Index {
- if (val.ip_index != .none) return (try mod.getCoerced(val, ty)).toIntern();
- const ip = &mod.intern_pool;
- switch (val.tag()) {
- .eu_payload => {
- const pl = val.castTag(.eu_payload).?.data;
- return mod.intern(.{ .error_union = .{
- .ty = ty.toIntern(),
- .val = .{ .payload = try pl.intern(ty.errorUnionPayload(mod), mod) },
- } });
- },
- .opt_payload => {
- const pl = val.castTag(.opt_payload).?.data;
- return mod.intern(.{ .opt = .{
- .ty = ty.toIntern(),
- .val = try pl.intern(ty.optionalChild(mod), mod),
- } });
- },
- .slice => {
- const pl = val.castTag(.slice).?.data;
- return mod.intern(.{ .slice = .{
- .ty = ty.toIntern(),
- .len = try pl.len.intern(Type.usize, mod),
- .ptr = try pl.ptr.intern(ty.slicePtrFieldType(mod), mod),
- } });
- },
- .bytes => {
- const pl = val.castTag(.bytes).?.data;
- return mod.intern(.{ .aggregate = .{
- .ty = ty.toIntern(),
- .storage = .{ .bytes = pl },
- } });
- },
- .repeated => {
- const pl = val.castTag(.repeated).?.data;
- return mod.intern(.{ .aggregate = .{
- .ty = ty.toIntern(),
- .storage = .{ .repeated_elem = try pl.intern(ty.childType(mod), mod) },
- } });
- },
- .aggregate => {
- const len = @as(usize, @intCast(ty.arrayLen(mod)));
- const old_elems = val.castTag(.aggregate).?.data[0..len];
- const new_elems = try mod.gpa.alloc(InternPool.Index, old_elems.len);
- defer mod.gpa.free(new_elems);
- const ty_key = ip.indexToKey(ty.toIntern());
- for (new_elems, old_elems, 0..) |*new_elem, old_elem, field_i|
- new_elem.* = try old_elem.intern(switch (ty_key) {
- .struct_type => ty.structFieldType(field_i, mod),
- .anon_struct_type => |info| Type.fromInterned(info.types.get(ip)[field_i]),
- inline .array_type, .vector_type => |info| Type.fromInterned(info.child),
- else => unreachable,
- }, mod);
- return mod.intern(.{ .aggregate = .{
- .ty = ty.toIntern(),
- .storage = .{ .elems = new_elems },
- } });
- },
- .@"union" => {
- const pl = val.castTag(.@"union").?.data;
- if (pl.tag) |pl_tag| {
- return mod.intern(.{ .un = .{
- .ty = ty.toIntern(),
- .tag = try pl_tag.intern(ty.unionTagTypeHypothetical(mod), mod),
- .val = try pl.val.intern(ty.unionFieldType(pl_tag, mod).?, mod),
- } });
- } else {
- return mod.intern(.{ .un = .{
- .ty = ty.toIntern(),
- .tag = .none,
- .val = try pl.val.intern(try ty.unionBackingType(mod), mod),
- } });
- }
- },
- }
-}
-
-pub fn unintern(val: Value, arena: Allocator, mod: *Module) Allocator.Error!Value {
- return if (val.ip_index == .none) val else switch (mod.intern_pool.indexToKey(val.toIntern())) {
- .int_type,
- .ptr_type,
- .array_type,
- .vector_type,
- .opt_type,
- .anyframe_type,
- .error_union_type,
- .simple_type,
- .struct_type,
- .anon_struct_type,
- .union_type,
- .opaque_type,
- .enum_type,
- .func_type,
- .error_set_type,
- .inferred_error_set_type,
-
- .undef,
- .simple_value,
- .variable,
- .extern_func,
- .func,
- .int,
- .err,
- .enum_literal,
- .enum_tag,
- .empty_enum_value,
- .float,
- .ptr,
- => val,
-
- .error_union => |error_union| switch (error_union.val) {
- .err_name => val,
- .payload => |payload| Tag.eu_payload.create(arena, Value.fromInterned(payload)),
- },
-
- .slice => |slice| Tag.slice.create(arena, .{
- .ptr = Value.fromInterned(slice.ptr),
- .len = Value.fromInterned(slice.len),
- }),
-
- .opt => |opt| switch (opt.val) {
- .none => val,
- else => |payload| Tag.opt_payload.create(arena, Value.fromInterned(payload)),
- },
-
- .aggregate => |aggregate| switch (aggregate.storage) {
- .bytes => |bytes| Tag.bytes.create(arena, try arena.dupe(u8, bytes)),
- .elems => |old_elems| {
- const new_elems = try arena.alloc(Value, old_elems.len);
- for (new_elems, old_elems) |*new_elem, old_elem| new_elem.* = Value.fromInterned(old_elem);
- return Tag.aggregate.create(arena, new_elems);
- },
- .repeated_elem => |elem| Tag.repeated.create(arena, Value.fromInterned(elem)),
- },
-
- .un => |un| Tag.@"union".create(arena, .{
- // toValue asserts that the value cannot be .none which is valid on unions.
- .tag = if (un.tag == .none) null else Value.fromInterned(un.tag),
- .val = Value.fromInterned(un.val),
- }),
-
- .memoized_call => unreachable,
- };
-}
-
pub fn fromInterned(i: InternPool.Index) Value {
assert(i != .none);
- return .{
- .ip_index = i,
- .legacy = undefined,
- };
+ return .{ .ip_index = i };
}
pub fn toIntern(val: Value) InternPool.Index {
@@ -492,24 +197,24 @@ pub fn isFuncBody(val: Value, mod: *Module) bool {
}
pub fn getFunction(val: Value, mod: *Module) ?InternPool.Key.Func {
- return if (val.ip_index != .none) switch (mod.intern_pool.indexToKey(val.toIntern())) {
+ return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.func => |x| x,
else => null,
- } else null;
+ };
}
pub fn getExternFunc(val: Value, mod: *Module) ?InternPool.Key.ExternFunc {
- return if (val.ip_index != .none) switch (mod.intern_pool.indexToKey(val.toIntern())) {
+ return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.extern_func => |extern_func| extern_func,
else => null,
- } else null;
+ };
}
pub fn getVariable(val: Value, mod: *Module) ?InternPool.Key.Variable {
- return if (val.ip_index != .none) switch (mod.intern_pool.indexToKey(val.toIntern())) {
+ return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.variable => |variable| variable,
else => null,
- } else null;
+ };
}
/// If the value fits in a u64, return it, otherwise null.
@@ -677,25 +382,14 @@ pub fn writeToMemory(val: Value, ty: Type, mod: *Module, buffer: []u8) error{
.auto => return error.IllDefinedMemoryLayout,
.@"extern" => for (0..struct_type.field_types.len) |i| {
const off: usize = @intCast(ty.structFieldOffset(i, mod));
- const field_val = switch (val.ip_index) {
- .none => switch (val.tag()) {
- .bytes => {
- buffer[off] = val.castTag(.bytes).?.data[i];
- continue;
- },
- .aggregate => val.castTag(.aggregate).?.data[i],
- .repeated => val.castTag(.repeated).?.data,
- else => unreachable,
+ const field_val = Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
+ .bytes => |bytes| {
+ buffer[off] = bytes[i];
+ continue;
},
- else => Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
- .bytes => |bytes| {
- buffer[off] = bytes[i];
- continue;
- },
- .elems => |elems| elems[i],
- .repeated_elem => |elem| elem,
- }),
- };
+ .elems => |elems| elems[i],
+ .repeated_elem => |elem| elem,
+ });
const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]);
try writeToMemory(field_val, field_ty, mod, buffer[off..]);
},
@@ -842,19 +536,11 @@ pub fn writeToPackedMemory(
assert(struct_type.layout == .@"packed");
var bits: u16 = 0;
for (0..struct_type.field_types.len) |i| {
- const field_val = switch (val.ip_index) {
- .none => switch (val.tag()) {
- .bytes => unreachable,
- .aggregate => val.castTag(.aggregate).?.data[i],
- .repeated => val.castTag(.repeated).?.data,
- else => unreachable,
- },
- else => Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
- .bytes => unreachable,
- .elems => |elems| elems[i],
- .repeated_elem => |elem| elem,
- }),
- };
+ const field_val = Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
+ .bytes => unreachable,
+ .elems => |elems| elems[i],
+ .repeated_elem => |elem| elem,
+ });
const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]);
const field_bits: u16 = @intCast(field_ty.bitSize(mod));
try field_val.writeToPackedMemory(field_ty, mod, buffer, bit_offset + bits);
@@ -972,7 +658,7 @@ pub fn readFromMemory(
const elems = try arena.alloc(InternPool.Index, @as(usize, @intCast(ty.arrayLen(mod))));
var offset: usize = 0;
for (elems) |*elem| {
- elem.* = try (try readFromMemory(elem_ty, mod, buffer[offset..], arena)).intern(elem_ty, mod);
+ elem.* = (try readFromMemory(elem_ty, mod, buffer[offset..], arena)).toIntern();
offset += @as(usize, @intCast(elem_size));
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
@@ -997,7 +683,7 @@ pub fn readFromMemory(
const field_ty = Type.fromInterned(field_types.get(ip)[i]);
const off: usize = @intCast(ty.structFieldOffset(i, mod));
const sz: usize = @intCast(field_ty.abiSize(mod));
- field_val.* = try (try readFromMemory(field_ty, mod, buffer[off..(off + sz)], arena)).intern(field_ty, mod);
+ field_val.* = (try readFromMemory(field_ty, mod, buffer[off..(off + sz)], arena)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -1027,7 +713,7 @@ pub fn readFromMemory(
.@"extern" => {
const union_size = ty.abiSize(mod);
const array_ty = try mod.arrayType(.{ .len = union_size, .child = .u8_type });
- const val = try (try readFromMemory(array_ty, mod, buffer, arena)).intern(array_ty, mod);
+ const val = (try readFromMemory(array_ty, mod, buffer, arena)).toIntern();
return Value.fromInterned((try mod.intern(.{ .un = .{
.ty = ty.toIntern(),
.tag = .none,
@@ -1094,7 +780,7 @@ pub fn readFromPackedMemory(
return Value.true;
}
},
- .Int, .Enum => |ty_tag| {
+ .Int => {
if (buffer.len == 0) return mod.intValue(ty, 0);
const int_info = ty.intInfo(mod);
const bits = int_info.bits;
@@ -1102,21 +788,10 @@ pub fn readFromPackedMemory(
// Fast path for integers <= u64
if (bits <= 64) {
- const int_ty = switch (ty_tag) {
- .Int => ty,
- .Enum => ty.intTagType(mod),
- else => unreachable,
- };
- return mod.getCoerced(switch (int_info.signedness) {
- .signed => return mod.intValue(
- int_ty,
- std.mem.readVarPackedInt(i64, buffer, bit_offset, bits, endian, .signed),
- ),
- .unsigned => return mod.intValue(
- int_ty,
- std.mem.readVarPackedInt(u64, buffer, bit_offset, bits, endian, .unsigned),
- ),
- }, ty);
+ return mod.intValue(
+ ty,
+ std.mem.readVarPackedInt(i64, buffer, bit_offset, bits, endian, int_info.signedness),
+ );
}
// Slow path, we have to construct a big-int
@@ -1129,6 +804,11 @@ pub fn readFromPackedMemory(
bigint.readPackedTwosComplement(buffer, bit_offset, bits, endian, int_info.signedness);
return mod.intValue_big(ty, bigint.toConst());
},
+ .Enum => {
+ const int_ty = ty.intTagType(mod);
+ const int_val = try Value.readFromPackedMemory(int_ty, mod, buffer, bit_offset, arena);
+ return mod.getCoerced(int_val, ty);
+ },
.Float => return Value.fromInterned((try mod.intern(.{ .float = .{
.ty = ty.toIntern(),
.storage = switch (ty.floatBits(target)) {
@@ -1149,7 +829,7 @@ pub fn readFromPackedMemory(
for (elems, 0..) |_, i| {
// On big-endian systems, LLVM reverses the element order of vectors by default
const tgt_elem_i = if (endian == .big) elems.len - i - 1 else i;
- elems[tgt_elem_i] = try (try readFromPackedMemory(elem_ty, mod, buffer, bit_offset + bits, arena)).intern(elem_ty, mod);
+ elems[tgt_elem_i] = (try readFromPackedMemory(elem_ty, mod, buffer, bit_offset + bits, arena)).toIntern();
bits += elem_bit_size;
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
@@ -1166,7 +846,7 @@ pub fn readFromPackedMemory(
for (field_vals, 0..) |*field_val, i| {
const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]);
const field_bits: u16 = @intCast(field_ty.bitSize(mod));
- field_val.* = try (try readFromPackedMemory(field_ty, mod, buffer, bit_offset + bits, arena)).intern(field_ty, mod);
+ field_val.* = (try readFromPackedMemory(field_ty, mod, buffer, bit_offset + bits, arena)).toIntern();
bits += field_bits;
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
@@ -1611,51 +1291,42 @@ pub fn maybeElemValue(val: Value, mod: *Module, index: usize) Allocator.Error!?V
}
pub fn maybeElemValueFull(val: Value, sema: ?*Sema, mod: *Module, index: usize) Allocator.Error!?Value {
- return switch (val.ip_index) {
- .none => switch (val.tag()) {
- .bytes => try mod.intValue(Type.u8, val.castTag(.bytes).?.data[index]),
- .repeated => val.castTag(.repeated).?.data,
- .aggregate => val.castTag(.aggregate).?.data[index],
- .slice => val.castTag(.slice).?.data.ptr.maybeElemValueFull(sema, mod, index),
- else => null,
- },
- else => switch (mod.intern_pool.indexToKey(val.toIntern())) {
- .undef => |ty| Value.fromInterned((try mod.intern(.{
- .undef = Type.fromInterned(ty).elemType2(mod).toIntern(),
- }))),
- .slice => |slice| return Value.fromInterned(slice.ptr).maybeElemValueFull(sema, mod, index),
- .ptr => |ptr| switch (ptr.addr) {
- .decl => |decl| mod.declPtr(decl).val.maybeElemValueFull(sema, mod, index),
- .anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).maybeElemValueFull(sema, mod, index),
- .comptime_alloc => |idx| if (sema) |s| Value.fromInterned(
- try s.getComptimeAlloc(idx).val.intern(mod, s.arena),
- ).maybeElemValueFull(sema, mod, index) else null,
- .int, .eu_payload => null,
- .opt_payload => |base| Value.fromInterned(base).maybeElemValueFull(sema, mod, index),
- .comptime_field => |field_val| Value.fromInterned(field_val).maybeElemValueFull(sema, mod, index),
- .elem => |elem| Value.fromInterned(elem.base).maybeElemValueFull(sema, mod, index + @as(usize, @intCast(elem.index))),
- .field => |field| if (Value.fromInterned(field.base).pointerDecl(mod)) |decl_index| {
- const base_decl = mod.declPtr(decl_index);
- const field_val = try base_decl.val.fieldValue(mod, @as(usize, @intCast(field.index)));
- return field_val.maybeElemValueFull(sema, mod, index);
- } else null,
- },
- .opt => |opt| Value.fromInterned(opt.val).maybeElemValueFull(sema, mod, index),
- .aggregate => |aggregate| {
- const len = mod.intern_pool.aggregateTypeLen(aggregate.ty);
- if (index < len) return Value.fromInterned(switch (aggregate.storage) {
- .bytes => |bytes| try mod.intern(.{ .int = .{
- .ty = .u8_type,
- .storage = .{ .u64 = bytes[index] },
- } }),
- .elems => |elems| elems[index],
- .repeated_elem => |elem| elem,
- });
- assert(index == len);
- return Value.fromInterned(mod.intern_pool.indexToKey(aggregate.ty).array_type.sentinel);
- },
- else => null,
+ return switch (mod.intern_pool.indexToKey(val.toIntern())) {
+ .undef => |ty| Value.fromInterned((try mod.intern(.{
+ .undef = Type.fromInterned(ty).elemType2(mod).toIntern(),
+ }))),
+ .slice => |slice| return Value.fromInterned(slice.ptr).maybeElemValueFull(sema, mod, index),
+ .ptr => |ptr| switch (ptr.addr) {
+ .decl => |decl| mod.declPtr(decl).val.maybeElemValueFull(sema, mod, index),
+ .anon_decl => |anon_decl| Value.fromInterned(anon_decl.val).maybeElemValueFull(sema, mod, index),
+ .comptime_alloc => |idx| if (sema) |s| Value.fromInterned(
+ try s.getComptimeAlloc(idx).val.intern(mod, s.arena),
+ ).maybeElemValueFull(sema, mod, index) else null,
+ .int, .eu_payload => null,
+ .opt_payload => |base| Value.fromInterned(base).maybeElemValueFull(sema, mod, index),
+ .comptime_field => |field_val| Value.fromInterned(field_val).maybeElemValueFull(sema, mod, index),
+ .elem => |elem| Value.fromInterned(elem.base).maybeElemValueFull(sema, mod, index + @as(usize, @intCast(elem.index))),
+ .field => |field| if (Value.fromInterned(field.base).pointerDecl(mod)) |decl_index| {
+ const base_decl = mod.declPtr(decl_index);
+ const field_val = try base_decl.val.fieldValue(mod, @as(usize, @intCast(field.index)));
+ return field_val.maybeElemValueFull(sema, mod, index);
+ } else null,
+ },
+ .opt => |opt| Value.fromInterned(opt.val).maybeElemValueFull(sema, mod, index),
+ .aggregate => |aggregate| {
+ const len = mod.intern_pool.aggregateTypeLen(aggregate.ty);
+ if (index < len) return Value.fromInterned(switch (aggregate.storage) {
+ .bytes => |bytes| try mod.intern(.{ .int = .{
+ .ty = .u8_type,
+ .storage = .{ .u64 = bytes[index] },
+ } }),
+ .elems => |elems| elems[index],
+ .repeated_elem => |elem| elem,
+ });
+ assert(index == len);
+ return Value.fromInterned(mod.intern_pool.indexToKey(aggregate.ty).array_type.sentinel);
},
+ else => null,
};
}
@@ -1688,85 +1359,61 @@ pub fn sliceArray(
) error{OutOfMemory}!Value {
// TODO: write something like getCoercedInts to avoid needing to dupe
const mod = sema.mod;
- return switch (val.ip_index) {
- .none => switch (val.tag()) {
- .slice => val.castTag(.slice).?.data.ptr.sliceArray(sema, start, end),
- .bytes => Tag.bytes.create(sema.arena, val.castTag(.bytes).?.data[start..end]),
- .repeated => val,
- .aggregate => Tag.aggregate.create(sema.arena, val.castTag(.aggregate).?.data[start..end]),
+ return switch (mod.intern_pool.indexToKey(val.toIntern())) {
+ .ptr => |ptr| switch (ptr.addr) {
+ .decl => |decl| try mod.declPtr(decl).val.sliceArray(sema, start, end),
+ .comptime_alloc => |idx| try Value.fromInterned(
+ try sema.getComptimeAlloc(idx).val.intern(mod, sema.arena),
+ ).sliceArray(sema, start, end),
+ .comptime_field => |comptime_field| Value.fromInterned(comptime_field)
+ .sliceArray(sema, start, end),
+ .elem => |elem| Value.fromInterned(elem.base)
+ .sliceArray(sema, start + @as(usize, @intCast(elem.index)), end + @as(usize, @intCast(elem.index))),
else => unreachable,
},
- else => switch (mod.intern_pool.indexToKey(val.toIntern())) {
- .ptr => |ptr| switch (ptr.addr) {
- .decl => |decl| try mod.declPtr(decl).val.sliceArray(sema, start, end),
- .comptime_alloc => |idx| try Value.fromInterned(
- try sema.getComptimeAlloc(idx).val.intern(mod, sema.arena),
- ).sliceArray(sema, start, end),
- .comptime_field => |comptime_field| Value.fromInterned(comptime_field)
- .sliceArray(sema, start, end),
- .elem => |elem| Value.fromInterned(elem.base)
- .sliceArray(sema, start + @as(usize, @intCast(elem.index)), end + @as(usize, @intCast(elem.index))),
+ .aggregate => |aggregate| Value.fromInterned((try mod.intern(.{ .aggregate = .{
+ .ty = switch (mod.intern_pool.indexToKey(mod.intern_pool.typeOf(val.toIntern()))) {
+ .array_type => |array_type| try mod.arrayType(.{
+ .len = @as(u32, @intCast(end - start)),
+ .child = array_type.child,
+ .sentinel = if (end == array_type.len) array_type.sentinel else .none,
+ }),
+ .vector_type => |vector_type| try mod.vectorType(.{
+ .len = @as(u32, @intCast(end - start)),
+ .child = vector_type.child,
+ }),
else => unreachable,
+ }.toIntern(),
+ .storage = switch (aggregate.storage) {
+ .bytes => .{ .bytes = try sema.arena.dupe(u8, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.bytes[start..end]) },
+ .elems => .{ .elems = try sema.arena.dupe(InternPool.Index, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.elems[start..end]) },
+ .repeated_elem => |elem| .{ .repeated_elem = elem },
},
- .aggregate => |aggregate| Value.fromInterned((try mod.intern(.{ .aggregate = .{
- .ty = switch (mod.intern_pool.indexToKey(mod.intern_pool.typeOf(val.toIntern()))) {
- .array_type => |array_type| try mod.arrayType(.{
- .len = @as(u32, @intCast(end - start)),
- .child = array_type.child,
- .sentinel = if (end == array_type.len) array_type.sentinel else .none,
- }),
- .vector_type => |vector_type| try mod.vectorType(.{
- .len = @as(u32, @intCast(end - start)),
- .child = vector_type.child,
- }),
- else => unreachable,
- }.toIntern(),
- .storage = switch (aggregate.storage) {
- .bytes => .{ .bytes = try sema.arena.dupe(u8, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.bytes[start..end]) },
- .elems => .{ .elems = try sema.arena.dupe(InternPool.Index, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.elems[start..end]) },
- .repeated_elem => |elem| .{ .repeated_elem = elem },
- },
- } }))),
- else => unreachable,
- },
+ } }))),
+ else => unreachable,
};
}
pub fn fieldValue(val: Value, mod: *Module, index: usize) !Value {
- return switch (val.ip_index) {
- .none => switch (val.tag()) {
- .aggregate => {
- const field_values = val.castTag(.aggregate).?.data;
- return field_values[index];
- },
- .@"union" => {
- const payload = val.castTag(.@"union").?.data;
- // TODO assert the tag is correct
- return payload.val;
- },
- else => unreachable,
- },
- else => switch (mod.intern_pool.indexToKey(val.toIntern())) {
- .undef => |ty| Value.fromInterned((try mod.intern(.{
- .undef = Type.fromInterned(ty).structFieldType(index, mod).toIntern(),
- }))),
- .aggregate => |aggregate| Value.fromInterned(switch (aggregate.storage) {
- .bytes => |bytes| try mod.intern(.{ .int = .{
- .ty = .u8_type,
- .storage = .{ .u64 = bytes[index] },
- } }),
- .elems => |elems| elems[index],
- .repeated_elem => |elem| elem,
- }),
- // TODO assert the tag is correct
- .un => |un| Value.fromInterned(un.val),
- else => unreachable,
- },
+ return switch (mod.intern_pool.indexToKey(val.toIntern())) {
+ .undef => |ty| Value.fromInterned((try mod.intern(.{
+ .undef = Type.fromInterned(ty).structFieldType(index, mod).toIntern(),
+ }))),
+ .aggregate => |aggregate| Value.fromInterned(switch (aggregate.storage) {
+ .bytes => |bytes| try mod.intern(.{ .int = .{
+ .ty = .u8_type,
+ .storage = .{ .u64 = bytes[index] },
+ } }),
+ .elems => |elems| elems[index],
+ .repeated_elem => |elem| elem,
+ }),
+ // TODO assert the tag is correct
+ .un => |un| Value.fromInterned(un.val),
+ else => unreachable,
};
}
pub fn unionTag(val: Value, mod: *Module) ?Value {
- if (val.ip_index == .none) return val.castTag(.@"union").?.data.tag;
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.undef, .enum_tag => val,
.un => |un| if (un.tag != .none) Value.fromInterned(un.tag) else return null,
@@ -1775,7 +1422,6 @@ pub fn unionTag(val: Value, mod: *Module) ?Value {
}
pub fn unionValue(val: Value, mod: *Module) Value {
- if (val.ip_index == .none) return val.castTag(.@"union").?.data.val;
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.un => |un| Value.fromInterned(un.val),
else => unreachable,
@@ -1821,7 +1467,7 @@ pub fn elemPtr(
}
pub fn isUndef(val: Value, mod: *Module) bool {
- return val.ip_index != .none and mod.intern_pool.isUndef(val.toIntern());
+ return mod.intern_pool.isUndef(val.toIntern());
}
/// TODO: check for cases such as array that is not marked undef but all the element
@@ -1915,7 +1561,7 @@ pub fn floatFromIntAdvanced(val: Value, arena: Allocator, int_ty: Type, float_ty
const scalar_ty = float_ty.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try floatFromIntScalar(elem_val, scalar_ty, mod, opt_sema)).intern(scalar_ty, mod);
+ scalar.* = (try floatFromIntScalar(elem_val, scalar_ty, mod, opt_sema)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_ty.toIntern(),
@@ -1993,7 +1639,7 @@ pub fn intAddSat(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try intAddSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try intAddSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2043,7 +1689,7 @@ pub fn intSubSat(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try intSubSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try intSubSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2095,8 +1741,8 @@ pub fn intMulWithOverflow(
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
const of_math_result = try intMulWithOverflowScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod);
- of.* = try of_math_result.overflow_bit.intern(Type.u1, mod);
- scalar.* = try of_math_result.wrapped_result.intern(scalar_ty, mod);
+ of.* = of_math_result.overflow_bit.toIntern();
+ scalar.* = of_math_result.wrapped_result.toIntern();
}
return OverflowArithmeticResult{
.overflow_bit = Value.fromInterned((try mod.intern(.{ .aggregate = .{
@@ -2161,7 +1807,7 @@ pub fn numberMulWrap(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try numberMulWrapScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try numberMulWrapScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2207,7 +1853,7 @@ pub fn intMulSat(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try intMulSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try intMulSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2283,7 +1929,7 @@ pub fn bitwiseNot(val: Value, ty: Type, arena: Allocator, mod: *Module) !Value {
const scalar_ty = ty.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try bitwiseNotScalar(elem_val, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try bitwiseNotScalar(elem_val, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2326,7 +1972,7 @@ pub fn bitwiseAnd(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try bitwiseAndScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod);
+ scalar.* = (try bitwiseAndScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2365,7 +2011,7 @@ pub fn bitwiseNand(lhs: Value, rhs: Value, ty: Type, arena: Allocator, mod: *Mod
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try bitwiseNandScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try bitwiseNandScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2393,7 +2039,7 @@ pub fn bitwiseOr(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *M
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try bitwiseOrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod);
+ scalar.* = (try bitwiseOrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2431,7 +2077,7 @@ pub fn bitwiseXor(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try bitwiseXorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod);
+ scalar.* = (try bitwiseXorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2497,7 +2143,7 @@ fn intDivInner(lhs: Value, rhs: Value, ty: Type, overflow_idx: *usize, allocator
},
else => |e| return e,
};
- scalar.* = try val.intern(scalar_ty, mod);
+ scalar.* = val.toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2545,7 +2191,7 @@ pub fn intDivFloor(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod:
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try intDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod);
+ scalar.* = (try intDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2587,7 +2233,7 @@ pub fn intMod(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Modu
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try intModScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod);
+ scalar.* = (try intModScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2624,7 +2270,6 @@ pub fn intModScalar(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod:
/// Returns true if the value is a floating point type and is NaN. Returns false otherwise.
pub fn isNan(val: Value, mod: *const Module) bool {
- if (val.ip_index == .none) return false;
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.float => |float| switch (float.storage) {
inline else => |x| std.math.isNan(x),
@@ -2635,7 +2280,6 @@ pub fn isNan(val: Value, mod: *const Module) bool {
/// Returns true if the value is a floating point type and is infinite. Returns false otherwise.
pub fn isInf(val: Value, mod: *const Module) bool {
- if (val.ip_index == .none) return false;
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.float => |float| switch (float.storage) {
inline else => |x| std.math.isInf(x),
@@ -2645,7 +2289,6 @@ pub fn isInf(val: Value, mod: *const Module) bool {
}
pub fn isNegativeInf(val: Value, mod: *const Module) bool {
- if (val.ip_index == .none) return false;
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
.float => |float| switch (float.storage) {
inline else => |x| std.math.isNegativeInf(x),
@@ -2661,7 +2304,7 @@ pub fn floatRem(lhs: Value, rhs: Value, float_type: Type, arena: Allocator, mod:
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatRemScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatRemScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -2694,7 +2337,7 @@ pub fn floatMod(lhs: Value, rhs: Value, float_type: Type, arena: Allocator, mod:
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatModScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatModScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -2755,7 +2398,7 @@ fn intMulInner(lhs: Value, rhs: Value, ty: Type, overflow_idx: *usize, allocator
},
else => |e| return e,
};
- scalar.* = try val.intern(scalar_ty, mod);
+ scalar.* = val.toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2797,7 +2440,7 @@ pub fn intTrunc(val: Value, ty: Type, allocator: Allocator, signedness: std.buil
const scalar_ty = ty.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, bits, mod)).intern(scalar_ty, mod);
+ scalar.* = (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, bits, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2822,7 +2465,7 @@ pub fn intTruncBitsAsValue(
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
const bits_elem = try bits.elemValue(mod, i);
- scalar.* = try (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, @as(u16, @intCast(bits_elem.toUnsignedInt(mod))), mod)).intern(scalar_ty, mod);
+ scalar.* = (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, @as(u16, @intCast(bits_elem.toUnsignedInt(mod))), mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2862,7 +2505,7 @@ pub fn shl(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module)
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try shlScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod);
+ scalar.* = (try shlScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -2912,8 +2555,8 @@ pub fn shlWithOverflow(
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
const of_math_result = try shlWithOverflowScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod);
- of.* = try of_math_result.overflow_bit.intern(Type.u1, mod);
- scalar.* = try of_math_result.wrapped_result.intern(scalar_ty, mod);
+ of.* = of_math_result.overflow_bit.toIntern();
+ scalar.* = of_math_result.wrapped_result.toIntern();
}
return OverflowArithmeticResult{
.overflow_bit = Value.fromInterned((try mod.intern(.{ .aggregate = .{
@@ -2973,7 +2616,7 @@ pub fn shlSat(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try shlSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try shlSatScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -3023,7 +2666,7 @@ pub fn shlTrunc(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try shlTruncScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).intern(scalar_ty, mod);
+ scalar.* = (try shlTruncScalar(lhs_elem, rhs_elem, scalar_ty, arena, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -3053,7 +2696,7 @@ pub fn shr(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module)
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try shrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).intern(scalar_ty, mod);
+ scalar.* = (try shrScalar(lhs_elem, rhs_elem, scalar_ty, allocator, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -3105,7 +2748,7 @@ pub fn floatNeg(
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try floatNegScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatNegScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3148,7 +2791,7 @@ pub fn floatAdd(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatAddScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatAddScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3192,7 +2835,7 @@ pub fn floatSub(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatSubScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatSubScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3236,7 +2879,7 @@ pub fn floatDiv(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatDivScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatDivScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3280,7 +2923,7 @@ pub fn floatDivFloor(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatDivFloorScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3324,7 +2967,7 @@ pub fn floatDivTrunc(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatDivTruncScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatDivTruncScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3368,7 +3011,7 @@ pub fn floatMul(
for (result_data, 0..) |*scalar, i| {
const lhs_elem = try lhs.elemValue(mod, i);
const rhs_elem = try rhs.elemValue(mod, i);
- scalar.* = try (try floatMulScalar(lhs_elem, rhs_elem, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floatMulScalar(lhs_elem, rhs_elem, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3405,7 +3048,7 @@ pub fn sqrt(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try sqrtScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try sqrtScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3437,7 +3080,7 @@ pub fn sin(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try sinScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try sinScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3469,7 +3112,7 @@ pub fn cos(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try cosScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try cosScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3501,7 +3144,7 @@ pub fn tan(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try tanScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try tanScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3533,7 +3176,7 @@ pub fn exp(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try expScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try expScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3565,7 +3208,7 @@ pub fn exp2(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try exp2Scalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try exp2Scalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3597,7 +3240,7 @@ pub fn log(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try logScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try logScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3629,7 +3272,7 @@ pub fn log2(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try log2Scalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try log2Scalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3661,7 +3304,7 @@ pub fn log10(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try log10Scalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try log10Scalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3693,7 +3336,7 @@ pub fn abs(val: Value, ty: Type, arena: Allocator, mod: *Module) !Value {
const scalar_ty = ty.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try absScalar(elem_val, scalar_ty, mod, arena)).intern(scalar_ty, mod);
+ scalar.* = (try absScalar(elem_val, scalar_ty, mod, arena)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -3744,7 +3387,7 @@ pub fn floor(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try floorScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try floorScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3776,7 +3419,7 @@ pub fn ceil(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try ceilScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try ceilScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3808,7 +3451,7 @@ pub fn round(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try roundScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try roundScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3840,7 +3483,7 @@ pub fn trunc(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Valu
const scalar_ty = float_type.scalarType(mod);
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
- scalar.* = try (try truncScalar(elem_val, scalar_ty, mod)).intern(scalar_ty, mod);
+ scalar.* = (try truncScalar(elem_val, scalar_ty, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3881,7 +3524,7 @@ pub fn mulAdd(
const mulend1_elem = try mulend1.elemValue(mod, i);
const mulend2_elem = try mulend2.elemValue(mod, i);
const addend_elem = try addend.elemValue(mod, i);
- scalar.* = try (try mulAddScalar(scalar_ty, mulend1_elem, mulend2_elem, addend_elem, mod)).intern(scalar_ty, mod);
+ scalar.* = (try mulAddScalar(scalar_ty, mulend1_elem, mulend2_elem, addend_elem, mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = float_type.toIntern(),
@@ -3957,98 +3600,26 @@ pub fn intValueBounds(val: Value, mod: *Module) !?[2]Value {
};
}
-/// This type is not copyable since it may contain pointers to its inner data.
-pub const Payload = struct {
- tag: Tag,
-
- pub const Slice = struct {
- base: Payload,
- data: struct {
- ptr: Value,
- len: Value,
- },
- };
-
- pub const Bytes = struct {
- base: Payload,
- /// Includes the sentinel, if any.
- data: []const u8,
- };
-
- pub const SubValue = struct {
- base: Payload,
- data: Value,
- };
-
- pub const Aggregate = struct {
- base: Payload,
- /// Field values. The types are according to the struct or array type.
- /// The length is provided here so that copying a Value does not depend on the Type.
- data: []Value,
- };
-
- pub const Union = struct {
- pub const base_tag = Tag.@"union";
-
- base: Payload = .{ .tag = base_tag },
- data: Data,
-
- pub const Data = struct {
- tag: ?Value,
- val: Value,
- };
- };
-};
-
pub const BigIntSpace = InternPool.Key.Int.Storage.BigIntSpace;
-pub const zero_usize: Value = .{ .ip_index = .zero_usize, .legacy = undefined };
-pub const zero_u8: Value = .{ .ip_index = .zero_u8, .legacy = undefined };
-pub const zero_comptime_int: Value = .{ .ip_index = .zero, .legacy = undefined };
-pub const one_comptime_int: Value = .{ .ip_index = .one, .legacy = undefined };
-pub const negative_one_comptime_int: Value = .{ .ip_index = .negative_one, .legacy = undefined };
-pub const undef: Value = .{ .ip_index = .undef, .legacy = undefined };
-pub const @"void": Value = .{ .ip_index = .void_value, .legacy = undefined };
-pub const @"null": Value = .{ .ip_index = .null_value, .legacy = undefined };
-pub const @"false": Value = .{ .ip_index = .bool_false, .legacy = undefined };
-pub const @"true": Value = .{ .ip_index = .bool_true, .legacy = undefined };
-pub const @"unreachable": Value = .{ .ip_index = .unreachable_value, .legacy = undefined };
-
-pub const generic_poison: Value = .{ .ip_index = .generic_poison, .legacy = undefined };
-pub const generic_poison_type: Value = .{ .ip_index = .generic_poison_type, .legacy = undefined };
-pub const empty_struct: Value = .{ .ip_index = .empty_struct, .legacy = undefined };
+pub const zero_usize: Value = .{ .ip_index = .zero_usize };
+pub const zero_u8: Value = .{ .ip_index = .zero_u8 };
+pub const zero_comptime_int: Value = .{ .ip_index = .zero };
+pub const one_comptime_int: Value = .{ .ip_index = .one };
+pub const negative_one_comptime_int: Value = .{ .ip_index = .negative_one };
+pub const undef: Value = .{ .ip_index = .undef };
+pub const @"void": Value = .{ .ip_index = .void_value };
+pub const @"null": Value = .{ .ip_index = .null_value };
+pub const @"false": Value = .{ .ip_index = .bool_false };
+pub const @"true": Value = .{ .ip_index = .bool_true };
+pub const @"unreachable": Value = .{ .ip_index = .unreachable_value };
+
+pub const generic_poison: Value = .{ .ip_index = .generic_poison };
+pub const generic_poison_type: Value = .{ .ip_index = .generic_poison_type };
+pub const empty_struct: Value = .{ .ip_index = .empty_struct };
pub fn makeBool(x: bool) Value {
return if (x) Value.true else Value.false;
}
pub const RuntimeIndex = InternPool.RuntimeIndex;
-
-/// This function is used in the debugger pretty formatters in tools/ to fetch the
-/// Tag to Payload mapping to facilitate fancy debug printing for this type.
-fn dbHelper(self: *Value, tag_to_payload_map: *map: {
- const tags = @typeInfo(Tag).Enum.fields;
- var fields: [tags.len]std.builtin.Type.StructField = undefined;
- for (&fields, tags) |*field, t| field.* = .{
- .name = t.name ++ "",
- .type = *@field(Tag, t.name).Type(),
- .default_value = null,
- .is_comptime = false,
- .alignment = 0,
- };
- break :map @Type(.{ .Struct = .{
- .layout = .@"extern",
- .fields = &fields,
- .decls = &.{},
- .is_tuple = false,
- } });
-}) void {
- _ = self;
- _ = tag_to_payload_map;
-}
-
-comptime {
- if (!builtin.strip_debug_info) {
- _ = &dbHelper;
- }
-}