Diffstat (limited to 'src')
-rw-r--r--  src/Compilation.zig  21
-rw-r--r--  src/InternPool.zig  354
-rw-r--r--  src/Module.zig  113
-rw-r--r--  src/Package.zig  4
-rw-r--r--  src/Package/Fetch.zig  615
-rw-r--r--  src/Package/Fetch/git.zig  57
-rw-r--r--  src/Package/Fetch/testdata/duplicate_paths.tar.gz  bin  0 -> 3230 bytes
-rw-r--r--  src/Package/Fetch/testdata/duplicate_paths_excluded.tar.gz  bin  0 -> 3237 bytes
-rw-r--r--  src/Package/Fetch/testdata/executables.tar.gz  bin  0 -> 430 bytes
-rw-r--r--  src/Package/Fetch/testdata/no_root.tar.gz  bin  0 -> 3172 bytes
-rw-r--r--  src/Sema.zig  581
-rw-r--r--  src/Value.zig  180
-rw-r--r--  src/arch/aarch64/CodeGen.zig  4
-rw-r--r--  src/arch/wasm/CodeGen.zig  19
-rw-r--r--  src/arch/x86_64/CodeGen.zig  6
-rw-r--r--  src/codegen.zig  107
-rw-r--r--  src/codegen/c.zig  1701
-rw-r--r--  src/codegen/c/Type.zig  338
-rw-r--r--  src/codegen/llvm.zig  113
-rw-r--r--  src/codegen/spirv.zig  43
-rw-r--r--  src/link/Coff.zig  48
-rw-r--r--  src/link/Dwarf.zig  37
-rw-r--r--  src/link/Elf.zig  29
-rw-r--r--  src/link/Elf/Atom.zig  4
-rw-r--r--  src/link/Elf/Object.zig  2
-rw-r--r--  src/link/Elf/Symbol.zig  24
-rw-r--r--  src/link/Elf/ZigObject.zig  34
-rw-r--r--  src/link/Elf/synthetic_sections.zig  24
-rw-r--r--  src/link/MachO/ZigObject.zig  38
-rw-r--r--  src/link/Plan9.zig  42
-rw-r--r--  src/link/SpirV.zig  24
-rw-r--r--  src/link/Wasm/ZigObject.zig  48
-rw-r--r--  src/mutable_value.zig  12
-rw-r--r--  src/print_value.zig  49
-rw-r--r--  src/target.zig  13
-rw-r--r--  src/translate_c.zig  12
-rw-r--r--  src/type.zig  22
37 files changed, 2760 insertions, 1958 deletions
diff --git a/src/Compilation.zig b/src/Compilation.zig
index c533f2fae7..7af3d7bfd1 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -3159,7 +3159,7 @@ pub fn addModuleErrorMsg(mod: *Module, eb: *ErrorBundle.Wip, module_err_msg: Mod
const rt_file_path = try module_reference.src_loc.file_scope.fullPath(gpa);
defer gpa.free(rt_file_path);
ref_traces.appendAssumeCapacity(.{
- .decl_name = try eb.addString(ip.stringToSlice(module_reference.decl)),
+ .decl_name = try eb.addString(module_reference.decl.toSlice(ip)),
.src_loc = try eb.addSourceLocation(.{
.src_path = try eb.addString(rt_file_path),
.span_start = span.start,
@@ -3731,24 +3731,24 @@ fn docsCopyFallible(comp: *Compilation) anyerror!void {
};
defer tar_file.close();
- var seen_table: std.AutoArrayHashMapUnmanaged(*Package.Module, void) = .{};
+ var seen_table: std.AutoArrayHashMapUnmanaged(*Package.Module, []const u8) = .{};
defer seen_table.deinit(comp.gpa);
- try seen_table.put(comp.gpa, zcu.main_mod, {});
- try seen_table.put(comp.gpa, zcu.std_mod, {});
+ try seen_table.put(comp.gpa, zcu.main_mod, comp.root_name);
+ try seen_table.put(comp.gpa, zcu.std_mod, zcu.std_mod.fully_qualified_name);
var i: usize = 0;
while (i < seen_table.count()) : (i += 1) {
const mod = seen_table.keys()[i];
- try comp.docsCopyModule(mod, tar_file);
+ try comp.docsCopyModule(mod, seen_table.values()[i], tar_file);
const deps = mod.deps.values();
try seen_table.ensureUnusedCapacity(comp.gpa, deps.len);
- for (deps) |dep| seen_table.putAssumeCapacity(dep, {});
+ for (deps) |dep| seen_table.putAssumeCapacity(dep, dep.fully_qualified_name);
}
}
-fn docsCopyModule(comp: *Compilation, module: *Package.Module, tar_file: std.fs.File) !void {
+fn docsCopyModule(comp: *Compilation, module: *Package.Module, name: []const u8, tar_file: std.fs.File) !void {
const root = module.root;
const sub_path = if (root.sub_path.len == 0) "." else root.sub_path;
var mod_dir = root.root_dir.handle.openDir(sub_path, .{ .iterate = true }) catch |err| {
@@ -3788,7 +3788,7 @@ fn docsCopyModule(comp: *Compilation, module: *Package.Module, tar_file: std.fs.
var file_header = std.tar.output.Header.init();
file_header.typeflag = .regular;
- try file_header.setPath(module.fully_qualified_name, entry.path);
+ try file_header.setPath(name, entry.path);
try file_header.setSize(stat.size);
try file_header.updateChecksum();
@@ -4074,8 +4074,7 @@ fn workerCheckEmbedFile(
fn detectEmbedFileUpdate(comp: *Compilation, embed_file: *Module.EmbedFile) !void {
const mod = comp.module.?;
const ip = &mod.intern_pool;
- const sub_file_path = ip.stringToSlice(embed_file.sub_file_path);
- var file = try embed_file.owner.root.openFile(sub_file_path, .{});
+ var file = try embed_file.owner.root.openFile(embed_file.sub_file_path.toSlice(ip), .{});
defer file.close();
const stat = try file.stat();
@@ -4444,7 +4443,7 @@ fn reportRetryableEmbedFileError(
const ip = &mod.intern_pool;
const err_msg = try Module.ErrorMsg.create(gpa, src_loc, "unable to load '{}{s}': {s}", .{
embed_file.owner.root,
- ip.stringToSlice(embed_file.sub_file_path),
+ embed_file.sub_file_path.toSlice(ip),
@errorName(err),
});
diff --git a/src/InternPool.zig b/src/InternPool.zig
index 1155d8c5ad..15dba62e07 100644
--- a/src/InternPool.zig
+++ b/src/InternPool.zig
@@ -351,7 +351,7 @@ const KeyAdapter = struct {
pub fn eql(ctx: @This(), a: Key, b_void: void, b_map_index: usize) bool {
_ = b_void;
if (ctx.intern_pool.items.items(.tag)[b_map_index] == .removed) return false;
- return ctx.intern_pool.indexToKey(@as(Index, @enumFromInt(b_map_index))).eql(a, ctx.intern_pool);
+ return ctx.intern_pool.indexToKey(@enumFromInt(b_map_index)).eql(a, ctx.intern_pool);
}
pub fn hash(ctx: @This(), a: Key) u32 {
@@ -385,7 +385,7 @@ pub const RuntimeIndex = enum(u32) {
_,
pub fn increment(ri: *RuntimeIndex) void {
- ri.* = @as(RuntimeIndex, @enumFromInt(@intFromEnum(ri.*) + 1));
+ ri.* = @enumFromInt(@intFromEnum(ri.*) + 1);
}
};
@@ -418,12 +418,44 @@ pub const OptionalNamespaceIndex = enum(u32) {
/// An index into `string_bytes`.
pub const String = enum(u32) {
+ /// An empty string.
+ empty = 0,
+ _,
+
+ pub fn toSlice(string: String, len: u64, ip: *const InternPool) []const u8 {
+ return ip.string_bytes.items[@intFromEnum(string)..][0..@intCast(len)];
+ }
+
+ pub fn at(string: String, index: u64, ip: *const InternPool) u8 {
+ return ip.string_bytes.items[@intCast(@intFromEnum(string) + index)];
+ }
+
+ pub fn toNullTerminatedString(string: String, len: u64, ip: *const InternPool) NullTerminatedString {
+ assert(std.mem.indexOfScalar(u8, string.toSlice(len, ip), 0) == null);
+ assert(string.at(len, ip) == 0);
+ return @enumFromInt(@intFromEnum(string));
+ }
+};
+
+/// An index into `string_bytes` which might be `none`.
+pub const OptionalString = enum(u32) {
+ /// This is distinct from `none` - it is a valid index that represents empty string.
+ empty = 0,
+ none = std.math.maxInt(u32),
_,
+
+ pub fn unwrap(string: OptionalString) ?String {
+ return if (string != .none) @enumFromInt(@intFromEnum(string)) else null;
+ }
+
+ pub fn toSlice(string: OptionalString, len: u64, ip: *const InternPool) ?[]const u8 {
+ return (string.unwrap() orelse return null).toSlice(len, ip);
+ }
};
/// An index into `string_bytes`.
pub const NullTerminatedString = enum(u32) {
- /// This is distinct from `none` - it is a valid index that represents empty string.
+ /// An empty string.
empty = 0,
_,
@@ -447,6 +479,19 @@ pub const NullTerminatedString = enum(u32) {
return @enumFromInt(@intFromEnum(self));
}
+ pub fn toSlice(string: NullTerminatedString, ip: *const InternPool) [:0]const u8 {
+ const slice = ip.string_bytes.items[@intFromEnum(string)..];
+ return slice[0..std.mem.indexOfScalar(u8, slice, 0).? :0];
+ }
+
+ pub fn length(string: NullTerminatedString, ip: *const InternPool) u32 {
+ return @intCast(string.toSlice(ip).len);
+ }
+
+ pub fn eqlSlice(string: NullTerminatedString, slice: []const u8, ip: *const InternPool) bool {
+ return std.mem.eql(u8, string.toSlice(ip), slice);
+ }
+
const Adapter = struct {
strings: []const NullTerminatedString,
@@ -467,11 +512,11 @@ pub const NullTerminatedString = enum(u32) {
return @intFromEnum(a) < @intFromEnum(b);
}
- pub fn toUnsigned(self: NullTerminatedString, ip: *const InternPool) ?u32 {
- const s = ip.stringToSlice(self);
- if (s.len > 1 and s[0] == '0') return null;
- if (std.mem.indexOfScalar(u8, s, '_')) |_| return null;
- return std.fmt.parseUnsigned(u32, s, 10) catch null;
+ pub fn toUnsigned(string: NullTerminatedString, ip: *const InternPool) ?u32 {
+ const slice = string.toSlice(ip);
+ if (slice.len > 1 and slice[0] == '0') return null;
+ if (std.mem.indexOfScalar(u8, slice, '_')) |_| return null;
+ return std.fmt.parseUnsigned(u32, slice, 10) catch null;
}
const FormatData = struct {
@@ -484,11 +529,11 @@ pub const NullTerminatedString = enum(u32) {
_: std.fmt.FormatOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
- const s = data.ip.stringToSlice(data.string);
+ const slice = data.string.toSlice(data.ip);
if (comptime std.mem.eql(u8, specifier, "")) {
- try writer.writeAll(s);
+ try writer.writeAll(slice);
} else if (comptime std.mem.eql(u8, specifier, "i")) {
- try writer.print("{p}", .{std.zig.fmtId(s)});
+ try writer.print("{p}", .{std.zig.fmtId(slice)});
} else @compileError("invalid format string '" ++ specifier ++ "' for '" ++ @typeName(NullTerminatedString) ++ "'");
}
@@ -504,9 +549,12 @@ pub const OptionalNullTerminatedString = enum(u32) {
none = std.math.maxInt(u32),
_,
- pub fn unwrap(oi: OptionalNullTerminatedString) ?NullTerminatedString {
- if (oi == .none) return null;
- return @enumFromInt(@intFromEnum(oi));
+ pub fn unwrap(string: OptionalNullTerminatedString) ?NullTerminatedString {
+ return if (string != .none) @enumFromInt(@intFromEnum(string)) else null;
+ }
+
+ pub fn toSlice(string: OptionalNullTerminatedString, ip: *const InternPool) ?[:0]const u8 {
+ return (string.unwrap() orelse return null).toSlice(ip);
}
};
@@ -690,6 +738,10 @@ pub const Key = union(enum) {
len: u64,
child: Index,
sentinel: Index = .none,
+
+ pub fn lenIncludingSentinel(array_type: ArrayType) u64 {
+ return array_type.len + @intFromBool(array_type.sentinel != .none);
+ }
};
/// Extern so that hashing can be done via memory reinterpreting.
@@ -1043,7 +1095,7 @@ pub const Key = union(enum) {
storage: Storage,
pub const Storage = union(enum) {
- bytes: []const u8,
+ bytes: String,
elems: []const Index,
repeated_elem: Index,
@@ -1203,7 +1255,7 @@ pub const Key = union(enum) {
if (child == .u8_type) {
switch (aggregate.storage) {
- .bytes => |bytes| for (bytes[0..@intCast(len)]) |byte| {
+ .bytes => |bytes| for (bytes.toSlice(len, ip)) |byte| {
std.hash.autoHash(&hasher, KeyTag.int);
std.hash.autoHash(&hasher, byte);
},
@@ -1240,7 +1292,7 @@ pub const Key = union(enum) {
switch (aggregate.storage) {
.bytes => unreachable,
- .elems => |elems| for (elems[0..@as(usize, @intCast(len))]) |elem|
+ .elems => |elems| for (elems[0..@intCast(len)]) |elem|
std.hash.autoHash(&hasher, elem),
.repeated_elem => |elem| {
var remaining = len;
@@ -1505,11 +1557,11 @@ pub const Key = union(enum) {
if (a_info.ty == .c_longdouble_type and a_info.storage != .f80) {
// These are strange: we'll sometimes represent them as f128, even if the
// underlying type is smaller. f80 is an exception: see float_c_longdouble_f80.
- const a_val = switch (a_info.storage) {
- inline else => |val| @as(u128, @bitCast(@as(f128, @floatCast(val)))),
+ const a_val: u128 = switch (a_info.storage) {
+ inline else => |val| @bitCast(@as(f128, @floatCast(val))),
};
- const b_val = switch (b_info.storage) {
- inline else => |val| @as(u128, @bitCast(@as(f128, @floatCast(val)))),
+ const b_val: u128 = switch (b_info.storage) {
+ inline else => |val| @bitCast(@as(f128, @floatCast(val))),
};
return a_val == b_val;
}
@@ -1560,11 +1612,11 @@ pub const Key = union(enum) {
const len = ip.aggregateTypeLen(a_info.ty);
const StorageTag = @typeInfo(Key.Aggregate.Storage).Union.tag_type.?;
if (@as(StorageTag, a_info.storage) != @as(StorageTag, b_info.storage)) {
- for (0..@as(usize, @intCast(len))) |elem_index| {
+ for (0..@intCast(len)) |elem_index| {
const a_elem = switch (a_info.storage) {
.bytes => |bytes| ip.getIfExists(.{ .int = .{
.ty = .u8_type,
- .storage = .{ .u64 = bytes[elem_index] },
+ .storage = .{ .u64 = bytes.at(elem_index, ip) },
} }) orelse return false,
.elems => |elems| elems[elem_index],
.repeated_elem => |elem| elem,
@@ -1572,7 +1624,7 @@ pub const Key = union(enum) {
const b_elem = switch (b_info.storage) {
.bytes => |bytes| ip.getIfExists(.{ .int = .{
.ty = .u8_type,
- .storage = .{ .u64 = bytes[elem_index] },
+ .storage = .{ .u64 = bytes.at(elem_index, ip) },
} }) orelse return false,
.elems => |elems| elems[elem_index],
.repeated_elem => |elem| elem,
@@ -1585,18 +1637,15 @@ pub const Key = union(enum) {
switch (a_info.storage) {
.bytes => |a_bytes| {
const b_bytes = b_info.storage.bytes;
- return std.mem.eql(
- u8,
- a_bytes[0..@as(usize, @intCast(len))],
- b_bytes[0..@as(usize, @intCast(len))],
- );
+ return a_bytes == b_bytes or
+ std.mem.eql(u8, a_bytes.toSlice(len, ip), b_bytes.toSlice(len, ip));
},
.elems => |a_elems| {
const b_elems = b_info.storage.elems;
return std.mem.eql(
Index,
- a_elems[0..@as(usize, @intCast(len))],
- b_elems[0..@as(usize, @intCast(len))],
+ a_elems[0..@intCast(len)],
+ b_elems[0..@intCast(len)],
);
},
.repeated_elem => |a_elem| {
@@ -4175,10 +4224,10 @@ pub const Float64 = struct {
}
fn pack(val: f64) Float64 {
- const bits = @as(u64, @bitCast(val));
+ const bits: u64 = @bitCast(val);
return .{
- .piece0 = @as(u32, @truncate(bits)),
- .piece1 = @as(u32, @truncate(bits >> 32)),
+ .piece0 = @truncate(bits),
+ .piece1 = @truncate(bits >> 32),
};
}
};
@@ -4197,11 +4246,11 @@ pub const Float80 = struct {
}
fn pack(val: f80) Float80 {
- const bits = @as(u80, @bitCast(val));
+ const bits: u80 = @bitCast(val);
return .{
- .piece0 = @as(u32, @truncate(bits)),
- .piece1 = @as(u32, @truncate(bits >> 32)),
- .piece2 = @as(u16, @truncate(bits >> 64)),
+ .piece0 = @truncate(bits),
+ .piece1 = @truncate(bits >> 32),
+ .piece2 = @truncate(bits >> 64),
};
}
};
@@ -4222,12 +4271,12 @@ pub const Float128 = struct {
}
fn pack(val: f128) Float128 {
- const bits = @as(u128, @bitCast(val));
+ const bits: u128 = @bitCast(val);
return .{
- .piece0 = @as(u32, @truncate(bits)),
- .piece1 = @as(u32, @truncate(bits >> 32)),
- .piece2 = @as(u32, @truncate(bits >> 64)),
- .piece3 = @as(u32, @truncate(bits >> 96)),
+ .piece0 = @truncate(bits),
+ .piece1 = @truncate(bits >> 32),
+ .piece2 = @truncate(bits >> 64),
+ .piece3 = @truncate(bits >> 96),
};
}
};
@@ -4244,7 +4293,7 @@ pub fn init(ip: *InternPool, gpa: Allocator) !void {
assert(ip.items.len == 0);
// Reserve string index 0 for an empty string.
- assert((try ip.getOrPutString(gpa, "")) == .empty);
+ assert((try ip.getOrPutString(gpa, "", .no_embedded_nulls)) == .empty);
// So that we can use `catch unreachable` below.
try ip.items.ensureUnusedCapacity(gpa, static_keys.len);
@@ -4329,13 +4378,13 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
.type_int_signed => .{
.int_type = .{
.signedness = .signed,
- .bits = @as(u16, @intCast(data)),
+ .bits = @intCast(data),
},
},
.type_int_unsigned => .{
.int_type = .{
.signedness = .unsigned,
- .bits = @as(u16, @intCast(data)),
+ .bits = @intCast(data),
},
},
.type_array_big => {
@@ -4354,8 +4403,8 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
.sentinel = .none,
} };
},
- .simple_type => .{ .simple_type = @as(SimpleType, @enumFromInt(data)) },
- .simple_value => .{ .simple_value = @as(SimpleValue, @enumFromInt(data)) },
+ .simple_type => .{ .simple_type = @enumFromInt(data) },
+ .simple_value => .{ .simple_value = @enumFromInt(data) },
.type_vector => {
const vector_info = ip.extraData(Vector, data);
@@ -4506,9 +4555,9 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
} },
.type_function => .{ .func_type = ip.extraFuncType(data) },
- .undef => .{ .undef = @as(Index, @enumFromInt(data)) },
+ .undef => .{ .undef = @enumFromInt(data) },
.opt_null => .{ .opt = .{
- .ty = @as(Index, @enumFromInt(data)),
+ .ty = @enumFromInt(data),
.val = .none,
} },
.opt_payload => {
@@ -4670,11 +4719,11 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
},
.float_f16 => .{ .float = .{
.ty = .f16_type,
- .storage = .{ .f16 = @as(f16, @bitCast(@as(u16, @intCast(data)))) },
+ .storage = .{ .f16 = @bitCast(@as(u16, @intCast(data))) },
} },
.float_f32 => .{ .float = .{
.ty = .f32_type,
- .storage = .{ .f32 = @as(f32, @bitCast(data)) },
+ .storage = .{ .f32 = @bitCast(data) },
} },
.float_f64 => .{ .float = .{
.ty = .f64_type,
@@ -4771,10 +4820,9 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
},
.bytes => {
const extra = ip.extraData(Bytes, data);
- const len: u32 = @intCast(ip.aggregateTypeLenIncludingSentinel(extra.ty));
return .{ .aggregate = .{
.ty = extra.ty,
- .storage = .{ .bytes = ip.string_bytes.items[@intFromEnum(extra.bytes)..][0..len] },
+ .storage = .{ .bytes = extra.bytes },
} };
},
.aggregate => {
@@ -4809,14 +4857,14 @@ pub fn indexToKey(ip: *const InternPool, index: Index) Key {
.val = .{ .payload = extra.val },
} };
},
- .enum_literal => .{ .enum_literal = @as(NullTerminatedString, @enumFromInt(data)) },
+ .enum_literal => .{ .enum_literal = @enumFromInt(data) },
.enum_tag => .{ .enum_tag = ip.extraData(Tag.EnumTag, data) },
.memoized_call => {
const extra = ip.extraDataTrail(MemoizedCall, data);
return .{ .memoized_call = .{
.func = extra.data.func,
- .arg_values = @as([]const Index, @ptrCast(ip.extra.items[extra.end..][0..extra.data.args_len])),
+ .arg_values = @ptrCast(ip.extra.items[extra.end..][0..extra.data.args_len]),
.result = extra.data.result,
} };
},
@@ -5596,9 +5644,8 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
switch (aggregate.storage) {
.bytes => |bytes| {
assert(child == .u8_type);
- if (bytes.len != len) {
- assert(bytes.len == len_including_sentinel);
- assert(bytes[@intCast(len)] == ip.indexToKey(sentinel).int.storage.u64);
+ if (sentinel != .none) {
+ assert(bytes.at(@intCast(len), ip) == ip.indexToKey(sentinel).int.storage.u64);
}
},
.elems => |elems| {
@@ -5641,11 +5688,16 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
switch (ty_key) {
.anon_struct_type => |anon_struct_type| opv: {
switch (aggregate.storage) {
- .bytes => |bytes| for (anon_struct_type.values.get(ip), bytes) |value, byte| {
- if (value != ip.getIfExists(.{ .int = .{
- .ty = .u8_type,
- .storage = .{ .u64 = byte },
- } })) break :opv;
+ .bytes => |bytes| for (anon_struct_type.values.get(ip), bytes.at(0, ip)..) |value, byte| {
+ if (value == .none) break :opv;
+ switch (ip.indexToKey(value)) {
+ .undef => break :opv,
+ .int => |int| switch (int.storage) {
+ .u64 => |x| if (x != byte) break :opv,
+ else => break :opv,
+ },
+ else => unreachable,
+ }
},
.elems => |elems| if (!std.mem.eql(
Index,
@@ -5670,9 +5722,9 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
repeated: {
switch (aggregate.storage) {
- .bytes => |bytes| for (bytes[1..@as(usize, @intCast(len))]) |byte|
- if (byte != bytes[0]) break :repeated,
- .elems => |elems| for (elems[1..@as(usize, @intCast(len))]) |elem|
+ .bytes => |bytes| for (bytes.toSlice(len, ip)[1..]) |byte|
+ if (byte != bytes.at(0, ip)) break :repeated,
+ .elems => |elems| for (elems[1..@intCast(len)]) |elem|
if (elem != elems[0]) break :repeated,
.repeated_elem => {},
}
@@ -5681,7 +5733,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
_ = ip.map.pop();
const elem = try ip.get(gpa, .{ .int = .{
.ty = .u8_type,
- .storage = .{ .u64 = bytes[0] },
+ .storage = .{ .u64 = bytes.at(0, ip) },
} });
assert(!(try ip.map.getOrPutAdapted(gpa, key, adapter)).found_existing);
try ip.items.ensureUnusedCapacity(gpa, 1);
@@ -5710,7 +5762,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
try ip.string_bytes.ensureUnusedCapacity(gpa, @intCast(len_including_sentinel + 1));
try ip.extra.ensureUnusedCapacity(gpa, @typeInfo(Bytes).Struct.fields.len);
switch (aggregate.storage) {
- .bytes => |bytes| ip.string_bytes.appendSliceAssumeCapacity(bytes[0..@intCast(len)]),
+ .bytes => |bytes| ip.string_bytes.appendSliceAssumeCapacity(bytes.toSlice(len, ip)),
.elems => |elems| for (elems[0..@intCast(len)]) |elem| switch (ip.indexToKey(elem)) {
.undef => {
ip.string_bytes.shrinkRetainingCapacity(string_bytes_index);
@@ -5730,15 +5782,14 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
else => unreachable,
},
}
- const has_internal_null =
- std.mem.indexOfScalar(u8, ip.string_bytes.items[string_bytes_index..], 0) != null;
if (sentinel != .none) ip.string_bytes.appendAssumeCapacity(
@intCast(ip.indexToKey(sentinel).int.storage.u64),
);
- const string: String = if (has_internal_null)
- @enumFromInt(string_bytes_index)
- else
- (try ip.getOrPutTrailingString(gpa, @intCast(len_including_sentinel))).toString();
+ const string = try ip.getOrPutTrailingString(
+ gpa,
+ @intCast(len_including_sentinel),
+ .maybe_embedded_nulls,
+ );
ip.items.appendAssumeCapacity(.{
.tag = .bytes,
.data = ip.addExtraAssumeCapacity(Bytes{
@@ -5780,7 +5831,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
.tag = .memoized_call,
.data = ip.addExtraAssumeCapacity(MemoizedCall{
.func = memoized_call.func,
- .args_len = @as(u32, @intCast(memoized_call.arg_values.len)),
+ .args_len = @intCast(memoized_call.arg_values.len),
.result = memoized_call.result,
}),
});
@@ -6753,7 +6804,7 @@ fn finishFuncInstance(
const decl = ip.declPtr(decl_index);
decl.name = try ip.getOrPutStringFmt(gpa, "{}__anon_{d}", .{
fn_owner_decl.name.fmt(ip), @intFromEnum(decl_index),
- });
+ }, .no_embedded_nulls);
return func_index;
}
@@ -7216,7 +7267,7 @@ pub fn remove(ip: *InternPool, index: Index) void {
}
fn addInt(ip: *InternPool, gpa: Allocator, ty: Index, tag: Tag, limbs: []const Limb) !void {
- const limbs_len = @as(u32, @intCast(limbs.len));
+ const limbs_len: u32 = @intCast(limbs.len);
try ip.reserveLimbs(gpa, @typeInfo(Int).Struct.fields.len + limbs_len);
ip.items.appendAssumeCapacity(.{
.tag = tag,
@@ -7235,7 +7286,7 @@ fn addExtra(ip: *InternPool, gpa: Allocator, extra: anytype) Allocator.Error!u32
}
fn addExtraAssumeCapacity(ip: *InternPool, extra: anytype) u32 {
- const result = @as(u32, @intCast(ip.extra.items.len));
+ const result: u32 = @intCast(ip.extra.items.len);
inline for (@typeInfo(@TypeOf(extra)).Struct.fields) |field| {
ip.extra.appendAssumeCapacity(switch (field.type) {
Index,
@@ -7286,7 +7337,7 @@ fn addLimbsExtraAssumeCapacity(ip: *InternPool, extra: anytype) u32 {
@sizeOf(u64) => {},
else => @compileError("unsupported host"),
}
- const result = @as(u32, @intCast(ip.limbs.items.len));
+ const result: u32 = @intCast(ip.limbs.items.len);
inline for (@typeInfo(@TypeOf(extra)).Struct.fields, 0..) |field, i| {
const new: u32 = switch (field.type) {
u32 => @field(extra, field.name),
@@ -7374,7 +7425,7 @@ fn limbData(ip: *const InternPool, comptime T: type, index: usize) T {
@field(result, field.name) = switch (field.type) {
u32 => int32,
- Index => @as(Index, @enumFromInt(int32)),
+ Index => @enumFromInt(int32),
else => @compileError("bad field type: " ++ @typeName(field.type)),
};
}
@@ -7410,8 +7461,8 @@ fn limbsSliceToIndex(ip: *const InternPool, limbs: []const Limb) LimbsAsIndexes
};
// TODO: https://github.com/ziglang/zig/issues/1738
return .{
- .start = @as(u32, @intCast(@divExact(@intFromPtr(limbs.ptr) - @intFromPtr(host_slice.ptr), @sizeOf(Limb)))),
- .len = @as(u32, @intCast(limbs.len)),
+ .start = @intCast(@divExact(@intFromPtr(limbs.ptr) - @intFromPtr(host_slice.ptr), @sizeOf(Limb))),
+ .len = @intCast(limbs.len),
};
}
@@ -7683,7 +7734,7 @@ pub fn getCoerced(ip: *InternPool, gpa: Allocator, val: Index, new_ty: Index) Al
.val = error_union.val,
} }),
.aggregate => |aggregate| {
- const new_len = @as(usize, @intCast(ip.aggregateTypeLen(new_ty)));
+ const new_len: usize = @intCast(ip.aggregateTypeLen(new_ty));
direct: {
const old_ty_child = switch (ip.indexToKey(old_ty)) {
inline .array_type, .vector_type => |seq_type| seq_type.child,
@@ -7696,16 +7747,11 @@ pub fn getCoerced(ip: *InternPool, gpa: Allocator, val: Index, new_ty: Index) Al
else => unreachable,
};
if (old_ty_child != new_ty_child) break :direct;
- // TODO: write something like getCoercedInts to avoid needing to dupe here
switch (aggregate.storage) {
- .bytes => |bytes| {
- const bytes_copy = try gpa.dupe(u8, bytes[0..new_len]);
- defer gpa.free(bytes_copy);
- return ip.get(gpa, .{ .aggregate = .{
- .ty = new_ty,
- .storage = .{ .bytes = bytes_copy },
- } });
- },
+ .bytes => |bytes| return ip.get(gpa, .{ .aggregate = .{
+ .ty = new_ty,
+ .storage = .{ .bytes = bytes },
+ } }),
.elems => |elems| {
const elems_copy = try gpa.dupe(Index, elems[0..new_len]);
defer gpa.free(elems_copy);
@@ -7729,14 +7775,13 @@ pub fn getCoerced(ip: *InternPool, gpa: Allocator, val: Index, new_ty: Index) Al
// lifetime issues, since it'll allow us to avoid referencing `aggregate` after we
// begin interning elems.
switch (aggregate.storage) {
- .bytes => {
+ .bytes => |bytes| {
// We have to intern each value here, so unfortunately we can't easily avoid
// the repeated indexToKey calls.
- for (agg_elems, 0..) |*elem, i| {
- const x = ip.indexToKey(val).aggregate.storage.bytes[i];
+ for (agg_elems, 0..) |*elem, index| {
elem.* = try ip.get(gpa, .{ .int = .{
.ty = .u8_type,
- .storage = .{ .u64 = x },
+ .storage = .{ .u64 = bytes.at(index, ip) },
} });
}
},
@@ -8169,9 +8214,8 @@ fn dumpStatsFallible(ip: *const InternPool, arena: Allocator) anyerror!void {
.bytes => b: {
const info = ip.extraData(Bytes, data);
- const len = @as(u32, @intCast(ip.aggregateTypeLenIncludingSentinel(info.ty)));
- break :b @sizeOf(Bytes) + len +
- @intFromBool(ip.string_bytes.items[@intFromEnum(info.bytes) + len - 1] != 0);
+ const len: usize = @intCast(ip.aggregateTypeLenIncludingSentinel(info.ty));
+ break :b @sizeOf(Bytes) + len + @intFromBool(info.bytes.at(len - 1, ip) != 0);
},
.aggregate => b: {
const info = ip.extraData(Tag.Aggregate, data);
@@ -8434,15 +8478,35 @@ pub fn destroyNamespace(ip: *InternPool, gpa: Allocator, index: NamespaceIndex)
};
}
+const EmbeddedNulls = enum {
+ no_embedded_nulls,
+ maybe_embedded_nulls,
+
+ fn StringType(comptime embedded_nulls: EmbeddedNulls) type {
+ return switch (embedded_nulls) {
+ .no_embedded_nulls => NullTerminatedString,
+ .maybe_embedded_nulls => String,
+ };
+ }
+
+ fn OptionalStringType(comptime embedded_nulls: EmbeddedNulls) type {
+ return switch (embedded_nulls) {
+ .no_embedded_nulls => OptionalNullTerminatedString,
+ .maybe_embedded_nulls => OptionalString,
+ };
+ }
+};
+
pub fn getOrPutString(
ip: *InternPool,
gpa: Allocator,
- s: []const u8,
-) Allocator.Error!NullTerminatedString {
- try ip.string_bytes.ensureUnusedCapacity(gpa, s.len + 1);
- ip.string_bytes.appendSliceAssumeCapacity(s);
+ slice: []const u8,
+ comptime embedded_nulls: EmbeddedNulls,
+) Allocator.Error!embedded_nulls.StringType() {
+ try ip.string_bytes.ensureUnusedCapacity(gpa, slice.len + 1);
+ ip.string_bytes.appendSliceAssumeCapacity(slice);
ip.string_bytes.appendAssumeCapacity(0);
- return ip.getOrPutTrailingString(gpa, s.len + 1);
+ return ip.getOrPutTrailingString(gpa, slice.len + 1, embedded_nulls);
}
pub fn getOrPutStringFmt(
@@ -8450,23 +8514,24 @@ pub fn getOrPutStringFmt(
gpa: Allocator,
comptime format: []const u8,
args: anytype,
-) Allocator.Error!NullTerminatedString {
+ comptime embedded_nulls: EmbeddedNulls,
+) Allocator.Error!embedded_nulls.StringType() {
// ensure that references to string_bytes in args do not get invalidated
const len: usize = @intCast(std.fmt.count(format, args) + 1);
try ip.string_bytes.ensureUnusedCapacity(gpa, len);
ip.string_bytes.writer(undefined).print(format, args) catch unreachable;
ip.string_bytes.appendAssumeCapacity(0);
- return ip.getOrPutTrailingString(gpa, len);
+ return ip.getOrPutTrailingString(gpa, len, embedded_nulls);
}
pub fn getOrPutStringOpt(
ip: *InternPool,
gpa: Allocator,
- optional_string: ?[]const u8,
-) Allocator.Error!OptionalNullTerminatedString {
- const s = optional_string orelse return .none;
- const interned = try getOrPutString(ip, gpa, s);
- return interned.toOptional();
+ slice: ?[]const u8,
+ comptime embedded_nulls: EmbeddedNulls,
+) Allocator.Error!embedded_nulls.OptionalStringType() {
+ const string = try getOrPutString(ip, gpa, slice orelse return .none, embedded_nulls);
+ return string.toOptional();
}
/// Uses the last len bytes of ip.string_bytes as the key.
@@ -8474,7 +8539,8 @@ pub fn getOrPutTrailingString(
ip: *InternPool,
gpa: Allocator,
len: usize,
-) Allocator.Error!NullTerminatedString {
+ comptime embedded_nulls: EmbeddedNulls,
+) Allocator.Error!embedded_nulls.StringType() {
const string_bytes = &ip.string_bytes;
const str_index: u32 = @intCast(string_bytes.items.len - len);
if (len > 0 and string_bytes.getLast() == 0) {
@@ -8483,6 +8549,14 @@ pub fn getOrPutTrailingString(
try string_bytes.ensureUnusedCapacity(gpa, 1);
}
const key: []const u8 = string_bytes.items[str_index..];
+ const has_embedded_null = std.mem.indexOfScalar(u8, key, 0) != null;
+ switch (embedded_nulls) {
+ .no_embedded_nulls => assert(!has_embedded_null),
+ .maybe_embedded_nulls => if (has_embedded_null) {
+ string_bytes.appendAssumeCapacity(0);
+ return @enumFromInt(str_index);
+ },
+ }
const gop = try ip.string_table.getOrPutContextAdapted(gpa, key, std.hash_map.StringIndexAdapter{
.bytes = string_bytes,
}, std.hash_map.StringIndexContext{
@@ -8498,58 +8572,10 @@ pub fn getOrPutTrailingString(
}
}
-/// Uses the last len bytes of ip.string_bytes as the key.
-pub fn getTrailingAggregate(
- ip: *InternPool,
- gpa: Allocator,
- ty: Index,
- len: usize,
-) Allocator.Error!Index {
- try ip.items.ensureUnusedCapacity(gpa, 1);
- try ip.extra.ensureUnusedCapacity(gpa, @typeInfo(Bytes).Struct.fields.len);
-
- const str: String = @enumFromInt(ip.string_bytes.items.len - len);
- const adapter: KeyAdapter = .{ .intern_pool = ip };
- const gop = try ip.map.getOrPutAdapted(gpa, Key{ .aggregate = .{
- .ty = ty,
- .storage = .{ .bytes = ip.string_bytes.items[@intFromEnum(str)..] },
- } }, adapter);
- if (gop.found_existing) return @enumFromInt(gop.index);
-
- ip.items.appendAssumeCapacity(.{
- .tag = .bytes,
- .data = ip.addExtraAssumeCapacity(Bytes{
- .ty = ty,
- .bytes = str,
- }),
- });
- return @enumFromInt(ip.items.len - 1);
-}
-
pub fn getString(ip: *InternPool, s: []const u8) OptionalNullTerminatedString {
- if (ip.string_table.getKeyAdapted(s, std.hash_map.StringIndexAdapter{
+ return if (ip.string_table.getKeyAdapted(s, std.hash_map.StringIndexAdapter{
.bytes = &ip.string_bytes,
- })) |index| {
- return @as(NullTerminatedString, @enumFromInt(index)).toOptional();
- } else {
- return .none;
- }
-}
-
-pub fn stringToSlice(ip: *const InternPool, s: NullTerminatedString) [:0]const u8 {
- const string_bytes = ip.string_bytes.items;
- const start = @intFromEnum(s);
- var end: usize = start;
- while (string_bytes[end] != 0) end += 1;
- return string_bytes[start..end :0];
-}
-
-pub fn stringToSliceUnwrap(ip: *const InternPool, s: OptionalNullTerminatedString) ?[:0]const u8 {
- return ip.stringToSlice(s.unwrap() orelse return null);
-}
-
-pub fn stringEqlSlice(ip: *const InternPool, a: NullTerminatedString, b: []const u8) bool {
- return std.mem.eql(u8, stringToSlice(ip, a), b);
+ })) |index| @enumFromInt(index) else .none;
}
pub fn typeOf(ip: *const InternPool, index: Index) Index {
@@ -8767,7 +8793,7 @@ pub fn aggregateTypeLenIncludingSentinel(ip: *const InternPool, ty: Index) u64 {
return switch (ip.indexToKey(ty)) {
.struct_type => ip.loadStructType(ty).field_types.len,
.anon_struct_type => |anon_struct_type| anon_struct_type.types.len,
- .array_type => |array_type| array_type.len + @intFromBool(array_type.sentinel != .none),
+ .array_type => |array_type| array_type.lenIncludingSentinel(),
.vector_type => |vector_type| vector_type.len,
else => unreachable,
};
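
The string hunks above replace ip.stringToSlice with methods on the handle types themselves and thread a comptime EmbeddedNulls argument through getOrPutString and friends, so that the return type (NullTerminatedString vs. String) is picked at compile time. A minimal, self-contained sketch of that comptime-selected return type pattern, with a plain byte buffer and slices standing in for InternPool.string_bytes and the interned handle types (the real functions also deduplicate through string_table and return 32-bit indices, which the sketch omits):

const std = @import("std");
const assert = std.debug.assert;

const EmbeddedNulls = enum {
    no_embedded_nulls,
    maybe_embedded_nulls,

    // The comptime argument selects the result type, mirroring how the real
    // API yields either NullTerminatedString or String.
    fn StringType(comptime embedded_nulls: EmbeddedNulls) type {
        return switch (embedded_nulls) {
            .no_embedded_nulls => [:0]const u8,
            .maybe_embedded_nulls => []const u8,
        };
    }
};

// Appends `slice` plus a trailing NUL to `buffer` and returns a view of the
// stored bytes whose type depends on the comptime `embedded_nulls` flag.
fn putString(
    buffer: *std.ArrayList(u8),
    slice: []const u8,
    comptime embedded_nulls: EmbeddedNulls,
) !embedded_nulls.StringType() {
    const start = buffer.items.len;
    try buffer.appendSlice(slice);
    try buffer.append(0);
    const stored = buffer.items[start .. buffer.items.len - 1];
    if (embedded_nulls == .no_embedded_nulls) {
        // Callers promising no embedded NULs get a sentinel-terminated slice.
        assert(std.mem.indexOfScalar(u8, stored, 0) == null);
        return buffer.items[start .. buffer.items.len - 1 :0];
    } else {
        return stored;
    }
}

test putString {
    var buffer = std.ArrayList(u8).init(std.testing.allocator);
    defer buffer.deinit();

    const name = try putString(&buffer, "builtin", .no_embedded_nulls);
    try std.testing.expect(@TypeOf(name) == [:0]const u8);
    try std.testing.expectEqualStrings("builtin", name);

    const blob = try putString(&buffer, "a\x00b", .maybe_embedded_nulls);
    try std.testing.expectEqualStrings("a\x00b", blob);
}
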
diff --git a/src/Module.zig b/src/Module.zig
index 0399a2f85b..c4d7f43fe4 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -763,11 +763,11 @@ pub const Namespace = struct {
) !InternPool.NullTerminatedString {
const ip = &zcu.intern_pool;
const count = count: {
- var count: usize = ip.stringToSlice(name).len + 1;
+ var count: usize = name.length(ip) + 1;
var cur_ns = &ns;
while (true) {
const decl = zcu.declPtr(cur_ns.decl_index);
- count += ip.stringToSlice(decl.name).len + 1;
+ count += decl.name.length(ip) + 1;
cur_ns = zcu.namespacePtr(cur_ns.parent.unwrap() orelse {
count += ns.file_scope.sub_file_path.len;
break :count count;
@@ -793,7 +793,7 @@ pub const Namespace = struct {
};
}
- return ip.getOrPutTrailingString(gpa, ip.string_bytes.items.len - start);
+ return ip.getOrPutTrailingString(gpa, ip.string_bytes.items.len - start, .no_embedded_nulls);
}
pub fn getType(ns: Namespace, zcu: *Zcu) Type {
@@ -980,17 +980,13 @@ pub const File = struct {
const ip = &mod.intern_pool;
const start = ip.string_bytes.items.len;
try file.renderFullyQualifiedName(ip.string_bytes.writer(mod.gpa));
- return ip.getOrPutTrailingString(mod.gpa, ip.string_bytes.items.len - start);
+ return ip.getOrPutTrailingString(mod.gpa, ip.string_bytes.items.len - start, .no_embedded_nulls);
}
pub fn fullPath(file: File, ally: Allocator) ![]u8 {
return file.mod.root.joinString(ally, file.sub_file_path);
}
- pub fn fullPathZ(file: File, ally: Allocator) ![:0]u8 {
- return file.mod.root.joinStringZ(ally, file.sub_file_path);
- }
-
pub fn dumpSrc(file: *File, src: LazySrcLoc) void {
const loc = std.zig.findLineColumn(file.source.bytes, src);
std.debug.print("{s}:{d}:{d}\n", .{ file.sub_file_path, loc.line + 1, loc.column + 1 });
@@ -2534,6 +2530,7 @@ fn updateZirRefs(zcu: *Module, file: *File, old_zir: Zir) !void {
const name_ip = try zcu.intern_pool.getOrPutString(
zcu.gpa,
old_zir.nullTerminatedString(name_zir),
+ .no_embedded_nulls,
);
try old_names.put(zcu.gpa, name_ip, {});
}
@@ -2551,6 +2548,7 @@ fn updateZirRefs(zcu: *Module, file: *File, old_zir: Zir) !void {
const name_ip = try zcu.intern_pool.getOrPutString(
zcu.gpa,
old_zir.nullTerminatedString(name_zir),
+ .no_embedded_nulls,
);
if (!old_names.swapRemove(name_ip)) continue;
// Name added
@@ -3555,37 +3553,46 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !SemaDeclResult {
const gpa = mod.gpa;
const zir = decl.getFileScope(mod).zir;
- const builtin_type_target_index: InternPool.Index = blk: {
+ const builtin_type_target_index: InternPool.Index = ip_index: {
const std_mod = mod.std_mod;
- if (decl.getFileScope(mod).mod != std_mod) break :blk .none;
+ if (decl.getFileScope(mod).mod != std_mod) break :ip_index .none;
// We're in the std module.
const std_file = (try mod.importPkg(std_mod)).file;
const std_decl = mod.declPtr(std_file.root_decl.unwrap().?);
const std_namespace = std_decl.getInnerNamespace(mod).?;
- const builtin_str = try ip.getOrPutString(gpa, "builtin");
- const builtin_decl = mod.declPtr(std_namespace.decls.getKeyAdapted(builtin_str, DeclAdapter{ .zcu = mod }) orelse break :blk .none);
- const builtin_namespace = builtin_decl.getInnerNamespaceIndex(mod).unwrap() orelse break :blk .none;
- if (decl.src_namespace != builtin_namespace) break :blk .none;
+ const builtin_str = try ip.getOrPutString(gpa, "builtin", .no_embedded_nulls);
+ const builtin_decl = mod.declPtr(std_namespace.decls.getKeyAdapted(builtin_str, DeclAdapter{ .zcu = mod }) orelse break :ip_index .none);
+ const builtin_namespace = builtin_decl.getInnerNamespaceIndex(mod).unwrap() orelse break :ip_index .none;
+ if (decl.src_namespace != builtin_namespace) break :ip_index .none;
// We're in builtin.zig. This could be a builtin we need to add to a specific InternPool index.
- for ([_]struct { []const u8, InternPool.Index }{
- .{ "AtomicOrder", .atomic_order_type },
- .{ "AtomicRmwOp", .atomic_rmw_op_type },
- .{ "CallingConvention", .calling_convention_type },
- .{ "AddressSpace", .address_space_type },
- .{ "FloatMode", .float_mode_type },
- .{ "ReduceOp", .reduce_op_type },
- .{ "CallModifier", .call_modifier_type },
- .{ "PrefetchOptions", .prefetch_options_type },
- .{ "ExportOptions", .export_options_type },
- .{ "ExternOptions", .extern_options_type },
- .{ "Type", .type_info_type },
- }) |pair| {
- const decl_name = ip.stringToSlice(decl.name);
- if (std.mem.eql(u8, decl_name, pair[0])) {
- break :blk pair[1];
- }
+ for ([_][]const u8{
+ "AtomicOrder",
+ "AtomicRmwOp",
+ "CallingConvention",
+ "AddressSpace",
+ "FloatMode",
+ "ReduceOp",
+ "CallModifier",
+ "PrefetchOptions",
+ "ExportOptions",
+ "ExternOptions",
+ "Type",
+ }, [_]InternPool.Index{
+ .atomic_order_type,
+ .atomic_rmw_op_type,
+ .calling_convention_type,
+ .address_space_type,
+ .float_mode_type,
+ .reduce_op_type,
+ .call_modifier_type,
+ .prefetch_options_type,
+ .export_options_type,
+ .extern_options_type,
+ .type_info_type,
+ }) |type_name, type_ip| {
+ if (decl.name.eqlSlice(type_name, ip)) break :ip_index type_ip;
}
- break :blk .none;
+ break :ip_index .none;
};
mod.intern_pool.removeDependenciesForDepender(gpa, InternPool.Depender.wrap(.{ .decl = decl_index }));
@@ -3725,8 +3732,7 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !SemaDeclResult {
} else if (bytes.len == 0) {
return sema.fail(&block_scope, section_src, "linksection cannot be empty", .{});
}
- const section = try ip.getOrPutString(gpa, bytes);
- break :blk section.toOptional();
+ break :blk try ip.getOrPutStringOpt(gpa, bytes, .no_embedded_nulls);
};
decl.@"addrspace" = blk: {
const addrspace_ctx: Sema.AddressSpaceContext = switch (ip.indexToKey(decl_val.toIntern())) {
@@ -4101,7 +4107,10 @@ fn newEmbedFile(
.sentinel = .zero_u8,
.child = .u8_type,
} });
- const array_val = try ip.getTrailingAggregate(gpa, array_ty, bytes.len);
+ const array_val = try ip.get(gpa, .{ .aggregate = .{
+ .ty = array_ty,
+ .storage = .{ .bytes = try ip.getOrPutTrailingString(gpa, bytes.len, .maybe_embedded_nulls) },
+ } });
const ptr_ty = (try mod.ptrType(.{
.child = array_ty,
@@ -4111,7 +4120,6 @@ fn newEmbedFile(
.address_space = .generic,
},
})).toIntern();
-
const ptr_val = try ip.get(gpa, .{ .ptr = .{
.ty = ptr_ty,
.addr = .{ .anon_decl = .{
@@ -4122,7 +4130,7 @@ fn newEmbedFile(
result.* = new_file;
new_file.* = .{
- .sub_file_path = try ip.getOrPutString(gpa, sub_file_path),
+ .sub_file_path = try ip.getOrPutString(gpa, sub_file_path, .no_embedded_nulls),
.owner = pkg,
.stat = stat,
.val = ptr_val,
@@ -4214,11 +4222,11 @@ const ScanDeclIter = struct {
const zcu = iter.zcu;
const gpa = zcu.gpa;
const ip = &zcu.intern_pool;
- var name = try ip.getOrPutStringFmt(gpa, fmt, args);
+ var name = try ip.getOrPutStringFmt(gpa, fmt, args, .no_embedded_nulls);
var gop = try iter.seen_decls.getOrPut(gpa, name);
var next_suffix: u32 = 0;
while (gop.found_existing) {
- name = try ip.getOrPutStringFmt(gpa, fmt ++ "_{d}", args ++ .{next_suffix});
+ name = try ip.getOrPutStringFmt(gpa, "{}_{d}", .{ name.fmt(ip), next_suffix }, .no_embedded_nulls);
gop = try iter.seen_decls.getOrPut(gpa, name);
next_suffix += 1;
}
@@ -4300,7 +4308,11 @@ fn scanDecl(iter: *ScanDeclIter, decl_inst: Zir.Inst.Index) Allocator.Error!void
};
} else info: {
if (iter.pass != .named) return;
- const name = try ip.getOrPutString(gpa, zir.nullTerminatedString(declaration.name.toString(zir).?));
+ const name = try ip.getOrPutString(
+ gpa,
+ zir.nullTerminatedString(declaration.name.toString(zir).?),
+ .no_embedded_nulls,
+ );
try iter.seen_decls.putNoClobber(gpa, name, {});
break :info .{
name,
@@ -4362,9 +4374,10 @@ fn scanDecl(iter: *ScanDeclIter, decl_inst: Zir.Inst.Index) Allocator.Error!void
if (!comp.config.is_test) break :a false;
if (decl_mod != zcu.main_mod) break :a false;
if (is_named_test and comp.test_filters.len > 0) {
- const decl_fqn = ip.stringToSlice(try namespace.fullyQualifiedName(zcu, decl_name));
+ const decl_fqn = try namespace.fullyQualifiedName(zcu, decl_name);
+ const decl_fqn_slice = decl_fqn.toSlice(ip);
for (comp.test_filters) |test_filter| {
- if (mem.indexOf(u8, decl_fqn, test_filter)) |_| break;
+ if (mem.indexOf(u8, decl_fqn_slice, test_filter)) |_| break;
} else break :a false;
}
zcu.test_functions.putAssumeCapacity(decl_index, {}); // may clobber on incremental update
@@ -4377,8 +4390,8 @@ fn scanDecl(iter: *ScanDeclIter, decl_inst: Zir.Inst.Index) Allocator.Error!void
// `is_export` is unchanged. In this case, the incremental update mechanism will handle
// re-analysis for us if necessary.
if (prev_exported != declaration.flags.is_export or decl.analysis == .unreferenced) {
- log.debug("scanDecl queue analyze_decl file='{s}' decl_name='{s}' decl_index={d}", .{
- namespace.file_scope.sub_file_path, ip.stringToSlice(decl_name), decl_index,
+ log.debug("scanDecl queue analyze_decl file='{s}' decl_name='{}' decl_index={d}", .{
+ namespace.file_scope.sub_file_path, decl_name.fmt(ip), decl_index,
});
comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = decl_index });
}
@@ -5300,7 +5313,7 @@ pub fn populateTestFunctions(
const builtin_file = (mod.importPkg(builtin_mod) catch unreachable).file;
const root_decl = mod.declPtr(builtin_file.root_decl.unwrap().?);
const builtin_namespace = mod.namespacePtr(root_decl.src_namespace);
- const test_functions_str = try ip.getOrPutString(gpa, "test_functions");
+ const test_functions_str = try ip.getOrPutString(gpa, "test_functions", .no_embedded_nulls);
const decl_index = builtin_namespace.decls.getKeyAdapted(
test_functions_str,
DeclAdapter{ .zcu = mod },
@@ -5327,16 +5340,16 @@ pub fn populateTestFunctions(
for (test_fn_vals, mod.test_functions.keys()) |*test_fn_val, test_decl_index| {
const test_decl = mod.declPtr(test_decl_index);
- const test_decl_name = try gpa.dupe(u8, ip.stringToSlice(try test_decl.fullyQualifiedName(mod)));
- defer gpa.free(test_decl_name);
+ const test_decl_name = try test_decl.fullyQualifiedName(mod);
+ const test_decl_name_len = test_decl_name.length(ip);
const test_name_anon_decl: InternPool.Key.Ptr.Addr.AnonDecl = n: {
const test_name_ty = try mod.arrayType(.{
- .len = test_decl_name.len,
+ .len = test_decl_name_len,
.child = .u8_type,
});
const test_name_val = try mod.intern(.{ .aggregate = .{
.ty = test_name_ty.toIntern(),
- .storage = .{ .bytes = test_decl_name },
+ .storage = .{ .bytes = test_decl_name.toString() },
} });
break :n .{
.orig_ty = (try mod.singleConstPtrType(test_name_ty)).toIntern(),
@@ -5354,7 +5367,7 @@ pub fn populateTestFunctions(
} }),
.len = try mod.intern(.{ .int = .{
.ty = .usize_type,
- .storage = .{ .u64 = test_decl_name.len },
+ .storage = .{ .u64 = test_decl_name_len },
} }),
} }),
// func
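
The semaDecl hunk above replaces the array of name/index pairs with two parallel arrays walked by a single multi-object for loop. A small self-contained sketch of that pattern, with illustrative values standing in for the builtin type table:

const std = @import("std");

// Walks two parallel arrays in lockstep; returns the value paired with the
// first matching name, like the builtin type lookup in semaDecl.
fn lookup(name: []const u8) ?u32 {
    const names = [_][]const u8{ "AtomicOrder", "CallingConvention", "Type" };
    const values = [_]u32{ 1, 2, 3 };
    for (names, values) |n, v| {
        if (std.mem.eql(u8, name, n)) return v;
    }
    return null;
}

test lookup {
    try std.testing.expectEqual(@as(?u32, 2), lookup("CallingConvention"));
    try std.testing.expectEqual(@as(?u32, null), lookup("missing"));
}
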
diff --git a/src/Package.zig b/src/Package.zig
index e173665e11..61f90727f3 100644
--- a/src/Package.zig
+++ b/src/Package.zig
@@ -2,3 +2,7 @@ pub const Module = @import("Package/Module.zig");
pub const Fetch = @import("Package/Fetch.zig");
pub const build_zig_basename = "build.zig";
pub const Manifest = @import("Package/Manifest.zig");
+
+test {
+ _ = Fetch;
+}
diff --git a/src/Package/Fetch.zig b/src/Package/Fetch.zig
index 9aeaaffea3..506075e921 100644
--- a/src/Package/Fetch.zig
+++ b/src/Package/Fetch.zig
@@ -339,12 +339,12 @@ pub fn run(f: *Fetch) RunError!void {
.path_or_url => |path_or_url| {
if (fs.cwd().openDir(path_or_url, .{ .iterate = true })) |dir| {
var resource: Resource = .{ .dir = dir };
- return runResource(f, path_or_url, &resource, null);
+ return f.runResource(path_or_url, &resource, null);
} else |dir_err| {
const file_err = if (dir_err == error.NotDir) e: {
if (fs.cwd().openFile(path_or_url, .{})) |file| {
var resource: Resource = .{ .file = file };
- return runResource(f, path_or_url, &resource, null);
+ return f.runResource(path_or_url, &resource, null);
} else |err| break :e err;
} else dir_err;
@@ -356,7 +356,7 @@ pub fn run(f: *Fetch) RunError!void {
};
var server_header_buffer: [header_buffer_size]u8 = undefined;
var resource = try f.initResource(uri, &server_header_buffer);
- return runResource(f, uri.path, &resource, null);
+ return f.runResource(try uri.path.toRawMaybeAlloc(arena), &resource, null);
}
},
};
@@ -418,7 +418,7 @@ pub fn run(f: *Fetch) RunError!void {
);
var server_header_buffer: [header_buffer_size]u8 = undefined;
var resource = try f.initResource(uri, &server_header_buffer);
- return runResource(f, uri.path, &resource, remote.hash);
+ return f.runResource(try uri.path.toRawMaybeAlloc(arena), &resource, remote.hash);
}
pub fn deinit(f: *Fetch) void {
@@ -461,14 +461,10 @@ fn runResource(
};
defer tmp_directory.handle.close();
- // Unpack resource into tmp_directory. A non-null return value means
- // that the package contents are inside a `pkg_dir` sub-directory.
- const pkg_dir = try unpackResource(f, resource, uri_path, tmp_directory);
+ // Fetch and unpack a resource into a temporary directory.
+ var unpack_result = try unpackResource(f, resource, uri_path, tmp_directory);
- var pkg_path: Cache.Path = .{
- .root_dir = tmp_directory,
- .sub_path = if (pkg_dir) |pkg_dir_name| pkg_dir_name else "",
- };
+ var pkg_path: Cache.Path = .{ .root_dir = tmp_directory, .sub_path = unpack_result.root_dir };
// Apply btrfs workaround if needed. Reopen tmp_directory.
if (native_os == .linux and f.job_queue.work_around_btrfs_bug) {
@@ -488,10 +484,9 @@ fn runResource(
.include_paths = if (f.manifest) |m| m.paths else .{},
};
- // TODO:
- // If any error occurred for files that were ultimately excluded, those
- // errors should be ignored, such as failure to create symlinks that
- // weren't supposed to be included anyway.
+    // Ignore errors for files that were excluded by the manifest, such as a
+    // failure to create symlinks that weren't supposed to be included anyway.
+ try unpack_result.validate(f, filter);
// Apply the manifest's inclusion rules to the temporary directory by
// deleting excluded files.
@@ -500,8 +495,8 @@ fn runResource(
// directory.
f.actual_hash = try computeHash(f, pkg_path, filter);
- break :blk if (pkg_dir) |pkg_dir_name|
- try fs.path.join(arena, &.{ tmp_dir_sub_path, pkg_dir_name })
+ break :blk if (unpack_result.root_dir.len > 0)
+ try fs.path.join(arena, &.{ tmp_dir_sub_path, unpack_result.root_dir })
else
tmp_dir_sub_path;
};
@@ -902,13 +897,14 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re
const arena = f.arena.allocator();
const eb = &f.error_bundle;
- if (ascii.eqlIgnoreCase(uri.scheme, "file")) return .{
- .file = f.parent_package_root.openFile(uri.path, .{}) catch |err| {
+ if (ascii.eqlIgnoreCase(uri.scheme, "file")) {
+ const path = try uri.path.toRawMaybeAlloc(arena);
+ return .{ .file = f.parent_package_root.openFile(path, .{}) catch |err| {
return f.fail(f.location_tok, try eb.printString("unable to open '{}{s}': {s}", .{
- f.parent_package_root, uri.path, @errorName(err),
+ f.parent_package_root, path, @errorName(err),
}));
- },
- };
+ } };
+ }
const http_client = f.job_queue.http_client;
@@ -925,7 +921,7 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re
};
errdefer req.deinit(); // releases more than memory
- req.send(.{}) catch |err| {
+ req.send() catch |err| {
return f.fail(f.location_tok, try eb.printString(
"HTTP request failed: {s}",
.{@errorName(err)},
@@ -972,7 +968,8 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re
};
const want_oid = want_oid: {
- const want_ref = uri.fragment orelse "HEAD";
+ const want_ref =
+ if (uri.fragment) |fragment| try fragment.toRawMaybeAlloc(arena) else "HEAD";
if (git.parseOid(want_ref)) |oid| break :want_oid oid else |_| {}
const want_ref_head = try std.fmt.allocPrint(arena, "refs/heads/{s}", .{want_ref});
@@ -1044,16 +1041,12 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re
));
}
-/// A `null` return value indicates the `tmp_directory` is populated directly
-/// with the package contents.
-/// A non-null return value means that the package contents are inside a
-/// sub-directory indicated by the named path.
fn unpackResource(
f: *Fetch,
resource: *Resource,
uri_path: []const u8,
tmp_directory: Cache.Directory,
-) RunError!?[]const u8 {
+) RunError!UnpackResult {
const eb = &f.error_bundle;
const file_type = switch (resource.*) {
.file => FileType.fromPath(uri_path) orelse
@@ -1121,7 +1114,7 @@ fn unpackResource(
.{ uri_path, @errorName(err) },
));
};
- return null;
+ return .{};
},
};
@@ -1156,27 +1149,22 @@ fn unpackResource(
});
return try unpackTarball(f, tmp_directory.handle, dcp.reader());
},
- .git_pack => {
- unpackGitPack(f, tmp_directory.handle, resource) catch |err| switch (err) {
- error.FetchFailed => return error.FetchFailed,
- error.OutOfMemory => return error.OutOfMemory,
- else => |e| return f.fail(f.location_tok, try eb.printString(
- "unable to unpack git files: {s}",
- .{@errorName(e)},
- )),
- };
- return null;
+ .git_pack => return unpackGitPack(f, tmp_directory.handle, resource) catch |err| switch (err) {
+ error.FetchFailed => return error.FetchFailed,
+ error.OutOfMemory => return error.OutOfMemory,
+ else => |e| return f.fail(f.location_tok, try eb.printString(
+ "unable to unpack git files: {s}",
+ .{@errorName(e)},
+ )),
},
}
}
-fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!?[]const u8 {
+fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackResult {
const eb = &f.error_bundle;
const arena = f.arena.allocator();
- const gpa = f.arena.child_allocator;
- var diagnostics: std.tar.Diagnostics = .{ .allocator = gpa };
- defer diagnostics.deinit();
+ var diagnostics: std.tar.Diagnostics = .{ .allocator = arena };
std.tar.pipeToFileSystem(out_dir, reader, .{
.diagnostics = &diagnostics,
@@ -1188,53 +1176,27 @@ fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!?[]const
.{@errorName(err)},
));
+ var res: UnpackResult = .{ .root_dir = diagnostics.root_dir };
if (diagnostics.errors.items.len > 0) {
- const notes_len: u32 = @intCast(diagnostics.errors.items.len);
- try eb.addRootErrorMessage(.{
- .msg = try eb.addString("unable to unpack tarball"),
- .src_loc = try f.srcLoc(f.location_tok),
- .notes_len = notes_len,
- });
- const notes_start = try eb.reserveNotes(notes_len);
- for (diagnostics.errors.items, notes_start..) |item, note_i| {
+ try res.allocErrors(arena, diagnostics.errors.items.len, "unable to unpack tarball");
+ for (diagnostics.errors.items) |item| {
switch (item) {
- .unable_to_create_sym_link => |info| {
- eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
- .msg = try eb.printString("unable to create symlink from '{s}' to '{s}': {s}", .{
- info.file_name, info.link_name, @errorName(info.code),
- }),
- }));
- },
- .unable_to_create_file => |info| {
- eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
- .msg = try eb.printString("unable to create file '{s}': {s}", .{
- info.file_name, @errorName(info.code),
- }),
- }));
- },
- .unsupported_file_type => |info| {
- eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
- .msg = try eb.printString("file '{s}' has unsupported type '{c}'", .{
- info.file_name, @intFromEnum(info.file_type),
- }),
- }));
- },
+ .unable_to_create_file => |i| res.unableToCreateFile(stripRoot(i.file_name, res.root_dir), i.code),
+ .unable_to_create_sym_link => |i| res.unableToCreateSymLink(stripRoot(i.file_name, res.root_dir), i.link_name, i.code),
+ .unsupported_file_type => |i| res.unsupportedFileType(stripRoot(i.file_name, res.root_dir), @intFromEnum(i.file_type)),
}
}
- return error.FetchFailed;
}
-
- return if (diagnostics.root_dir) |root_dir|
- return try arena.dupe(u8, root_dir)
- else
- null;
+ return res;
}
-fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource) anyerror!void {
- const eb = &f.error_bundle;
+fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource) anyerror!UnpackResult {
+ const arena = f.arena.allocator();
const gpa = f.arena.child_allocator;
const want_oid = resource.git.want_oid;
const reader = resource.git.fetch_stream.reader();
+
+ var res: UnpackResult = .{};
// The .git directory is used to store the packfile and associated index, but
// we do not attempt to replicate the exact structure of a real .git
// directory, since that isn't relevant for fetching a package.
@@ -1265,35 +1227,23 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource) anyerror!void
checkout_prog_node.activate();
var repository = try git.Repository.init(gpa, pack_file, index_file);
defer repository.deinit();
- var diagnostics: git.Diagnostics = .{ .allocator = gpa };
- defer diagnostics.deinit();
+ var diagnostics: git.Diagnostics = .{ .allocator = arena };
try repository.checkout(out_dir, want_oid, &diagnostics);
if (diagnostics.errors.items.len > 0) {
- const notes_len: u32 = @intCast(diagnostics.errors.items.len);
- try eb.addRootErrorMessage(.{
- .msg = try eb.addString("unable to unpack packfile"),
- .src_loc = try f.srcLoc(f.location_tok),
- .notes_len = notes_len,
- });
- const notes_start = try eb.reserveNotes(notes_len);
- for (diagnostics.errors.items, notes_start..) |item, note_i| {
+ try res.allocErrors(arena, diagnostics.errors.items.len, "unable to unpack packfile");
+ for (diagnostics.errors.items) |item| {
switch (item) {
- .unable_to_create_sym_link => |info| {
- eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
- .msg = try eb.printString("unable to create symlink from '{s}' to '{s}': {s}", .{
- info.file_name, info.link_name, @errorName(info.code),
- }),
- }));
- },
+ .unable_to_create_file => |i| res.unableToCreateFile(i.file_name, i.code),
+ .unable_to_create_sym_link => |i| res.unableToCreateSymLink(i.file_name, i.link_name, i.code),
}
}
- return error.InvalidGitPack;
}
}
}
try out_dir.deleteTree(".git");
+ return res;
}
fn recursiveDirectoryCopy(f: *Fetch, dir: fs.Dir, tmp_dir: fs.Dir) anyerror!void {
@@ -1743,6 +1693,7 @@ const native_os = builtin.os.tag;
test {
_ = Filter;
_ = FileType;
+ _ = UnpackResult;
}
// Detects executable header: ELF magic header or shebang line.
@@ -1778,3 +1729,469 @@ test FileHeader {
h.update(FileHeader.elf_magic[2..4]);
try std.testing.expect(h.isExecutable());
}
+
+// Result of the `unpackResource` operation. Enables collecting errors from the
+// tar/git diagnostics, filtering those errors by the manifest inclusion rules,
+// and emitting the remaining errors to an `ErrorBundle`.
+const UnpackResult = struct {
+ errors: []Error = undefined,
+ errors_count: usize = 0,
+ root_error_message: []const u8 = "",
+
+    // A non-empty value means that the package contents are inside a
+    // sub-directory indicated by the named path.
+ root_dir: []const u8 = "",
+
+ const Error = union(enum) {
+ unable_to_create_sym_link: struct {
+ code: anyerror,
+ file_name: []const u8,
+ link_name: []const u8,
+ },
+ unable_to_create_file: struct {
+ code: anyerror,
+ file_name: []const u8,
+ },
+ unsupported_file_type: struct {
+ file_name: []const u8,
+ file_type: u8,
+ },
+
+ fn excluded(self: Error, filter: Filter) bool {
+ const file_name = switch (self) {
+ .unable_to_create_file => |info| info.file_name,
+ .unable_to_create_sym_link => |info| info.file_name,
+ .unsupported_file_type => |info| info.file_name,
+ };
+ return !filter.includePath(file_name);
+ }
+ };
+
+ fn allocErrors(self: *UnpackResult, arena: std.mem.Allocator, n: usize, root_error_message: []const u8) !void {
+ self.root_error_message = try arena.dupe(u8, root_error_message);
+ self.errors = try arena.alloc(UnpackResult.Error, n);
+ }
+
+ fn hasErrors(self: *UnpackResult) bool {
+ return self.errors_count > 0;
+ }
+
+ fn unableToCreateFile(self: *UnpackResult, file_name: []const u8, err: anyerror) void {
+ self.errors[self.errors_count] = .{ .unable_to_create_file = .{
+ .code = err,
+ .file_name = file_name,
+ } };
+ self.errors_count += 1;
+ }
+
+ fn unableToCreateSymLink(self: *UnpackResult, file_name: []const u8, link_name: []const u8, err: anyerror) void {
+ self.errors[self.errors_count] = .{ .unable_to_create_sym_link = .{
+ .code = err,
+ .file_name = file_name,
+ .link_name = link_name,
+ } };
+ self.errors_count += 1;
+ }
+
+ fn unsupportedFileType(self: *UnpackResult, file_name: []const u8, file_type: u8) void {
+ self.errors[self.errors_count] = .{ .unsupported_file_type = .{
+ .file_name = file_name,
+ .file_type = file_type,
+ } };
+ self.errors_count += 1;
+ }
+
+ fn validate(self: *UnpackResult, f: *Fetch, filter: Filter) !void {
+ if (self.errors_count == 0) return;
+
+ var unfiltered_errors: u32 = 0;
+ for (self.errors) |item| {
+ if (item.excluded(filter)) continue;
+ unfiltered_errors += 1;
+ }
+ if (unfiltered_errors == 0) return;
+
+        // Emit the errors to an `ErrorBundle`.
+ const eb = &f.error_bundle;
+ try eb.addRootErrorMessage(.{
+ .msg = try eb.addString(self.root_error_message),
+ .src_loc = try f.srcLoc(f.location_tok),
+ .notes_len = unfiltered_errors,
+ });
+ var note_i: u32 = try eb.reserveNotes(unfiltered_errors);
+ for (self.errors) |item| {
+ if (item.excluded(filter)) continue;
+ switch (item) {
+ .unable_to_create_sym_link => |info| {
+ eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
+ .msg = try eb.printString("unable to create symlink from '{s}' to '{s}': {s}", .{
+ info.file_name, info.link_name, @errorName(info.code),
+ }),
+ }));
+ },
+ .unable_to_create_file => |info| {
+ eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
+ .msg = try eb.printString("unable to create file '{s}': {s}", .{
+ info.file_name, @errorName(info.code),
+ }),
+ }));
+ },
+ .unsupported_file_type => |info| {
+ eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
+ .msg = try eb.printString("file '{s}' has unsupported type '{c}'", .{
+ info.file_name, info.file_type,
+ }),
+ }));
+ },
+ }
+ note_i += 1;
+ }
+
+ return error.FetchFailed;
+ }
+
+ test validate {
+ const gpa = std.testing.allocator;
+ var arena_instance = std.heap.ArenaAllocator.init(gpa);
+ defer arena_instance.deinit();
+ const arena = arena_instance.allocator();
+
+ // fill UnpackResult with errors
+ var res: UnpackResult = .{};
+ try res.allocErrors(arena, 4, "unable to unpack");
+ try std.testing.expectEqual(0, res.errors_count);
+ res.unableToCreateFile("dir1/file1", error.File1);
+ res.unableToCreateSymLink("dir2/file2", "filename", error.SymlinkError);
+ res.unableToCreateFile("dir1/file3", error.File3);
+ res.unsupportedFileType("dir2/file4", 'x');
+ try std.testing.expectEqual(4, res.errors_count);
+
+ // create filter, includes dir2, excludes dir1
+ var filter: Filter = .{};
+ try filter.include_paths.put(arena, "dir2", {});
+
+ // init Fetch
+ var fetch: Fetch = undefined;
+ fetch.parent_manifest_ast = null;
+ fetch.location_tok = 0;
+ try fetch.error_bundle.init(gpa);
+ defer fetch.error_bundle.deinit();
+
+ // validate errors with filter
+ try std.testing.expectError(error.FetchFailed, res.validate(&fetch, filter));
+
+ // output errors to string
+ var errors = try fetch.error_bundle.toOwnedBundle("");
+ defer errors.deinit(gpa);
+ var out = std.ArrayList(u8).init(gpa);
+ defer out.deinit();
+ try errors.renderToWriter(.{ .ttyconf = .no_color }, out.writer());
+ try std.testing.expectEqualStrings(
+ \\error: unable to unpack
+ \\ note: unable to create symlink from 'dir2/file2' to 'filename': SymlinkError
+ \\ note: file 'dir2/file4' has unsupported type 'x'
+ \\
+ , out.items);
+ }
+};
+
+test "tarball with duplicate paths" {
+ // This tarball contains the path 'dir1/file1' twice, to reproduce on any
+ // file system the collision a case-insensitive file system would cause.
+ //
+ // duplicate_paths/
+ // duplicate_paths/dir1/
+ // duplicate_paths/dir1/file1
+ // duplicate_paths/dir1/file1
+ // duplicate_paths/build.zig.zon
+ // duplicate_paths/src/
+ // duplicate_paths/src/main.zig
+ // duplicate_paths/src/root.zig
+ // duplicate_paths/build.zig
+ //
+
+ const gpa = std.testing.allocator;
+ var tmp = std.testing.tmpDir(.{});
+ defer tmp.cleanup();
+
+ const tarball_name = "duplicate_paths.tar.gz";
+ try saveEmbedFile(tarball_name, tmp.dir);
+ const tarball_path = try std.fmt.allocPrint(gpa, "zig-cache/tmp/{s}/{s}", .{ tmp.sub_path, tarball_name });
+ defer gpa.free(tarball_path);
+
+ // Run tarball fetch, expect to fail
+ var fb: TestFetchBuilder = undefined;
+ var fetch = try fb.build(gpa, tmp.dir, tarball_path);
+ defer fb.deinit();
+ try std.testing.expectError(error.FetchFailed, fetch.run());
+
+ try fb.expectFetchErrors(1,
+ \\error: unable to unpack tarball
+ \\ note: unable to create file 'dir1/file1': PathAlreadyExists
+ \\
+ );
+}
+
+test "tarball with excluded duplicate paths" {
+ // Same as the previous tarball but has a build.zig.zon which excludes 'dir1'.
+ //
+ // .paths = .{
+ // "build.zig",
+ // "build.zig.zon",
+ // "src",
+ // }
+ //
+
+ const gpa = std.testing.allocator;
+ var tmp = std.testing.tmpDir(.{});
+ defer tmp.cleanup();
+
+ const tarball_name = "duplicate_paths_excluded.tar.gz";
+ try saveEmbedFile(tarball_name, tmp.dir);
+ const tarball_path = try std.fmt.allocPrint(gpa, "zig-cache/tmp/{s}/{s}", .{ tmp.sub_path, tarball_name });
+ defer gpa.free(tarball_path);
+
+ // Run tarball fetch, should succeed
+ var fb: TestFetchBuilder = undefined;
+ var fetch = try fb.build(gpa, tmp.dir, tarball_path);
+ defer fb.deinit();
+ try fetch.run();
+
+ const hex_digest = Package.Manifest.hexDigest(fetch.actual_hash);
+ try std.testing.expectEqualStrings(
+ "12200bafe035cbb453dd717741b66e9f9d1e6c674069d06121dafa1b2e62eb6b22da",
+ &hex_digest,
+ );
+
+ const expected_files: []const []const u8 = &.{
+ "build.zig",
+ "build.zig.zon",
+ "src/main.zig",
+ "src/root.zig",
+ };
+ try fb.expectPackageFiles(expected_files);
+}
+
+test "tarball without root folder" {
+ // Tarball without a root folder. Manifest excludes dir1 and dir2.
+ //
+ // build.zig
+ // build.zig.zon
+ // dir1/
+ // dir1/file2
+ // dir1/file1
+ // dir2/
+ // dir2/file2
+ // src/
+ // src/main.zig
+ //
+
+ const gpa = std.testing.allocator;
+ var tmp = std.testing.tmpDir(.{});
+ defer tmp.cleanup();
+
+ const tarball_name = "no_root.tar.gz";
+ try saveEmbedFile(tarball_name, tmp.dir);
+ const tarball_path = try std.fmt.allocPrint(gpa, "zig-cache/tmp/{s}/{s}", .{ tmp.sub_path, tarball_name });
+ defer gpa.free(tarball_path);
+
+ // Run tarball fetch, should succeed
+ var fb: TestFetchBuilder = undefined;
+ var fetch = try fb.build(gpa, tmp.dir, tarball_path);
+ defer fb.deinit();
+ try fetch.run();
+
+ const hex_digest = Package.Manifest.hexDigest(fetch.actual_hash);
+ try std.testing.expectEqualStrings(
+ "12209f939bfdcb8b501a61bb4a43124dfa1b2848adc60eec1e4624c560357562b793",
+ &hex_digest,
+ );
+
+ const expected_files: []const []const u8 = &.{
+ "build.zig",
+ "build.zig.zon",
+ "src/main.zig",
+ };
+ try fb.expectPackageFiles(expected_files);
+}
+
+test "set executable bit based on file content" {
+ if (!std.fs.has_executable_bit) return error.SkipZigTest;
+ const gpa = std.testing.allocator;
+ var tmp = std.testing.tmpDir(.{});
+ defer tmp.cleanup();
+
+ const tarball_name = "executables.tar.gz";
+ try saveEmbedFile(tarball_name, tmp.dir);
+ const tarball_path = try std.fmt.allocPrint(gpa, "zig-cache/tmp/{s}/{s}", .{ tmp.sub_path, tarball_name });
+ defer gpa.free(tarball_path);
+
+ // $ tar -tvf executables.tar.gz
+ // drwxrwxr-x 0 executables/
+ // -rwxrwxr-x 170 executables/hello
+ // lrwxrwxrwx 0 executables/hello_ln -> hello
+ // -rw-rw-r-- 0 executables/file1
+ // -rw-rw-r-- 17 executables/script_with_shebang_without_exec_bit
+ // -rwxrwxr-x 7 executables/script_without_shebang
+ // -rwxrwxr-x 17 executables/script
+
+ var fb: TestFetchBuilder = undefined;
+ var fetch = try fb.build(gpa, tmp.dir, tarball_path);
+ defer fb.deinit();
+
+ try fetch.run();
+ try std.testing.expectEqualStrings(
+ "1220fecb4c06a9da8673c87fe8810e15785f1699212f01728eadce094d21effeeef3",
+ &Manifest.hexDigest(fetch.actual_hash),
+ );
+
+ var out = try fb.packageDir();
+ defer out.close();
+ const S = std.posix.S;
+ // expect executable bit not set
+ try std.testing.expect((try out.statFile("file1")).mode & S.IXUSR == 0);
+ try std.testing.expect((try out.statFile("script_without_shebang")).mode & S.IXUSR == 0);
+ // expect executable bit set
+ try std.testing.expect((try out.statFile("hello")).mode & S.IXUSR != 0);
+ try std.testing.expect((try out.statFile("script")).mode & S.IXUSR != 0);
+ try std.testing.expect((try out.statFile("script_with_shebang_without_exec_bit")).mode & S.IXUSR != 0);
+ try std.testing.expect((try out.statFile("hello_ln")).mode & S.IXUSR != 0);
+
+ //
+ // $ ls -al zig-cache/tmp/OCz9ovUcstDjTC_U/zig-global-cache/p/1220fecb4c06a9da8673c87fe8810e15785f1699212f01728eadce094d21effeeef3
+ // -rw-rw-r-- 1 0 Apr file1
+ // -rwxrwxr-x 1 170 Apr hello
+ // lrwxrwxrwx 1 5 Apr hello_ln -> hello
+ // -rwxrwxr-x 1 17 Apr script
+ // -rw-rw-r-- 1 7 Apr script_without_shebang
+ // -rwxrwxr-x 1 17 Apr script_with_shebang_without_exec_bit
+}
+
+fn saveEmbedFile(comptime tarball_name: []const u8, dir: fs.Dir) !void {
+ const tarball_content = @embedFile("Fetch/testdata/" ++ tarball_name);
+ var tmp_file = try dir.createFile(tarball_name, .{});
+ defer tmp_file.close();
+ try tmp_file.writeAll(tarball_content);
+}
+
+// Builds a Fetch with its required dependencies; cleans them up in deinit().
+const TestFetchBuilder = struct {
+ thread_pool: ThreadPool,
+ http_client: std.http.Client,
+ global_cache_directory: Cache.Directory,
+ progress: std.Progress,
+ job_queue: Fetch.JobQueue,
+ fetch: Fetch,
+
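+ // Initializes the thread pool, HTTP client, global cache directory, job
+ // queue and the Fetch itself for the given `path_or_url`; returns a
+ // pointer to the embedded Fetch.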
+ fn build(
+ self: *TestFetchBuilder,
+ allocator: std.mem.Allocator,
+ cache_parent_dir: std.fs.Dir,
+ path_or_url: []const u8,
+ ) !*Fetch {
+ const cache_dir = try cache_parent_dir.makeOpenPath("zig-global-cache", .{});
+
+ try self.thread_pool.init(.{ .allocator = allocator });
+ self.http_client = .{ .allocator = allocator };
+ self.global_cache_directory = .{ .handle = cache_dir, .path = null };
+
+ self.progress = .{ .dont_print_on_dumb = true };
+
+ self.job_queue = .{
+ .http_client = &self.http_client,
+ .thread_pool = &self.thread_pool,
+ .global_cache = self.global_cache_directory,
+ .recursive = false,
+ .read_only = false,
+ .debug_hash = false,
+ .work_around_btrfs_bug = false,
+ };
+
+ self.fetch = .{
+ .arena = std.heap.ArenaAllocator.init(allocator),
+ .location = .{ .path_or_url = path_or_url },
+ .location_tok = 0,
+ .hash_tok = 0,
+ .name_tok = 0,
+ .lazy_status = .eager,
+ .parent_package_root = Cache.Path{ .root_dir = Cache.Directory{ .handle = cache_dir, .path = null } },
+ .parent_manifest_ast = null,
+ .prog_node = self.progress.start("Fetch", 0),
+ .job_queue = &self.job_queue,
+ .omit_missing_hash_error = true,
+ .allow_missing_paths_field = false,
+
+ .package_root = undefined,
+ .error_bundle = undefined,
+ .manifest = null,
+ .manifest_ast = undefined,
+ .actual_hash = undefined,
+ .has_build_zig = false,
+ .oom_flag = false,
+ .module = null,
+ };
+ return &self.fetch;
+ }
+
+ fn deinit(self: *TestFetchBuilder) void {
+ self.fetch.deinit();
+ self.job_queue.deinit();
+ self.fetch.prog_node.end();
+ self.global_cache_directory.handle.close();
+ self.http_client.deinit();
+ self.thread_pool.deinit();
+ }
+
+ fn packageDir(self: *TestFetchBuilder) !fs.Dir {
+ const root = self.fetch.package_root;
+ return try root.root_dir.handle.openDir(root.sub_path, .{ .iterate = true });
+ }
+
+ // Test helper, asserts that the package dir contains expected_files.
+ // expected_files must be sorted.
+ fn expectPackageFiles(self: *TestFetchBuilder, expected_files: []const []const u8) !void {
+ var package_dir = try self.packageDir();
+ defer package_dir.close();
+
+ var actual_files: std.ArrayListUnmanaged([]u8) = .{};
+ defer actual_files.deinit(std.testing.allocator);
+ defer for (actual_files.items) |file| std.testing.allocator.free(file);
+ var walker = try package_dir.walk(std.testing.allocator);
+ defer walker.deinit();
+ while (try walker.next()) |entry| {
+ if (entry.kind != .file) continue;
+ const path = try std.testing.allocator.dupe(u8, entry.path);
+ errdefer std.testing.allocator.free(path);
+ std.mem.replaceScalar(u8, path, std.fs.path.sep, '/');
+ try actual_files.append(std.testing.allocator, path);
+ }
+ std.mem.sortUnstable([]u8, actual_files.items, {}, struct {
+ fn lessThan(_: void, a: []u8, b: []u8) bool {
+ return std.mem.lessThan(u8, a, b);
+ }
+ }.lessThan);
+
+ try std.testing.expectEqual(expected_files.len, actual_files.items.len);
+ for (expected_files, 0..) |file_name, i| {
+ try std.testing.expectEqualStrings(file_name, actual_files.items[i]);
+ }
+ try std.testing.expectEqualDeep(expected_files, actual_files.items);
+ }
+
+ // Test helper, asserts that the fetch failed with the `msg` error message.
+ fn expectFetchErrors(self: *TestFetchBuilder, notes_len: usize, msg: []const u8) !void {
+ var errors = try self.fetch.error_bundle.toOwnedBundle("");
+ defer errors.deinit(std.testing.allocator);
+
+ const em = errors.getErrorMessage(errors.getMessages()[0]);
+ try std.testing.expectEqual(1, em.count);
+ if (notes_len > 0) {
+ try std.testing.expectEqual(notes_len, em.notes_len);
+ }
+ var al = std.ArrayList(u8).init(std.testing.allocator);
+ defer al.deinit();
+ try errors.renderToWriter(.{ .ttyconf = .no_color }, al.writer());
+ try std.testing.expectEqualStrings(msg, al.items);
+ }
+};
diff --git a/src/Package/Fetch/git.zig b/src/Package/Fetch/git.zig
index 36652bd88c..a8c106412e 100644
--- a/src/Package/Fetch/git.zig
+++ b/src/Package/Fetch/git.zig
@@ -46,6 +46,10 @@ pub const Diagnostics = struct {
file_name: []const u8,
link_name: []const u8,
},
+ unable_to_create_file: struct {
+ code: anyerror,
+ file_name: []const u8,
+ },
};
pub fn deinit(d: *Diagnostics) void {
@@ -55,6 +59,9 @@ pub const Diagnostics = struct {
d.allocator.free(info.file_name);
d.allocator.free(info.link_name);
},
+ .unable_to_create_file => |info| {
+ d.allocator.free(info.file_name);
+ },
}
}
d.errors.deinit(d.allocator);
@@ -119,11 +126,19 @@ pub const Repository = struct {
try repository.checkoutTree(subdir, entry.oid, sub_path, diagnostics);
},
.file => {
- var file = try dir.createFile(entry.name, .{});
- defer file.close();
try repository.odb.seekOid(entry.oid);
const file_object = try repository.odb.readObject();
if (file_object.type != .blob) return error.InvalidFile;
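+ // Create the file exclusively so a duplicate path surfaces as
+ // error.PathAlreadyExists and is recorded as a diagnostic instead of
+ // overwriting the existing file.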
+ var file = dir.createFile(entry.name, .{ .exclusive = true }) catch |e| {
+ const file_name = try std.fs.path.join(diagnostics.allocator, &.{ current_path, entry.name });
+ errdefer diagnostics.allocator.free(file_name);
+ try diagnostics.errors.append(diagnostics.allocator, .{ .unable_to_create_file = .{
+ .code = e,
+ .file_name = file_name,
+ } });
+ continue;
+ };
+ defer file.close();
try file.writeAll(file_object.data);
try file.sync();
},
@@ -525,9 +540,13 @@ pub const Session = struct {
http_headers_buffer: []u8,
) !CapabilityIterator {
var info_refs_uri = session.uri;
- info_refs_uri.path = try std.fs.path.resolvePosix(allocator, &.{ "/", session.uri.path, "info/refs" });
- defer allocator.free(info_refs_uri.path);
- info_refs_uri.query = "service=git-upload-pack";
+ {
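+ // Format the URI path component into a plain slice so it can be joined
+ // with std.fs.path.resolvePosix; the joined result is stored back as a
+ // percent-encoded component.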
+ const session_uri_path = try std.fmt.allocPrint(allocator, "{path}", .{session.uri.path});
+ defer allocator.free(session_uri_path);
+ info_refs_uri.path = .{ .percent_encoded = try std.fs.path.resolvePosix(allocator, &.{ "/", session_uri_path, "info/refs" }) };
+ }
+ defer allocator.free(info_refs_uri.path.percent_encoded);
+ info_refs_uri.query = .{ .percent_encoded = "service=git-upload-pack" };
info_refs_uri.fragment = null;
const max_redirects = 3;
@@ -539,16 +558,18 @@ pub const Session = struct {
},
});
errdefer request.deinit();
- try request.send(.{});
+ try request.send();
try request.finish();
try request.wait();
if (request.response.status != .ok) return error.ProtocolError;
const any_redirects_occurred = request.redirect_behavior.remaining() < max_redirects;
if (any_redirects_occurred) {
- if (!mem.endsWith(u8, request.uri.path, "/info/refs")) return error.UnparseableRedirect;
+ const request_uri_path = try std.fmt.allocPrint(allocator, "{path}", .{request.uri.path});
+ defer allocator.free(request_uri_path);
+ if (!mem.endsWith(u8, request_uri_path, "/info/refs")) return error.UnparseableRedirect;
var new_uri = request.uri;
- new_uri.path = new_uri.path[0 .. new_uri.path.len - "/info/refs".len];
+ new_uri.path = .{ .percent_encoded = request_uri_path[0 .. request_uri_path.len - "/info/refs".len] };
new_uri.query = null;
redirect_uri.* = try std.fmt.allocPrint(allocator, "{+/}", .{new_uri});
return error.Redirected;
@@ -630,8 +651,12 @@ pub const Session = struct {
/// Returns an iterator over refs known to the server.
pub fn listRefs(session: Session, allocator: Allocator, options: ListRefsOptions) !RefIterator {
var upload_pack_uri = session.uri;
- upload_pack_uri.path = try std.fs.path.resolvePosix(allocator, &.{ "/", session.uri.path, "git-upload-pack" });
- defer allocator.free(upload_pack_uri.path);
+ {
+ const session_uri_path = try std.fmt.allocPrint(allocator, "{path}", .{session.uri.path});
+ defer allocator.free(session_uri_path);
+ upload_pack_uri.path = .{ .percent_encoded = try std.fs.path.resolvePosix(allocator, &.{ "/", session_uri_path, "git-upload-pack" }) };
+ }
+ defer allocator.free(upload_pack_uri.path.percent_encoded);
upload_pack_uri.query = null;
upload_pack_uri.fragment = null;
@@ -666,7 +691,7 @@ pub const Session = struct {
});
errdefer request.deinit();
request.transfer_encoding = .{ .content_length = body.items.len };
- try request.send(.{});
+ try request.send();
try request.writeAll(body.items);
try request.finish();
@@ -733,8 +758,12 @@ pub const Session = struct {
http_headers_buffer: []u8,
) !FetchStream {
var upload_pack_uri = session.uri;
- upload_pack_uri.path = try std.fs.path.resolvePosix(allocator, &.{ "/", session.uri.path, "git-upload-pack" });
- defer allocator.free(upload_pack_uri.path);
+ {
+ const session_uri_path = try std.fmt.allocPrint(allocator, "{path}", .{session.uri.path});
+ defer allocator.free(session_uri_path);
+ upload_pack_uri.path = .{ .percent_encoded = try std.fs.path.resolvePosix(allocator, &.{ "/", session_uri_path, "git-upload-pack" }) };
+ }
+ defer allocator.free(upload_pack_uri.path.percent_encoded);
upload_pack_uri.query = null;
upload_pack_uri.fragment = null;
@@ -771,7 +800,7 @@ pub const Session = struct {
});
errdefer request.deinit();
request.transfer_encoding = .{ .content_length = body.items.len };
- try request.send(.{});
+ try request.send();
try request.writeAll(body.items);
try request.finish();
diff --git a/src/Package/Fetch/testdata/duplicate_paths.tar.gz b/src/Package/Fetch/testdata/duplicate_paths.tar.gz
new file mode 100644
index 0000000000..118a934c1b
--- /dev/null
+++ b/src/Package/Fetch/testdata/duplicate_paths.tar.gz
Binary files differ
diff --git a/src/Package/Fetch/testdata/duplicate_paths_excluded.tar.gz b/src/Package/Fetch/testdata/duplicate_paths_excluded.tar.gz
new file mode 100644
index 0000000000..760b37cd40
--- /dev/null
+++ b/src/Package/Fetch/testdata/duplicate_paths_excluded.tar.gz
Binary files differ
diff --git a/src/Package/Fetch/testdata/executables.tar.gz b/src/Package/Fetch/testdata/executables.tar.gz
new file mode 100644
index 0000000000..abc650801e
--- /dev/null
+++ b/src/Package/Fetch/testdata/executables.tar.gz
Binary files differ
diff --git a/src/Package/Fetch/testdata/no_root.tar.gz b/src/Package/Fetch/testdata/no_root.tar.gz
new file mode 100644
index 0000000000..a3a4baf40f
--- /dev/null
+++ b/src/Package/Fetch/testdata/no_root.tar.gz
Binary files differ
diff --git a/src/Sema.zig b/src/Sema.zig
index d64226fef0..d3989f630c 100644
--- a/src/Sema.zig
+++ b/src/Sema.zig
@@ -2059,12 +2059,12 @@ pub fn setupErrorReturnTrace(sema: *Sema, block: *Block, last_arg_index: usize)
const st_ptr = try err_trace_block.addTy(.alloc, try mod.singleMutPtrType(stack_trace_ty));
// st.instruction_addresses = &addrs;
- const instruction_addresses_field_name = try ip.getOrPutString(gpa, "instruction_addresses");
+ const instruction_addresses_field_name = try ip.getOrPutString(gpa, "instruction_addresses", .no_embedded_nulls);
const addr_field_ptr = try sema.fieldPtr(&err_trace_block, src, st_ptr, instruction_addresses_field_name, src, true);
try sema.storePtr2(&err_trace_block, src, addr_field_ptr, src, addrs_ptr, src, .store);
// st.index = 0;
- const index_field_name = try ip.getOrPutString(gpa, "index");
+ const index_field_name = try ip.getOrPutString(gpa, "index", .no_embedded_nulls);
const index_field_ptr = try sema.fieldPtr(&err_trace_block, src, st_ptr, index_field_name, src, true);
try sema.storePtr2(&err_trace_block, src, index_field_ptr, src, .zero_usize, src, .store);
@@ -2348,13 +2348,13 @@ fn failWithInvalidFieldAccess(
fn typeSupportsFieldAccess(mod: *const Module, ty: Type, field_name: InternPool.NullTerminatedString) bool {
const ip = &mod.intern_pool;
switch (ty.zigTypeTag(mod)) {
- .Array => return ip.stringEqlSlice(field_name, "len"),
+ .Array => return field_name.eqlSlice("len", ip),
.Pointer => {
const ptr_info = ty.ptrInfo(mod);
if (ptr_info.flags.size == .Slice) {
- return ip.stringEqlSlice(field_name, "ptr") or ip.stringEqlSlice(field_name, "len");
+ return field_name.eqlSlice("ptr", ip) or field_name.eqlSlice("len", ip);
} else if (Type.fromInterned(ptr_info.child).zigTypeTag(mod) == .Array) {
- return ip.stringEqlSlice(field_name, "len");
+ return field_name.eqlSlice("len", ip);
} else return false;
},
.Type, .Struct, .Union => return true,
@@ -2703,12 +2703,20 @@ fn getCaptures(sema: *Sema, block: *Block, type_src: LazySrcLoc, extra_index: us
break :capture .{ .runtime = sema.typeOf(air_ref).toIntern() };
}),
.decl_val => |str| capture: {
- const decl_name = try ip.getOrPutString(sema.gpa, sema.code.nullTerminatedString(str));
+ const decl_name = try ip.getOrPutString(
+ sema.gpa,
+ sema.code.nullTerminatedString(str),
+ .no_embedded_nulls,
+ );
const decl = try sema.lookupIdentifier(block, .unneeded, decl_name); // TODO: could we need this src loc?
break :capture InternPool.CaptureValue.wrap(.{ .decl_val = decl });
},
.decl_ref => |str| capture: {
- const decl_name = try ip.getOrPutString(sema.gpa, sema.code.nullTerminatedString(str));
+ const decl_name = try ip.getOrPutString(
+ sema.gpa,
+ sema.code.nullTerminatedString(str),
+ .no_embedded_nulls,
+ );
const decl = try sema.lookupIdentifier(block, .unneeded, decl_name); // TODO: could we need this src loc?
break :capture InternPool.CaptureValue.wrap(.{ .decl_ref = decl });
},
@@ -2882,7 +2890,7 @@ fn createAnonymousDeclTypeNamed(
const name = mod.intern_pool.getOrPutStringFmt(gpa, "{}__{s}_{d}", .{
src_decl.name.fmt(&mod.intern_pool), anon_prefix, @intFromEnum(new_decl_index),
- }) catch unreachable;
+ }, .no_embedded_nulls) catch unreachable;
try mod.initNewAnonDecl(new_decl_index, src_decl.src_line, val, name);
return new_decl_index;
},
@@ -2923,7 +2931,7 @@ fn createAnonymousDeclTypeNamed(
};
try writer.writeByte(')');
- const name = try mod.intern_pool.getOrPutString(gpa, buf.items);
+ const name = try mod.intern_pool.getOrPutString(gpa, buf.items, .no_embedded_nulls);
try mod.initNewAnonDecl(new_decl_index, src_decl.src_line, val, name);
return new_decl_index;
},
@@ -2937,8 +2945,7 @@ fn createAnonymousDeclTypeNamed(
const name = try mod.intern_pool.getOrPutStringFmt(gpa, "{}.{s}", .{
src_decl.name.fmt(&mod.intern_pool), zir_data[i].str_op.getStr(sema.code),
- });
-
+ }, .no_embedded_nulls);
try mod.initNewAnonDecl(new_decl_index, src_decl.src_line, val, name);
return new_decl_index;
},
@@ -3157,7 +3164,7 @@ fn zirEnumDecl(
const field_name_zir = sema.code.nullTerminatedString(field_name_index);
extra_index += 2; // field name, doc comment
- const field_name = try mod.intern_pool.getOrPutString(gpa, field_name_zir);
+ const field_name = try mod.intern_pool.getOrPutString(gpa, field_name_zir, .no_embedded_nulls);
const tag_overflow = if (has_tag_value) overflow: {
const tag_val_ref: Zir.Inst.Ref = @enumFromInt(sema.code.extra[extra_index]);
@@ -3462,7 +3469,7 @@ fn zirErrorSetDecl(
while (extra_index < extra_index_end) : (extra_index += 2) { // +2 to skip over doc_string
const name_index: Zir.NullTerminatedString = @enumFromInt(sema.code.extra[extra_index]);
const name = sema.code.nullTerminatedString(name_index);
- const name_ip = try mod.intern_pool.getOrPutString(gpa, name);
+ const name_ip = try mod.intern_pool.getOrPutString(gpa, name, .no_embedded_nulls);
_ = try mod.getErrorValue(name_ip);
const result = names.getOrPutAssumeCapacity(name_ip);
assert(!result.found_existing); // verified in AstGen
@@ -3635,7 +3642,7 @@ fn indexablePtrLen(
const is_pointer_to = object_ty.isSinglePointer(mod);
const indexable_ty = if (is_pointer_to) object_ty.childType(mod) else object_ty;
try checkIndexable(sema, block, src, indexable_ty);
- const field_name = try mod.intern_pool.getOrPutString(sema.gpa, "len");
+ const field_name = try mod.intern_pool.getOrPutString(sema.gpa, "len", .no_embedded_nulls);
return sema.fieldVal(block, src, object, field_name, src);
}
@@ -3649,7 +3656,7 @@ fn indexablePtrLenOrNone(
const operand_ty = sema.typeOf(operand);
try checkMemOperand(sema, block, src, operand_ty);
if (operand_ty.ptrSize(mod) == .Many) return .none;
- const field_name = try mod.intern_pool.getOrPutString(sema.gpa, "len");
+ const field_name = try mod.intern_pool.getOrPutString(sema.gpa, "len", .no_embedded_nulls);
return sema.fieldVal(block, src, operand, field_name, src);
}
@@ -4363,7 +4370,7 @@ fn zirForLen(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
}
if (!object_ty.indexableHasLen(mod)) continue;
- break :l try sema.fieldVal(block, arg_src, object, try ip.getOrPutString(gpa, "len"), arg_src);
+ break :l try sema.fieldVal(block, arg_src, object, try ip.getOrPutString(gpa, "len", .no_embedded_nulls), arg_src);
};
const arg_len = try sema.coerce(block, Type.usize, arg_len_uncoerced, arg_src);
if (len == .none) {
@@ -4747,7 +4754,11 @@ fn validateUnionInit(
const field_ptr_data = sema.code.instructions.items(.data)[@intFromEnum(field_ptr)].pl_node;
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
const field_ptr_extra = sema.code.extraData(Zir.Inst.Field, field_ptr_data.payload_index).data;
- const field_name = try mod.intern_pool.getOrPutString(gpa, sema.code.nullTerminatedString(field_ptr_extra.field_name_start));
+ const field_name = try mod.intern_pool.getOrPutString(
+ gpa,
+ sema.code.nullTerminatedString(field_ptr_extra.field_name_start),
+ .no_embedded_nulls,
+ );
const field_index = try sema.unionFieldIndex(block, union_ty, field_name, field_src);
const air_tags = sema.air_instructions.items(.tag);
const air_datas = sema.air_instructions.items(.data);
@@ -4890,6 +4901,7 @@ fn validateStructInit(
const field_name = try ip.getOrPutString(
gpa,
sema.code.nullTerminatedString(field_ptr_extra.field_name_start),
+ .no_embedded_nulls,
);
field_index.* = if (struct_ty.isTuple(mod))
try sema.tupleFieldIndex(block, struct_ty, field_name, field_src)
@@ -5672,25 +5684,26 @@ fn zirStoreNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!v
fn zirStr(sema: *Sema, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const bytes = sema.code.instructions.items(.data)[@intFromEnum(inst)].str.get(sema.code);
- return sema.addStrLitNoAlias(bytes);
+ return sema.addStrLit(
+ try sema.mod.intern_pool.getOrPutString(sema.gpa, bytes, .maybe_embedded_nulls),
+ bytes.len,
+ );
}
-fn addStrLit(sema: *Sema, bytes: []const u8) CompileError!Air.Inst.Ref {
- const duped_bytes = try sema.arena.dupe(u8, bytes);
- return addStrLitNoAlias(sema, duped_bytes);
+fn addNullTerminatedStrLit(sema: *Sema, string: InternPool.NullTerminatedString) CompileError!Air.Inst.Ref {
+ return sema.addStrLit(string.toString(), string.length(&sema.mod.intern_pool));
}
-/// Safe to call when `bytes` does not point into `InternPool`.
-fn addStrLitNoAlias(sema: *Sema, bytes: []const u8) CompileError!Air.Inst.Ref {
+fn addStrLit(sema: *Sema, string: InternPool.String, len: u64) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const array_ty = try mod.arrayType(.{
- .len = bytes.len,
+ .len = len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const val = try mod.intern(.{ .aggregate = .{
.ty = array_ty.toIntern(),
- .storage = .{ .bytes = bytes },
+ .storage = .{ .bytes = string },
} });
return anonDeclRef(sema, val);
}
@@ -6370,7 +6383,11 @@ fn zirExport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void
const src = inst_data.src();
const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const options_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
- const decl_name = try mod.intern_pool.getOrPutString(mod.gpa, sema.code.nullTerminatedString(extra.decl_name));
+ const decl_name = try mod.intern_pool.getOrPutString(
+ mod.gpa,
+ sema.code.nullTerminatedString(extra.decl_name),
+ .no_embedded_nulls,
+ );
const decl_index = if (extra.namespace != .none) index_blk: {
const container_ty = try sema.resolveType(block, operand_src, extra.namespace);
const container_namespace = container_ty.getNamespaceIndex(mod);
@@ -6721,7 +6738,11 @@ fn zirDeclRef(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
const mod = sema.mod;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].str_tok;
const src = inst_data.src();
- const decl_name = try mod.intern_pool.getOrPutString(sema.gpa, inst_data.get(sema.code));
+ const decl_name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ inst_data.get(sema.code),
+ .no_embedded_nulls,
+ );
const decl_index = try sema.lookupIdentifier(block, src, decl_name);
try sema.addReferencedBy(block, src, decl_index);
return sema.analyzeDeclRef(decl_index);
@@ -6731,7 +6752,11 @@ fn zirDeclVal(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
const mod = sema.mod;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].str_tok;
const src = inst_data.src();
- const decl_name = try mod.intern_pool.getOrPutString(sema.gpa, inst_data.get(sema.code));
+ const decl_name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ inst_data.get(sema.code),
+ .no_embedded_nulls,
+ );
const decl = try sema.lookupIdentifier(block, src, decl_name);
return sema.analyzeDeclVal(block, src, decl);
}
@@ -6883,7 +6908,7 @@ pub fn analyzeSaveErrRetIndex(sema: *Sema, block: *Block) SemaError!Air.Inst.Ref
error.NeededSourceLocation, error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable,
else => |e| return e,
};
- const field_name = try mod.intern_pool.getOrPutString(gpa, "index");
+ const field_name = try mod.intern_pool.getOrPutString(gpa, "index", .no_embedded_nulls);
const field_index = sema.structFieldIndex(block, stack_trace_ty, field_name, .unneeded) catch |err| switch (err) {
error.AnalysisFail, error.NeededSourceLocation => @panic("std.builtin.StackTrace is corrupt"),
error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable,
@@ -6926,7 +6951,7 @@ fn popErrorReturnTrace(
try sema.resolveTypeFields(stack_trace_ty);
const ptr_stack_trace_ty = try mod.singleMutPtrType(stack_trace_ty);
const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
- const field_name = try mod.intern_pool.getOrPutString(gpa, "index");
+ const field_name = try mod.intern_pool.getOrPutString(gpa, "index", .no_embedded_nulls);
const field_ptr = try sema.structFieldPtr(block, src, err_return_trace, field_name, src, stack_trace_ty, true);
try sema.storePtr2(block, src, field_ptr, src, saved_error_trace_index, src, .store);
} else if (is_non_error == null) {
@@ -6952,7 +6977,7 @@ fn popErrorReturnTrace(
try sema.resolveTypeFields(stack_trace_ty);
const ptr_stack_trace_ty = try mod.singleMutPtrType(stack_trace_ty);
const err_return_trace = try then_block.addTy(.err_return_trace, ptr_stack_trace_ty);
- const field_name = try mod.intern_pool.getOrPutString(gpa, "index");
+ const field_name = try mod.intern_pool.getOrPutString(gpa, "index", .no_embedded_nulls);
const field_ptr = try sema.structFieldPtr(&then_block, src, err_return_trace, field_name, src, stack_trace_ty, true);
try sema.storePtr2(&then_block, src, field_ptr, src, saved_error_trace_index, src, .store);
_ = try then_block.addBr(cond_block_inst, .void_value);
@@ -7010,7 +7035,11 @@ fn zirCall(
.direct => .{ .direct = try sema.resolveInst(extra.data.callee) },
.field => blk: {
const object_ptr = try sema.resolveInst(extra.data.obj_ptr);
- const field_name = try mod.intern_pool.getOrPutString(sema.gpa, sema.code.nullTerminatedString(extra.data.field_name_start));
+ const field_name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ sema.code.nullTerminatedString(extra.data.field_name_start),
+ .no_embedded_nulls,
+ );
const field_name_src: LazySrcLoc = .{ .node_offset_field_name = inst_data.src_node };
break :blk try sema.fieldCallBind(block, callee_src, object_ptr, field_name, field_name_src);
},
@@ -7073,7 +7102,7 @@ fn zirCall(
if (input_is_error or (pop_error_return_trace and return_ty.isError(mod))) {
const stack_trace_ty = try sema.getBuiltinType("StackTrace");
try sema.resolveTypeFields(stack_trace_ty);
- const field_name = try mod.intern_pool.getOrPutString(sema.gpa, "index");
+ const field_name = try mod.intern_pool.getOrPutString(sema.gpa, "index", .no_embedded_nulls);
const field_index = try sema.structFieldIndex(block, stack_trace_ty, field_name, call_src);
// Insert a save instruction before the arg resolution + call instructions we just generated
@@ -8648,7 +8677,11 @@ fn zirErrorValue(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
_ = block;
const mod = sema.mod;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].str_tok;
- const name = try mod.intern_pool.getOrPutString(sema.gpa, inst_data.get(sema.code));
+ const name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ inst_data.get(sema.code),
+ .no_embedded_nulls,
+ );
_ = try mod.getErrorValue(name);
// Create an error set type with only this error value, and return the value.
const error_set_type = try mod.singleErrorSetType(name);
@@ -8804,7 +8837,7 @@ fn zirEnumLiteral(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].str_tok;
const name = inst_data.get(sema.code);
return Air.internedToRef((try mod.intern(.{
- .enum_literal = try mod.intern_pool.getOrPutString(sema.gpa, name),
+ .enum_literal = try mod.intern_pool.getOrPutString(sema.gpa, name, .no_embedded_nulls),
})));
}
@@ -9761,7 +9794,7 @@ fn funcCommon(
const func_index = try ip.getExternFunc(gpa, .{
.ty = func_ty,
.decl = sema.owner_decl_index,
- .lib_name = try mod.intern_pool.getOrPutStringOpt(gpa, opt_lib_name),
+ .lib_name = try mod.intern_pool.getOrPutStringOpt(gpa, opt_lib_name, .no_embedded_nulls),
});
return finishFunc(
sema,
@@ -10225,7 +10258,11 @@ fn zirFieldVal(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const src = inst_data.src();
const field_name_src: LazySrcLoc = .{ .node_offset_field_name = inst_data.src_node };
const extra = sema.code.extraData(Zir.Inst.Field, inst_data.payload_index).data;
- const field_name = try mod.intern_pool.getOrPutString(sema.gpa, sema.code.nullTerminatedString(extra.field_name_start));
+ const field_name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ sema.code.nullTerminatedString(extra.field_name_start),
+ .no_embedded_nulls,
+ );
const object = try sema.resolveInst(extra.lhs);
return sema.fieldVal(block, src, object, field_name, field_name_src);
}
@@ -10239,7 +10276,11 @@ fn zirFieldPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const src = inst_data.src();
const field_name_src: LazySrcLoc = .{ .node_offset_field_name = inst_data.src_node };
const extra = sema.code.extraData(Zir.Inst.Field, inst_data.payload_index).data;
- const field_name = try mod.intern_pool.getOrPutString(sema.gpa, sema.code.nullTerminatedString(extra.field_name_start));
+ const field_name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ sema.code.nullTerminatedString(extra.field_name_start),
+ .no_embedded_nulls,
+ );
const object_ptr = try sema.resolveInst(extra.lhs);
return sema.fieldPtr(block, src, object_ptr, field_name, field_name_src, false);
}
@@ -10253,7 +10294,11 @@ fn zirStructInitFieldPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Compi
const src = inst_data.src();
const field_name_src: LazySrcLoc = .{ .node_offset_field_name_init = inst_data.src_node };
const extra = sema.code.extraData(Zir.Inst.Field, inst_data.payload_index).data;
- const field_name = try mod.intern_pool.getOrPutString(sema.gpa, sema.code.nullTerminatedString(extra.field_name_start));
+ const field_name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ sema.code.nullTerminatedString(extra.field_name_start),
+ .no_embedded_nulls,
+ );
const object_ptr = try sema.resolveInst(extra.lhs);
const struct_ty = sema.typeOf(object_ptr).childType(mod);
switch (struct_ty.zigTypeTag(mod)) {
@@ -13759,8 +13804,8 @@ fn zirHasField(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
switch (ip.indexToKey(ty.toIntern())) {
.ptr_type => |ptr_type| switch (ptr_type.flags.size) {
.Slice => {
- if (ip.stringEqlSlice(field_name, "ptr")) break :hf true;
- if (ip.stringEqlSlice(field_name, "len")) break :hf true;
+ if (field_name.eqlSlice("ptr", ip)) break :hf true;
+ if (field_name.eqlSlice("len", ip)) break :hf true;
break :hf false;
},
else => {},
@@ -13783,7 +13828,7 @@ fn zirHasField(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
.enum_type => {
break :hf ip.loadEnumType(ty.toIntern()).nameIndex(ip, field_name) != null;
},
- .array_type => break :hf ip.stringEqlSlice(field_name, "len"),
+ .array_type => break :hf field_name.eqlSlice("len", ip),
else => {},
}
return sema.fail(block, ty_src, "type '{}' does not support '@hasField'", .{
@@ -13885,7 +13930,11 @@ fn zirEmbedFile(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
fn zirRetErrValueCode(sema: *Sema, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].str_tok;
- const name = try mod.intern_pool.getOrPutString(sema.gpa, inst_data.get(sema.code));
+ const name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ inst_data.get(sema.code),
+ .no_embedded_nulls,
+ );
_ = try mod.getErrorValue(name);
const error_set_type = try mod.singleErrorSetType(name);
return Air.internedToRef((try mod.intern(.{ .err = .{
@@ -17552,11 +17601,9 @@ fn zirBuiltinSrc(
const gpa = sema.gpa;
const func_name_val = v: {
- // This dupe prevents InternPool string pool memory from being reallocated
- // while a reference exists.
- const bytes = try sema.arena.dupe(u8, ip.stringToSlice(fn_owner_decl.name));
+ const func_name_len = fn_owner_decl.name.length(ip);
const array_ty = try ip.get(gpa, .{ .array_type = .{
- .len = bytes.len,
+ .len = func_name_len,
.sentinel = .zero_u8,
.child = .u8_type,
} });
@@ -17568,19 +17615,19 @@ fn zirBuiltinSrc(
.orig_ty = .slice_const_u8_sentinel_0_type,
.val = try ip.get(gpa, .{ .aggregate = .{
.ty = array_ty,
- .storage = .{ .bytes = bytes },
+ .storage = .{ .bytes = fn_owner_decl.name.toString() },
} }),
} },
} }),
- .len = (try mod.intValue(Type.usize, bytes.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, func_name_len)).toIntern(),
} });
};
const file_name_val = v: {
// The compiler must not call realpath anywhere.
- const bytes = try fn_owner_decl.getFileScope(mod).fullPathZ(sema.arena);
+ const file_name = try fn_owner_decl.getFileScope(mod).fullPath(sema.arena);
const array_ty = try ip.get(gpa, .{ .array_type = .{
- .len = bytes.len,
+ .len = file_name.len,
.sentinel = .zero_u8,
.child = .u8_type,
} });
@@ -17592,11 +17639,13 @@ fn zirBuiltinSrc(
.orig_ty = .slice_const_u8_sentinel_0_type,
.val = try ip.get(gpa, .{ .aggregate = .{
.ty = array_ty,
- .storage = .{ .bytes = bytes },
+ .storage = .{
+ .bytes = try ip.getOrPutString(gpa, file_name, .maybe_embedded_nulls),
+ },
} }),
} },
} }),
- .len = (try mod.intValue(Type.usize, bytes.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, file_name.len)).toIntern(),
} });
};
@@ -17651,7 +17700,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Fn"),
+ try ip.getOrPutString(gpa, "Fn", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(fn_info_decl_index);
const fn_info_decl = mod.declPtr(fn_info_decl_index);
@@ -17661,7 +17710,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
fn_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Param"),
+ try ip.getOrPutString(gpa, "Param", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(param_info_decl_index);
const param_info_decl = mod.declPtr(param_info_decl_index);
@@ -17762,7 +17811,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Int"),
+ try ip.getOrPutString(gpa, "Int", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(int_info_decl_index);
const int_info_decl = mod.declPtr(int_info_decl_index);
@@ -17790,7 +17839,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Float"),
+ try ip.getOrPutString(gpa, "Float", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(float_info_decl_index);
const float_info_decl = mod.declPtr(float_info_decl_index);
@@ -17822,7 +17871,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
(try sema.getBuiltinType("Type")).getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Pointer"),
+ try ip.getOrPutString(gpa, "Pointer", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(decl_index);
const decl = mod.declPtr(decl_index);
@@ -17833,7 +17882,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
pointer_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Size"),
+ try ip.getOrPutString(gpa, "Size", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(decl_index);
const decl = mod.declPtr(decl_index);
@@ -17876,7 +17925,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Array"),
+ try ip.getOrPutString(gpa, "Array", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(array_field_ty_decl_index);
const array_field_ty_decl = mod.declPtr(array_field_ty_decl_index);
@@ -17907,7 +17956,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Vector"),
+ try ip.getOrPutString(gpa, "Vector", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(vector_field_ty_decl_index);
const vector_field_ty_decl = mod.declPtr(vector_field_ty_decl_index);
@@ -17936,7 +17985,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Optional"),
+ try ip.getOrPutString(gpa, "Optional", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(optional_field_ty_decl_index);
const optional_field_ty_decl = mod.declPtr(optional_field_ty_decl_index);
@@ -17963,7 +18012,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Error"),
+ try ip.getOrPutString(gpa, "Error", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(set_field_ty_decl_index);
const set_field_ty_decl = mod.declPtr(set_field_ty_decl_index);
@@ -17980,18 +18029,18 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
else => |err_set_ty_index| blk: {
const names = ip.indexToKey(err_set_ty_index).error_set_type.names;
const vals = try sema.arena.alloc(InternPool.Index, names.len);
- for (vals, 0..) |*field_val, i| {
- // TODO: write something like getCoercedInts to avoid needing to dupe
- const name = try sema.arena.dupeZ(u8, ip.stringToSlice(names.get(ip)[i]));
- const name_val = v: {
+ for (vals, 0..) |*field_val, error_index| {
+ const error_name = names.get(ip)[error_index];
+ const error_name_len = error_name.length(ip);
+ const error_name_val = v: {
const new_decl_ty = try mod.arrayType(.{
- .len = name.len,
+ .len = error_name_len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const new_decl_val = try mod.intern(.{ .aggregate = .{
.ty = new_decl_ty.toIntern(),
- .storage = .{ .bytes = name },
+ .storage = .{ .bytes = error_name.toString() },
} });
break :v try mod.intern(.{ .slice = .{
.ty = .slice_const_u8_sentinel_0_type,
@@ -18002,13 +18051,13 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
.orig_ty = .slice_const_u8_sentinel_0_type,
} },
} }),
- .len = (try mod.intValue(Type.usize, name.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, error_name_len)).toIntern(),
} });
};
const error_field_fields = .{
// name: [:0]const u8,
- name_val,
+ error_name_val,
};
field_val.* = try mod.intern(.{ .aggregate = .{
.ty = error_field_ty.toIntern(),
@@ -18069,7 +18118,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "ErrorUnion"),
+ try ip.getOrPutString(gpa, "ErrorUnion", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(error_union_field_ty_decl_index);
const error_union_field_ty_decl = mod.declPtr(error_union_field_ty_decl_index);
@@ -18099,7 +18148,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "EnumField"),
+ try ip.getOrPutString(gpa, "EnumField", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(enum_field_ty_decl_index);
const enum_field_ty_decl = mod.declPtr(enum_field_ty_decl_index);
@@ -18107,27 +18156,29 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
};
const enum_field_vals = try sema.arena.alloc(InternPool.Index, ip.loadEnumType(ty.toIntern()).names.len);
- for (enum_field_vals, 0..) |*field_val, i| {
+ for (enum_field_vals, 0..) |*field_val, tag_index| {
const enum_type = ip.loadEnumType(ty.toIntern());
const value_val = if (enum_type.values.len > 0)
try mod.intern_pool.getCoercedInts(
mod.gpa,
- mod.intern_pool.indexToKey(enum_type.values.get(ip)[i]).int,
+ mod.intern_pool.indexToKey(enum_type.values.get(ip)[tag_index]).int,
.comptime_int_type,
)
else
- (try mod.intValue(Type.comptime_int, i)).toIntern();
+ (try mod.intValue(Type.comptime_int, tag_index)).toIntern();
+
// TODO: write something like getCoercedInts to avoid needing to dupe
- const name = try sema.arena.dupeZ(u8, ip.stringToSlice(enum_type.names.get(ip)[i]));
const name_val = v: {
+ const tag_name = enum_type.names.get(ip)[tag_index];
+ const tag_name_len = tag_name.length(ip);
const new_decl_ty = try mod.arrayType(.{
- .len = name.len,
+ .len = tag_name_len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const new_decl_val = try mod.intern(.{ .aggregate = .{
.ty = new_decl_ty.toIntern(),
- .storage = .{ .bytes = name },
+ .storage = .{ .bytes = tag_name.toString() },
} });
break :v try mod.intern(.{ .slice = .{
.ty = .slice_const_u8_sentinel_0_type,
@@ -18138,7 +18189,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
.orig_ty = .slice_const_u8_sentinel_0_type,
} },
} }),
- .len = (try mod.intValue(Type.usize, name.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, tag_name_len)).toIntern(),
} });
};
@@ -18191,7 +18242,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Enum"),
+ try ip.getOrPutString(gpa, "Enum", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(type_enum_ty_decl_index);
const type_enum_ty_decl = mod.declPtr(type_enum_ty_decl_index);
@@ -18223,7 +18274,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Union"),
+ try ip.getOrPutString(gpa, "Union", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(type_union_ty_decl_index);
const type_union_ty_decl = mod.declPtr(type_union_ty_decl_index);
@@ -18235,7 +18286,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "UnionField"),
+ try ip.getOrPutString(gpa, "UnionField", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(union_field_ty_decl_index);
const union_field_ty_decl = mod.declPtr(union_field_ty_decl_index);
@@ -18250,18 +18301,18 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const union_field_vals = try gpa.alloc(InternPool.Index, tag_type.names.len);
defer gpa.free(union_field_vals);
- for (union_field_vals, 0..) |*field_val, i| {
- // TODO: write something like getCoercedInts to avoid needing to dupe
- const name = try sema.arena.dupeZ(u8, ip.stringToSlice(tag_type.names.get(ip)[i]));
+ for (union_field_vals, 0..) |*field_val, field_index| {
const name_val = v: {
+ const field_name = tag_type.names.get(ip)[field_index];
+ const field_name_len = field_name.length(ip);
const new_decl_ty = try mod.arrayType(.{
- .len = name.len,
+ .len = field_name_len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const new_decl_val = try mod.intern(.{ .aggregate = .{
.ty = new_decl_ty.toIntern(),
- .storage = .{ .bytes = name },
+ .storage = .{ .bytes = field_name.toString() },
} });
break :v try mod.intern(.{ .slice = .{
.ty = .slice_const_u8_sentinel_0_type,
@@ -18272,16 +18323,16 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
.orig_ty = .slice_const_u8_sentinel_0_type,
} },
} }),
- .len = (try mod.intValue(Type.usize, name.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, field_name_len)).toIntern(),
} });
};
const alignment = switch (layout) {
- .auto, .@"extern" => try sema.unionFieldAlignment(union_obj, @intCast(i)),
+ .auto, .@"extern" => try sema.unionFieldAlignment(union_obj, @intCast(field_index)),
.@"packed" => .none,
};
- const field_ty = union_obj.field_types.get(ip)[i];
+ const field_ty = union_obj.field_types.get(ip)[field_index];
const union_field_fields = .{
// name: [:0]const u8,
name_val,
@@ -18338,7 +18389,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
(try sema.getBuiltinType("Type")).getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "ContainerLayout"),
+ try ip.getOrPutString(gpa, "ContainerLayout", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(decl_index);
const decl = mod.declPtr(decl_index);
@@ -18371,7 +18422,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Struct"),
+ try ip.getOrPutString(gpa, "Struct", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(type_struct_ty_decl_index);
const type_struct_ty_decl = mod.declPtr(type_struct_ty_decl_index);
@@ -18383,7 +18434,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "StructField"),
+ try ip.getOrPutString(gpa, "StructField", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(struct_field_ty_decl_index);
const struct_field_ty_decl = mod.declPtr(struct_field_ty_decl_index);
@@ -18396,27 +18447,25 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
defer gpa.free(struct_field_vals);
fv: {
const struct_type = switch (ip.indexToKey(ty.toIntern())) {
- .anon_struct_type => |tuple| {
- struct_field_vals = try gpa.alloc(InternPool.Index, tuple.types.len);
- for (struct_field_vals, 0..) |*struct_field_val, i| {
- const anon_struct_type = ip.indexToKey(ty.toIntern()).anon_struct_type;
- const field_ty = anon_struct_type.types.get(ip)[i];
- const field_val = anon_struct_type.values.get(ip)[i];
+ .anon_struct_type => |anon_struct_type| {
+ struct_field_vals = try gpa.alloc(InternPool.Index, anon_struct_type.types.len);
+ for (struct_field_vals, 0..) |*struct_field_val, field_index| {
+ const field_ty = anon_struct_type.types.get(ip)[field_index];
+ const field_val = anon_struct_type.values.get(ip)[field_index];
const name_val = v: {
- // TODO: write something like getCoercedInts to avoid needing to dupe
- const bytes = if (tuple.names.len != 0)
- // https://github.com/ziglang/zig/issues/15709
- try sema.arena.dupeZ(u8, ip.stringToSlice(ip.indexToKey(ty.toIntern()).anon_struct_type.names.get(ip)[i]))
+ const field_name = if (anon_struct_type.names.len != 0)
+ anon_struct_type.names.get(ip)[field_index]
else
- try std.fmt.allocPrintZ(sema.arena, "{d}", .{i});
+ try ip.getOrPutStringFmt(gpa, "{d}", .{field_index}, .no_embedded_nulls);
+ const field_name_len = field_name.length(ip);
const new_decl_ty = try mod.arrayType(.{
- .len = bytes.len,
+ .len = field_name_len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const new_decl_val = try mod.intern(.{ .aggregate = .{
.ty = new_decl_ty.toIntern(),
- .storage = .{ .bytes = bytes },
+ .storage = .{ .bytes = field_name.toString() },
} });
break :v try mod.intern(.{ .slice = .{
.ty = .slice_const_u8_sentinel_0_type,
@@ -18427,7 +18476,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
.orig_ty = .slice_const_u8_sentinel_0_type,
} },
} }),
- .len = (try mod.intValue(Type.usize, bytes.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, field_name_len)).toIntern(),
} });
};
@@ -18462,24 +18511,24 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
try sema.resolveStructFieldInits(ty);
- for (struct_field_vals, 0..) |*field_val, i| {
- // TODO: write something like getCoercedInts to avoid needing to dupe
- const name = if (struct_type.fieldName(ip, i).unwrap()) |name_nts|
- try sema.arena.dupeZ(u8, ip.stringToSlice(name_nts))
+ for (struct_field_vals, 0..) |*field_val, field_index| {
+ const field_name = if (struct_type.fieldName(ip, field_index).unwrap()) |field_name|
+ field_name
else
- try std.fmt.allocPrintZ(sema.arena, "{d}", .{i});
- const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]);
- const field_init = struct_type.fieldInit(ip, i);
- const field_is_comptime = struct_type.fieldIsComptime(ip, i);
+ try ip.getOrPutStringFmt(gpa, "{d}", .{field_index}, .no_embedded_nulls);
+ const field_name_len = field_name.length(ip);
+ const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[field_index]);
+ const field_init = struct_type.fieldInit(ip, field_index);
+ const field_is_comptime = struct_type.fieldIsComptime(ip, field_index);
const name_val = v: {
const new_decl_ty = try mod.arrayType(.{
- .len = name.len,
+ .len = field_name_len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const new_decl_val = try mod.intern(.{ .aggregate = .{
.ty = new_decl_ty.toIntern(),
- .storage = .{ .bytes = name },
+ .storage = .{ .bytes = field_name.toString() },
} });
break :v try mod.intern(.{ .slice = .{
.ty = .slice_const_u8_sentinel_0_type,
@@ -18490,7 +18539,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
.orig_ty = .slice_const_u8_sentinel_0_type,
} },
} }),
- .len = (try mod.intValue(Type.usize, name.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, field_name_len)).toIntern(),
} });
};
@@ -18499,7 +18548,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const alignment = switch (struct_type.layout) {
.@"packed" => .none,
else => try sema.structFieldAlignment(
- struct_type.fieldAlign(ip, i),
+ struct_type.fieldAlign(ip, field_index),
field_ty,
struct_type.layout,
),
@@ -18569,7 +18618,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
(try sema.getBuiltinType("Type")).getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "ContainerLayout"),
+ try ip.getOrPutString(gpa, "ContainerLayout", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(decl_index);
const decl = mod.declPtr(decl_index);
@@ -18605,7 +18654,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, "Opaque"),
+ try ip.getOrPutString(gpa, "Opaque", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(type_opaque_ty_decl_index);
const type_opaque_ty_decl = mod.declPtr(type_opaque_ty_decl_index);
@@ -18648,7 +18697,7 @@ fn typeInfoDecls(
block,
src,
type_info_ty.getNamespaceIndex(mod),
- try mod.intern_pool.getOrPutString(gpa, "Declaration"),
+ try mod.intern_pool.getOrPutString(gpa, "Declaration", .no_embedded_nulls),
)).?;
try sema.ensureDeclAnalyzed(declaration_ty_decl_index);
const declaration_ty_decl = mod.declPtr(declaration_ty_decl_index);
@@ -18722,16 +18771,15 @@ fn typeInfoNamespaceDecls(
}
if (decl.kind != .named) continue;
const name_val = v: {
- // TODO: write something like getCoercedInts to avoid needing to dupe
- const name = try sema.arena.dupeZ(u8, ip.stringToSlice(decl.name));
+ const decl_name_len = decl.name.length(ip);
const new_decl_ty = try mod.arrayType(.{
- .len = name.len,
+ .len = decl_name_len,
.sentinel = .zero_u8,
.child = .u8_type,
});
const new_decl_val = try mod.intern(.{ .aggregate = .{
.ty = new_decl_ty.toIntern(),
- .storage = .{ .bytes = name },
+ .storage = .{ .bytes = decl.name.toString() },
} });
break :v try mod.intern(.{ .slice = .{
.ty = .slice_const_u8_sentinel_0_type,
@@ -18742,7 +18790,7 @@ fn typeInfoNamespaceDecls(
.val = new_decl_val,
} },
} }),
- .len = (try mod.intValue(Type.usize, name.len)).toIntern(),
+ .len = (try mod.intValue(Type.usize, decl_name_len)).toIntern(),
} });
};
@@ -19385,7 +19433,11 @@ fn zirRetErrValue(
) CompileError!void {
const mod = sema.mod;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].str_tok;
- const err_name = try mod.intern_pool.getOrPutString(sema.gpa, inst_data.get(sema.code));
+ const err_name = try mod.intern_pool.getOrPutString(
+ sema.gpa,
+ inst_data.get(sema.code),
+ .no_embedded_nulls,
+ );
_ = try mod.getErrorValue(err_name);
const src = inst_data.src();
// Return the error code from the function.
@@ -20072,7 +20124,11 @@ fn zirStructInit(
const field_type_data = zir_datas[@intFromEnum(item.data.field_type)].pl_node;
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
- const field_name = try ip.getOrPutString(gpa, sema.code.nullTerminatedString(field_type_extra.name_start));
+ const field_name = try ip.getOrPutString(
+ gpa,
+ sema.code.nullTerminatedString(field_type_extra.name_start),
+ .no_embedded_nulls,
+ );
const field_index = if (resolved_ty.isTuple(mod))
try sema.tupleFieldIndex(block, resolved_ty, field_name, field_src)
else
@@ -20109,7 +20165,11 @@ fn zirStructInit(
const field_type_data = zir_datas[@intFromEnum(item.data.field_type)].pl_node;
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
- const field_name = try ip.getOrPutString(gpa, sema.code.nullTerminatedString(field_type_extra.name_start));
+ const field_name = try ip.getOrPutString(
+ gpa,
+ sema.code.nullTerminatedString(field_type_extra.name_start),
+ .no_embedded_nulls,
+ );
const field_index = try sema.unionFieldIndex(block, resolved_ty, field_name, field_src);
const tag_ty = resolved_ty.unionTagTypeHypothetical(mod);
const tag_val = try mod.enumValueFieldIndex(tag_ty, field_index);
@@ -20417,8 +20477,7 @@ fn structInitAnon(
},
};
- const name_ip = try mod.intern_pool.getOrPutString(gpa, name);
- field_name.* = name_ip;
+ field_name.* = try mod.intern_pool.getOrPutString(gpa, name, .no_embedded_nulls);
const init = try sema.resolveInst(item.data.init);
field_ty.* = sema.typeOf(init).toIntern();
@@ -20809,7 +20868,7 @@ fn zirStructInitFieldType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Comp
};
const aggregate_ty = wrapped_aggregate_ty.optEuBaseType(mod);
const zir_field_name = sema.code.nullTerminatedString(extra.name_start);
- const field_name = try ip.getOrPutString(sema.gpa, zir_field_name);
+ const field_name = try ip.getOrPutString(sema.gpa, zir_field_name, .no_embedded_nulls);
return sema.fieldType(block, aggregate_ty, field_name, field_name_src, ty_src);
}
@@ -20975,7 +21034,7 @@ fn zirErrorName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
if (try sema.resolveDefinedValue(block, operand_src, operand)) |val| {
const err_name = sema.mod.intern_pool.indexToKey(val.toIntern()).err.name;
- return sema.addStrLit(sema.mod.intern_pool.stringToSlice(err_name));
+ return sema.addNullTerminatedStrLit(err_name);
}
// Similar to zirTagName, we have a special AIR instruction for the error name in case an optimization pass
@@ -21093,7 +21152,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
.EnumLiteral => {
const val = try sema.resolveConstDefinedValue(block, .unneeded, operand, undefined);
const tag_name = ip.indexToKey(val.toIntern()).enum_literal;
- return sema.addStrLit(ip.stringToSlice(tag_name));
+ return sema.addNullTerminatedStrLit(tag_name);
},
.Enum => operand_ty,
.Union => operand_ty.unionTagType(mod) orelse
@@ -21127,7 +21186,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
};
// TODO: write something like getCoercedInts to avoid needing to dupe
const field_name = enum_ty.enumFieldName(field_index, mod);
- return sema.addStrLit(ip.stringToSlice(field_name));
+ return sema.addNullTerminatedStrLit(field_name);
}
try sema.requireRuntimeBlock(block, src, operand_src);
if (block.wantSafety() and sema.mod.backendSupportsFeature(.is_named_enum_value)) {
@@ -21179,11 +21238,11 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const signedness_val = try Value.fromInterned(union_val.val).fieldValue(
mod,
- struct_type.nameIndex(ip, try ip.getOrPutString(gpa, "signedness")).?,
+ struct_type.nameIndex(ip, try ip.getOrPutString(gpa, "signedness", .no_embedded_nulls)).?,
);
const bits_val = try Value.fromInterned(union_val.val).fieldValue(
mod,
- struct_type.nameIndex(ip, try ip.getOrPutString(gpa, "bits")).?,
+ struct_type.nameIndex(ip, try ip.getOrPutString(gpa, "bits", .no_embedded_nulls)).?,
);
const signedness = mod.toEnum(std.builtin.Signedness, signedness_val);
@@ -21195,11 +21254,11 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const len_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "len"),
+ try ip.getOrPutString(gpa, "len", .no_embedded_nulls),
).?);
const child_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "child"),
+ try ip.getOrPutString(gpa, "child", .no_embedded_nulls),
).?);
const len: u32 = @intCast(try len_val.toUnsignedIntAdvanced(sema));
@@ -21217,7 +21276,7 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const bits_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "bits"),
+ try ip.getOrPutString(gpa, "bits", .no_embedded_nulls),
).?);
const bits: u16 = @intCast(try bits_val.toUnsignedIntAdvanced(sema));
@@ -21235,35 +21294,35 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const size_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "size"),
+ try ip.getOrPutString(gpa, "size", .no_embedded_nulls),
).?);
const is_const_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_const"),
+ try ip.getOrPutString(gpa, "is_const", .no_embedded_nulls),
).?);
const is_volatile_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_volatile"),
+ try ip.getOrPutString(gpa, "is_volatile", .no_embedded_nulls),
).?);
const alignment_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "alignment"),
+ try ip.getOrPutString(gpa, "alignment", .no_embedded_nulls),
).?);
const address_space_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "address_space"),
+ try ip.getOrPutString(gpa, "address_space", .no_embedded_nulls),
).?);
const child_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "child"),
+ try ip.getOrPutString(gpa, "child", .no_embedded_nulls),
).?);
const is_allowzero_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_allowzero"),
+ try ip.getOrPutString(gpa, "is_allowzero", .no_embedded_nulls),
).?);
const sentinel_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "sentinel"),
+ try ip.getOrPutString(gpa, "sentinel", .no_embedded_nulls),
).?);
if (!try sema.intFitsInType(alignment_val, Type.u32, null)) {
@@ -21341,15 +21400,15 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const len_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "len"),
+ try ip.getOrPutString(gpa, "len", .no_embedded_nulls),
).?);
const child_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "child"),
+ try ip.getOrPutString(gpa, "child", .no_embedded_nulls),
).?);
const sentinel_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "sentinel"),
+ try ip.getOrPutString(gpa, "sentinel", .no_embedded_nulls),
).?);
const len = try len_val.toUnsignedIntAdvanced(sema);
@@ -21370,7 +21429,7 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const child_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "child"),
+ try ip.getOrPutString(gpa, "child", .no_embedded_nulls),
).?);
const child_ty = child_val.toType();
@@ -21382,11 +21441,11 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const error_set_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "error_set"),
+ try ip.getOrPutString(gpa, "error_set", .no_embedded_nulls),
).?);
const payload_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "payload"),
+ try ip.getOrPutString(gpa, "payload", .no_embedded_nulls),
).?);
const error_set_ty = error_set_val.toType();
@@ -21415,7 +21474,7 @@ fn zirReify(
const elem_struct_type = ip.loadStructType(ip.typeOf(elem_val.toIntern()));
const name_val = try elem_val.fieldValue(mod, elem_struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "name"),
+ try ip.getOrPutString(gpa, "name", .no_embedded_nulls),
).?);
const name = try sema.sliceToIpString(block, src, name_val, .{
@@ -21437,23 +21496,23 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const layout_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "layout"),
+ try ip.getOrPutString(gpa, "layout", .no_embedded_nulls),
).?);
const backing_integer_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "backing_integer"),
+ try ip.getOrPutString(gpa, "backing_integer", .no_embedded_nulls),
).?);
const fields_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "fields"),
+ try ip.getOrPutString(gpa, "fields", .no_embedded_nulls),
).?);
const decls_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "decls"),
+ try ip.getOrPutString(gpa, "decls", .no_embedded_nulls),
).?);
const is_tuple_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_tuple"),
+ try ip.getOrPutString(gpa, "is_tuple", .no_embedded_nulls),
).?);
const layout = mod.toEnum(std.builtin.Type.ContainerLayout, layout_val);
@@ -21477,19 +21536,19 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const tag_type_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "tag_type"),
+ try ip.getOrPutString(gpa, "tag_type", .no_embedded_nulls),
).?);
const fields_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "fields"),
+ try ip.getOrPutString(gpa, "fields", .no_embedded_nulls),
).?);
const decls_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "decls"),
+ try ip.getOrPutString(gpa, "decls", .no_embedded_nulls),
).?);
const is_exhaustive_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_exhaustive"),
+ try ip.getOrPutString(gpa, "is_exhaustive", .no_embedded_nulls),
).?);
if (try decls_val.sliceLen(sema) > 0) {
@@ -21506,7 +21565,7 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const decls_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "decls"),
+ try ip.getOrPutString(gpa, "decls", .no_embedded_nulls),
).?);
// Decls
@@ -21544,19 +21603,19 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const layout_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "layout"),
+ try ip.getOrPutString(gpa, "layout", .no_embedded_nulls),
).?);
const tag_type_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "tag_type"),
+ try ip.getOrPutString(gpa, "tag_type", .no_embedded_nulls),
).?);
const fields_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "fields"),
+ try ip.getOrPutString(gpa, "fields", .no_embedded_nulls),
).?);
const decls_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "decls"),
+ try ip.getOrPutString(gpa, "decls", .no_embedded_nulls),
).?);
if (try decls_val.sliceLen(sema) > 0) {
@@ -21574,23 +21633,23 @@ fn zirReify(
const struct_type = ip.loadStructType(ip.typeOf(union_val.val));
const calling_convention_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "calling_convention"),
+ try ip.getOrPutString(gpa, "calling_convention", .no_embedded_nulls),
).?);
const is_generic_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_generic"),
+ try ip.getOrPutString(gpa, "is_generic", .no_embedded_nulls),
).?);
const is_var_args_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_var_args"),
+ try ip.getOrPutString(gpa, "is_var_args", .no_embedded_nulls),
).?);
const return_type_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "return_type"),
+ try ip.getOrPutString(gpa, "return_type", .no_embedded_nulls),
).?);
const params_slice_val = try Value.fromInterned(union_val.val).fieldValue(mod, struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "params"),
+ try ip.getOrPutString(gpa, "params", .no_embedded_nulls),
).?);
const is_generic = is_generic_val.toBool();
@@ -21620,15 +21679,15 @@ fn zirReify(
const elem_struct_type = ip.loadStructType(ip.typeOf(elem_val.toIntern()));
const param_is_generic_val = try elem_val.fieldValue(mod, elem_struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_generic"),
+ try ip.getOrPutString(gpa, "is_generic", .no_embedded_nulls),
).?);
const param_is_noalias_val = try elem_val.fieldValue(mod, elem_struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "is_noalias"),
+ try ip.getOrPutString(gpa, "is_noalias", .no_embedded_nulls),
).?);
const opt_param_type_val = try elem_val.fieldValue(mod, elem_struct_type.nameIndex(
ip,
- try ip.getOrPutString(gpa, "type"),
+ try ip.getOrPutString(gpa, "type", .no_embedded_nulls),
).?);
if (param_is_generic_val.toBool()) {
@@ -22366,13 +22425,14 @@ fn zirCVaStart(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData)
fn zirTypeName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const mod = sema.mod;
+ const ip = &mod.intern_pool;
+
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].un_node;
const ty_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const ty = try sema.resolveType(block, ty_src, inst_data.operand);
- var bytes = std.ArrayList(u8).init(sema.arena);
- try ty.print(bytes.writer(), mod);
- return addStrLitNoAlias(sema, bytes.items);
+ const type_name = try ip.getOrPutStringFmt(sema.gpa, "{}", .{ty.fmt(mod)}, .no_embedded_nulls);
+ return sema.addNullTerminatedStrLit(type_name);
}
fn zirFrameType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
@@ -23507,7 +23567,7 @@ fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u6
}
const field_index = if (ty.isTuple(mod)) blk: {
- if (ip.stringEqlSlice(field_name, "len")) {
+ if (field_name.eqlSlice("len", ip)) {
return sema.fail(block, src, "no offset available for 'len' field of tuple", .{});
}
break :blk try sema.tupleFieldIndex(block, ty, field_name, rhs_src);
@@ -23977,18 +24037,18 @@ fn resolveExportOptions(
const section_src = sema.maybeOptionsSrc(block, src, "section");
const visibility_src = sema.maybeOptionsSrc(block, src, "visibility");
- const name_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "name"), name_src);
+ const name_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "name", .no_embedded_nulls), name_src);
const name = try sema.toConstString(block, name_src, name_operand, .{
.needed_comptime_reason = "name of exported value must be comptime-known",
});
- const linkage_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "linkage"), linkage_src);
+ const linkage_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "linkage", .no_embedded_nulls), linkage_src);
const linkage_val = try sema.resolveConstDefinedValue(block, linkage_src, linkage_operand, .{
.needed_comptime_reason = "linkage of exported value must be comptime-known",
});
const linkage = mod.toEnum(std.builtin.GlobalLinkage, linkage_val);
- const section_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "section"), section_src);
+ const section_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "section", .no_embedded_nulls), section_src);
const section_opt_val = try sema.resolveConstDefinedValue(block, section_src, section_operand, .{
.needed_comptime_reason = "linksection of exported value must be comptime-known",
});
@@ -23999,7 +24059,7 @@ fn resolveExportOptions(
else
null;
- const visibility_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "visibility"), visibility_src);
+ const visibility_operand = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "visibility", .no_embedded_nulls), visibility_src);
const visibility_val = try sema.resolveConstDefinedValue(block, visibility_src, visibility_operand, .{
.needed_comptime_reason = "visibility of exported value must be comptime-known",
});
@@ -24016,9 +24076,9 @@ fn resolveExportOptions(
}
return .{
- .name = try ip.getOrPutString(gpa, name),
+ .name = try ip.getOrPutString(gpa, name, .no_embedded_nulls),
.linkage = linkage,
- .section = try ip.getOrPutStringOpt(gpa, section),
+ .section = try ip.getOrPutStringOpt(gpa, section, .no_embedded_nulls),
.visibility = visibility,
};
}
@@ -24896,7 +24956,7 @@ fn zirFieldParentPtr(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.Ins
const field_index = switch (parent_ty.zigTypeTag(mod)) {
.Struct => blk: {
if (parent_ty.isTuple(mod)) {
- if (ip.stringEqlSlice(field_name, "len")) {
+ if (field_name.eqlSlice("len", ip)) {
return sema.fail(block, inst_src, "cannot get @fieldParentPtr of 'len' field of tuple", .{});
}
break :blk try sema.tupleFieldIndex(block, parent_ty, field_name, field_name_src);
@@ -25578,7 +25638,7 @@ fn zirMemset(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void
const runtime_src = rs: {
const ptr_val = try sema.resolveDefinedValue(block, dest_src, dest_ptr) orelse break :rs dest_src;
- const len_air_ref = try sema.fieldVal(block, src, dest_ptr, try ip.getOrPutString(gpa, "len"), dest_src);
+ const len_air_ref = try sema.fieldVal(block, src, dest_ptr, try ip.getOrPutString(gpa, "len", .no_embedded_nulls), dest_src);
const len_val = (try sema.resolveDefinedValue(block, dest_src, len_air_ref)) orelse break :rs dest_src;
const len_u64 = (try len_val.getUnsignedIntAdvanced(mod, sema)).?;
const len = try sema.usizeCast(block, dest_src, len_u64);
@@ -25708,7 +25768,7 @@ fn zirVarExtended(
.ty = var_ty.toIntern(),
.init = init_val,
.decl = sema.owner_decl_index,
- .lib_name = try mod.intern_pool.getOrPutStringOpt(sema.gpa, lib_name),
+ .lib_name = try mod.intern_pool.getOrPutStringOpt(sema.gpa, lib_name, .no_embedded_nulls),
.is_extern = small.is_extern,
.is_const = small.is_const,
.is_threadlocal = small.is_threadlocal,
@@ -26076,17 +26136,17 @@ fn resolvePrefetchOptions(
const locality_src = sema.maybeOptionsSrc(block, src, "locality");
const cache_src = sema.maybeOptionsSrc(block, src, "cache");
- const rw = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "rw"), rw_src);
+ const rw = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "rw", .no_embedded_nulls), rw_src);
const rw_val = try sema.resolveConstDefinedValue(block, rw_src, rw, .{
.needed_comptime_reason = "prefetch read/write must be comptime-known",
});
- const locality = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "locality"), locality_src);
+ const locality = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "locality", .no_embedded_nulls), locality_src);
const locality_val = try sema.resolveConstDefinedValue(block, locality_src, locality, .{
.needed_comptime_reason = "prefetch locality must be comptime-known",
});
- const cache = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "cache"), cache_src);
+ const cache = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "cache", .no_embedded_nulls), cache_src);
const cache_val = try sema.resolveConstDefinedValue(block, cache_src, cache, .{
.needed_comptime_reason = "prefetch cache must be comptime-known",
});
@@ -26155,23 +26215,23 @@ fn resolveExternOptions(
const linkage_src = sema.maybeOptionsSrc(block, src, "linkage");
const thread_local_src = sema.maybeOptionsSrc(block, src, "thread_local");
- const name_ref = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "name"), name_src);
+ const name_ref = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "name", .no_embedded_nulls), name_src);
const name = try sema.toConstString(block, name_src, name_ref, .{
.needed_comptime_reason = "name of the extern symbol must be comptime-known",
});
- const library_name_inst = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "library_name"), library_src);
+ const library_name_inst = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "library_name", .no_embedded_nulls), library_src);
const library_name_val = try sema.resolveConstDefinedValue(block, library_src, library_name_inst, .{
.needed_comptime_reason = "library in which extern symbol is must be comptime-known",
});
- const linkage_ref = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "linkage"), linkage_src);
+ const linkage_ref = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "linkage", .no_embedded_nulls), linkage_src);
const linkage_val = try sema.resolveConstDefinedValue(block, linkage_src, linkage_ref, .{
.needed_comptime_reason = "linkage of the extern symbol must be comptime-known",
});
const linkage = mod.toEnum(std.builtin.GlobalLinkage, linkage_val);
- const is_thread_local = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "is_thread_local"), thread_local_src);
+ const is_thread_local = try sema.fieldVal(block, src, options, try ip.getOrPutString(gpa, "is_thread_local", .no_embedded_nulls), thread_local_src);
const is_thread_local_val = try sema.resolveConstDefinedValue(block, thread_local_src, is_thread_local, .{
.needed_comptime_reason = "threadlocality of the extern symbol must be comptime-known",
});
@@ -26196,8 +26256,8 @@ fn resolveExternOptions(
}
return .{
- .name = try ip.getOrPutString(gpa, name),
- .library_name = try ip.getOrPutStringOpt(gpa, library_name),
+ .name = try ip.getOrPutString(gpa, name, .no_embedded_nulls),
+ .library_name = try ip.getOrPutStringOpt(gpa, library_name, .no_embedded_nulls),
.linkage = linkage,
.is_thread_local = is_thread_local_val.toBool(),
};
@@ -26809,7 +26869,7 @@ fn preparePanicId(sema: *Sema, block: *Block, panic_id: Module.PanicId) !InternP
block,
.unneeded,
panic_messages_ty.getNamespaceIndex(mod),
- try mod.intern_pool.getOrPutString(gpa, @tagName(panic_id)),
+ try mod.intern_pool.getOrPutString(gpa, @tagName(panic_id), .no_embedded_nulls),
) catch |err| switch (err) {
error.AnalysisFail, error.NeededSourceLocation => @panic("std.builtin.panic_messages is corrupt"),
error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable,
@@ -27129,9 +27189,9 @@ fn fieldVal(
switch (inner_ty.zigTypeTag(mod)) {
.Array => {
- if (ip.stringEqlSlice(field_name, "len")) {
+ if (field_name.eqlSlice("len", ip)) {
return Air.internedToRef((try mod.intValue(Type.usize, inner_ty.arrayLen(mod))).toIntern());
- } else if (ip.stringEqlSlice(field_name, "ptr") and is_pointer_to) {
+ } else if (field_name.eqlSlice("ptr", ip) and is_pointer_to) {
const ptr_info = object_ty.ptrInfo(mod);
const result_ty = try sema.ptrType(.{
.child = Type.fromInterned(ptr_info.child).childType(mod).toIntern(),
@@ -27160,13 +27220,13 @@ fn fieldVal(
.Pointer => {
const ptr_info = inner_ty.ptrInfo(mod);
if (ptr_info.flags.size == .Slice) {
- if (ip.stringEqlSlice(field_name, "ptr")) {
+ if (field_name.eqlSlice("ptr", ip)) {
const slice = if (is_pointer_to)
try sema.analyzeLoad(block, src, object, object_src)
else
object;
return sema.analyzeSlicePtr(block, object_src, slice, inner_ty);
- } else if (ip.stringEqlSlice(field_name, "len")) {
+ } else if (field_name.eqlSlice("len", ip)) {
const slice = if (is_pointer_to)
try sema.analyzeLoad(block, src, object, object_src)
else
@@ -27319,10 +27379,10 @@ fn fieldPtr(
switch (inner_ty.zigTypeTag(mod)) {
.Array => {
- if (ip.stringEqlSlice(field_name, "len")) {
+ if (field_name.eqlSlice("len", ip)) {
const int_val = try mod.intValue(Type.usize, inner_ty.arrayLen(mod));
return anonDeclRef(sema, int_val.toIntern());
- } else if (ip.stringEqlSlice(field_name, "ptr") and is_pointer_to) {
+ } else if (field_name.eqlSlice("ptr", ip) and is_pointer_to) {
const ptr_info = object_ty.ptrInfo(mod);
const new_ptr_ty = try sema.ptrType(.{
.child = Type.fromInterned(ptr_info.child).childType(mod).toIntern(),
@@ -27370,7 +27430,7 @@ fn fieldPtr(
const attr_ptr_ty = if (is_pointer_to) object_ty else object_ptr_ty;
- if (ip.stringEqlSlice(field_name, "ptr")) {
+ if (field_name.eqlSlice("ptr", ip)) {
const slice_ptr_ty = inner_ty.slicePtrFieldType(mod);
const result_ty = try sema.ptrType(.{
@@ -27396,7 +27456,7 @@ fn fieldPtr(
const field_ptr = try block.addTyOp(.ptr_slice_ptr_ptr, result_ty, inner_ptr);
try sema.checkKnownAllocPtr(block, inner_ptr, field_ptr);
return field_ptr;
- } else if (ip.stringEqlSlice(field_name, "len")) {
+ } else if (field_name.eqlSlice("len", ip)) {
const result_ty = try sema.ptrType(.{
.child = .usize_type,
.flags = .{
@@ -27584,7 +27644,7 @@ fn fieldCallBind(
return sema.finishFieldCallBind(block, src, ptr_ty, field_ty, field_index, object_ptr);
} else if (concrete_ty.isTuple(mod)) {
- if (ip.stringEqlSlice(field_name, "len")) {
+ if (field_name.eqlSlice("len", ip)) {
return .{ .direct = try mod.intRef(Type.usize, concrete_ty.structFieldCount(mod)) };
}
if (field_name.toUnsigned(ip)) |field_index| {
@@ -27808,7 +27868,7 @@ fn structFieldPtr(
try sema.resolveStructLayout(struct_ty);
if (struct_ty.isTuple(mod)) {
- if (ip.stringEqlSlice(field_name, "len")) {
+ if (field_name.eqlSlice("len", ip)) {
const len_inst = try mod.intRef(Type.usize, struct_ty.structFieldCount(mod));
return sema.analyzeRef(block, src, len_inst);
}
@@ -28023,7 +28083,7 @@ fn tupleFieldVal(
tuple_ty: Type,
) CompileError!Air.Inst.Ref {
const mod = sema.mod;
- if (mod.intern_pool.stringEqlSlice(field_name, "len")) {
+ if (field_name.eqlSlice("len", &mod.intern_pool)) {
return mod.intRef(Type.usize, tuple_ty.structFieldCount(mod));
}
const field_index = try sema.tupleFieldIndex(block, tuple_ty, field_name, field_name_src);
@@ -28039,16 +28099,17 @@ fn tupleFieldIndex(
field_name_src: LazySrcLoc,
) CompileError!u32 {
const mod = sema.mod;
- assert(!mod.intern_pool.stringEqlSlice(field_name, "len"));
- if (field_name.toUnsigned(&mod.intern_pool)) |field_index| {
+ const ip = &mod.intern_pool;
+ assert(!field_name.eqlSlice("len", ip));
+ if (field_name.toUnsigned(ip)) |field_index| {
if (field_index < tuple_ty.structFieldCount(mod)) return field_index;
return sema.fail(block, field_name_src, "index '{}' out of bounds of tuple '{}'", .{
- field_name.fmt(&mod.intern_pool), tuple_ty.fmt(mod),
+ field_name.fmt(ip), tuple_ty.fmt(mod),
});
}
return sema.fail(block, field_name_src, "no field named '{}' in tuple '{}'", .{
- field_name.fmt(&mod.intern_pool), tuple_ty.fmt(mod),
+ field_name.fmt(ip), tuple_ty.fmt(mod),
});
}
@@ -28076,7 +28137,7 @@ fn tupleFieldValByIndex(
return switch (mod.intern_pool.indexToKey(tuple_val.toIntern())) {
.undef => mod.undefRef(field_ty),
.aggregate => |aggregate| Air.internedToRef(switch (aggregate.storage) {
- .bytes => |bytes| try mod.intValue(Type.u8, bytes[0]),
+ .bytes => |bytes| try mod.intValue(Type.u8, bytes.at(field_index, &mod.intern_pool)),
.elems => |elems| Value.fromInterned(elems[field_index]),
.repeated_elem => |elem| Value.fromInterned(elem),
}.toIntern()),
@@ -32266,38 +32327,36 @@ fn coerceTupleToStruct(
.struct_type => ip.loadStructType(inst_ty.toIntern()).field_types.len,
else => unreachable,
};
- for (0..field_count) |field_index_usize| {
- const field_i: u32 = @intCast(field_index_usize);
+ for (0..field_count) |tuple_field_index| {
const field_src = inst_src; // TODO better source location
- // https://github.com/ziglang/zig/issues/15709
const field_name: InternPool.NullTerminatedString = switch (ip.indexToKey(inst_ty.toIntern())) {
.anon_struct_type => |anon_struct_type| if (anon_struct_type.names.len > 0)
- anon_struct_type.names.get(ip)[field_i]
+ anon_struct_type.names.get(ip)[tuple_field_index]
else
- try ip.getOrPutStringFmt(sema.gpa, "{d}", .{field_i}),
- .struct_type => ip.loadStructType(inst_ty.toIntern()).field_names.get(ip)[field_i],
+ try ip.getOrPutStringFmt(sema.gpa, "{d}", .{tuple_field_index}, .no_embedded_nulls),
+ .struct_type => ip.loadStructType(inst_ty.toIntern()).field_names.get(ip)[tuple_field_index],
else => unreachable,
};
- const field_index = try sema.structFieldIndex(block, struct_ty, field_name, field_src);
- const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[field_index]);
- const elem_ref = try sema.tupleField(block, inst_src, inst, field_src, field_i);
- const coerced = try sema.coerce(block, field_ty, elem_ref, field_src);
- field_refs[field_index] = coerced;
- if (struct_type.fieldIsComptime(ip, field_index)) {
+ const struct_field_index = try sema.structFieldIndex(block, struct_ty, field_name, field_src);
+ const struct_field_ty = Type.fromInterned(struct_type.field_types.get(ip)[struct_field_index]);
+ const elem_ref = try sema.tupleField(block, inst_src, inst, field_src, @intCast(tuple_field_index));
+ const coerced = try sema.coerce(block, struct_field_ty, elem_ref, field_src);
+ field_refs[struct_field_index] = coerced;
+ if (struct_type.fieldIsComptime(ip, struct_field_index)) {
const init_val = (try sema.resolveValue(coerced)) orelse {
return sema.failWithNeededComptime(block, field_src, .{
.needed_comptime_reason = "value stored in comptime field must be comptime-known",
});
};
- const field_init = Value.fromInterned(struct_type.field_inits.get(ip)[field_index]);
- if (!init_val.eql(field_init, field_ty, sema.mod)) {
- return sema.failWithInvalidComptimeFieldStore(block, field_src, inst_ty, field_i);
+ const field_init = Value.fromInterned(struct_type.field_inits.get(ip)[struct_field_index]);
+ if (!init_val.eql(field_init, struct_field_ty, sema.mod)) {
+ return sema.failWithInvalidComptimeFieldStore(block, field_src, inst_ty, tuple_field_index);
}
}
if (runtime_src == null) {
if (try sema.resolveValue(coerced)) |field_val| {
- field_vals[field_index] = field_val.toIntern();
+ field_vals[struct_field_index] = field_val.toIntern();
} else {
runtime_src = field_src;
}
@@ -32382,24 +32441,23 @@ fn coerceTupleToTuple(
for (0..dest_field_count) |field_index_usize| {
const field_i: u32 = @intCast(field_index_usize);
const field_src = inst_src; // TODO better source location
- // https://github.com/ziglang/zig/issues/15709
const field_name: InternPool.NullTerminatedString = switch (ip.indexToKey(inst_ty.toIntern())) {
.anon_struct_type => |anon_struct_type| if (anon_struct_type.names.len > 0)
anon_struct_type.names.get(ip)[field_i]
else
- try ip.getOrPutStringFmt(sema.gpa, "{d}", .{field_i}),
+ try ip.getOrPutStringFmt(sema.gpa, "{d}", .{field_i}, .no_embedded_nulls),
.struct_type => s: {
const struct_type = ip.loadStructType(inst_ty.toIntern());
if (struct_type.field_names.len > 0) {
break :s struct_type.field_names.get(ip)[field_i];
} else {
- break :s try ip.getOrPutStringFmt(sema.gpa, "{d}", .{field_i});
+ break :s try ip.getOrPutStringFmt(sema.gpa, "{d}", .{field_i}, .no_embedded_nulls);
}
},
else => unreachable,
};
- if (ip.stringEqlSlice(field_name, "len"))
+ if (field_name.eqlSlice("len", ip))
return sema.fail(block, field_src, "cannot assign to 'len' field of tuple", .{});
const field_ty = switch (ip.indexToKey(tuple_ty.toIntern())) {
@@ -34196,7 +34254,7 @@ const PeerResolveResult = union(enum) {
/// There was an error when resolving the type of a struct or tuple field.
field_error: struct {
/// The name of the field which caused the failure.
- field_name: []const u8,
+ field_name: InternPool.NullTerminatedString,
/// The type of this field in each peer.
field_types: []Type,
/// The error from resolving the field type. Guaranteed not to be `success`.
@@ -34237,8 +34295,8 @@ const PeerResolveResult = union(enum) {
};
},
.field_error => |field_error| {
- const fmt = "struct field '{s}' has conflicting types";
- const args = .{field_error.field_name};
+ const fmt = "struct field '{}' has conflicting types";
+ const args = .{field_error.field_name.fmt(&mod.intern_pool)};
if (opt_msg) |msg| {
try sema.errNote(block, src, msg, fmt, args);
} else {
@@ -35321,7 +35379,7 @@ fn resolvePeerTypesInner(
const sub_peer_tys = try sema.arena.alloc(?Type, peer_tys.len);
const sub_peer_vals = try sema.arena.alloc(?Value, peer_vals.len);
- for (field_types, field_vals, 0..) |*field_ty, *field_val, field_idx| {
+ for (field_types, field_vals, 0..) |*field_ty, *field_val, field_index| {
// Fill buffers with types and values of the field
for (peer_tys, peer_vals, sub_peer_tys, sub_peer_vals) |opt_ty, opt_val, *peer_field_ty, *peer_field_val| {
const ty = opt_ty orelse {
@@ -35329,8 +35387,8 @@ fn resolvePeerTypesInner(
peer_field_val.* = null;
continue;
};
- peer_field_ty.* = ty.structFieldType(field_idx, mod);
- peer_field_val.* = if (opt_val) |val| try val.fieldValue(mod, field_idx) else null;
+ peer_field_ty.* = ty.structFieldType(field_index, mod);
+ peer_field_val.* = if (opt_val) |val| try val.fieldValue(mod, field_index) else null;
}
// Resolve field type recursively
@@ -35339,9 +35397,10 @@ fn resolvePeerTypesInner(
else => |result| {
const result_buf = try sema.arena.create(PeerResolveResult);
result_buf.* = result;
- const field_name = if (is_tuple) name: {
- break :name try std.fmt.allocPrint(sema.arena, "{d}", .{field_idx});
- } else try sema.arena.dupe(u8, ip.stringToSlice(field_names[field_idx]));
+ const field_name = if (is_tuple)
+ try ip.getOrPutStringFmt(sema.gpa, "{d}", .{field_index}, .no_embedded_nulls)
+ else
+ field_names[field_index];
// The error info needs the field types, but we can't reuse sub_peer_tys
// since the recursive call may have clobbered it.
@@ -35350,7 +35409,7 @@ fn resolvePeerTypesInner(
// Already-resolved types won't be referenced by the error so it's fine
// to leave them undefined.
const ty = opt_ty orelse continue;
- peer_field_ty.* = ty.structFieldType(field_idx, mod);
+ peer_field_ty.* = ty.structFieldType(field_index, mod);
}
return .{ .field_error = .{
@@ -35369,7 +35428,7 @@ fn resolvePeerTypesInner(
const struct_ty = opt_ty orelse continue;
try sema.resolveStructFieldInits(struct_ty);
- const uncoerced_field_val = try struct_ty.structFieldValueComptime(mod, field_idx) orelse {
+ const uncoerced_field_val = try struct_ty.structFieldValueComptime(mod, field_index) orelse {
comptime_val = null;
break;
};
@@ -36811,7 +36870,7 @@ fn semaStructFields(
// This string needs to outlive the ZIR code.
if (opt_field_name_zir) |field_name_zir| {
- const field_name = try ip.getOrPutString(gpa, field_name_zir);
+ const field_name = try ip.getOrPutString(gpa, field_name_zir, .no_embedded_nulls);
assert(struct_type.addFieldName(ip, field_name) == null);
}
@@ -37342,7 +37401,7 @@ fn semaUnionFields(mod: *Module, arena: Allocator, union_type: InternPool.Loaded
}
// This string needs to outlive the ZIR code.
- const field_name = try ip.getOrPutString(gpa, field_name_zir);
+ const field_name = try ip.getOrPutString(gpa, field_name_zir, .no_embedded_nulls);
if (enum_field_names.len != 0) {
enum_field_names[field_i] = field_name;
}
@@ -37528,7 +37587,12 @@ fn generateUnionTagTypeNumbered(
const new_decl_index = try mod.allocateNewDecl(block.namespace, src_decl.src_node);
errdefer mod.destroyDecl(new_decl_index);
const fqn = try union_owner_decl.fullyQualifiedName(mod);
- const name = try ip.getOrPutStringFmt(gpa, "@typeInfo({}).Union.tag_type.?", .{fqn.fmt(ip)});
+ const name = try ip.getOrPutStringFmt(
+ gpa,
+ "@typeInfo({}).Union.tag_type.?",
+ .{fqn.fmt(ip)},
+ .no_embedded_nulls,
+ );
try mod.initNewAnonDecl(
new_decl_index,
src_decl.src_line,
@@ -37574,7 +37638,12 @@ fn generateUnionTagTypeSimple(
const src_decl = mod.declPtr(block.src_decl);
const new_decl_index = try mod.allocateNewDecl(block.namespace, src_decl.src_node);
errdefer mod.destroyDecl(new_decl_index);
- const name = try ip.getOrPutStringFmt(gpa, "@typeInfo({}).Union.tag_type.?", .{fqn.fmt(ip)});
+ const name = try ip.getOrPutStringFmt(
+ gpa,
+ "@typeInfo({}).Union.tag_type.?",
+ .{fqn.fmt(ip)},
+ .no_embedded_nulls,
+ );
try mod.initNewAnonDecl(
new_decl_index,
src_decl.src_line,
@@ -37638,7 +37707,7 @@ fn getBuiltinDecl(sema: *Sema, block: *Block, name: []const u8) CompileError!Int
block,
src,
mod.declPtr(std_file.root_decl.unwrap().?).src_namespace.toOptional(),
- try ip.getOrPutString(gpa, "builtin"),
+ try ip.getOrPutString(gpa, "builtin", .no_embedded_nulls),
)) orelse @panic("lib/std.zig is corrupt and missing 'builtin'");
const builtin_inst = try sema.analyzeLoad(block, src, opt_builtin_inst, src);
const builtin_ty = sema.analyzeAsType(block, src, builtin_inst) catch |err| switch (err) {
@@ -37649,7 +37718,7 @@ fn getBuiltinDecl(sema: *Sema, block: *Block, name: []const u8) CompileError!Int
block,
src,
builtin_ty.getNamespaceIndex(mod),
- try ip.getOrPutString(gpa, name),
+ try ip.getOrPutString(gpa, name, .no_embedded_nulls),
)) orelse std.debug.panic("lib/std/builtin.zig is corrupt and missing '{s}'", .{name});
return decl_index;
}
@@ -38820,7 +38889,7 @@ fn intFitsInType(
.aggregate => |aggregate| {
assert(ty.zigTypeTag(mod) == .Vector);
return switch (aggregate.storage) {
- .bytes => |bytes| for (bytes, 0..) |byte, i| {
+ .bytes => |bytes| for (bytes.toSlice(ty.vectorLen(mod), &mod.intern_pool), 0..) |byte, i| {
if (byte == 0) continue;
const actual_needed_bits = std.math.log2(byte) + 1 + @intFromBool(info.signedness == .signed);
if (info.bits >= actual_needed_bits) continue;
diff --git a/src/Value.zig b/src/Value.zig
index 7a9775e198..0f8dc5f7dc 100644
--- a/src/Value.zig
+++ b/src/Value.zig
@@ -52,30 +52,31 @@ pub fn toIpString(val: Value, ty: Type, mod: *Module) !InternPool.NullTerminated
assert(ty.zigTypeTag(mod) == .Array);
assert(ty.childType(mod).toIntern() == .u8_type);
const ip = &mod.intern_pool;
- return switch (mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage) {
- .bytes => |bytes| try ip.getOrPutString(mod.gpa, bytes),
- .elems => try arrayToIpString(val, ty.arrayLen(mod), mod),
+ switch (mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage) {
+ .bytes => |bytes| return bytes.toNullTerminatedString(ty.arrayLen(mod), ip),
+ .elems => return arrayToIpString(val, ty.arrayLen(mod), mod),
.repeated_elem => |elem| {
- const byte = @as(u8, @intCast(Value.fromInterned(elem).toUnsignedInt(mod)));
- const len = @as(usize, @intCast(ty.arrayLen(mod)));
+ const byte: u8 = @intCast(Value.fromInterned(elem).toUnsignedInt(mod));
+ const len: usize = @intCast(ty.arrayLen(mod));
try ip.string_bytes.appendNTimes(mod.gpa, byte, len);
- return ip.getOrPutTrailingString(mod.gpa, len);
+ return ip.getOrPutTrailingString(mod.gpa, len, .no_embedded_nulls);
},
- };
+ }
}
/// Asserts that the value is representable as an array of bytes.
/// Copies the value into a freshly allocated slice of memory, which is owned by the caller.
pub fn toAllocatedBytes(val: Value, ty: Type, allocator: Allocator, mod: *Module) ![]u8 {
- return switch (mod.intern_pool.indexToKey(val.toIntern())) {
- .enum_literal => |enum_literal| allocator.dupe(u8, mod.intern_pool.stringToSlice(enum_literal)),
+ const ip = &mod.intern_pool;
+ return switch (ip.indexToKey(val.toIntern())) {
+ .enum_literal => |enum_literal| allocator.dupe(u8, enum_literal.toSlice(ip)),
.slice => |slice| try arrayToAllocatedBytes(val, Value.fromInterned(slice.len).toUnsignedInt(mod), allocator, mod),
.aggregate => |aggregate| switch (aggregate.storage) {
- .bytes => |bytes| try allocator.dupe(u8, bytes),
+ .bytes => |bytes| try allocator.dupe(u8, bytes.toSlice(ty.arrayLenIncludingSentinel(mod), ip)),
.elems => try arrayToAllocatedBytes(val, ty.arrayLen(mod), allocator, mod),
.repeated_elem => |elem| {
- const byte = @as(u8, @intCast(Value.fromInterned(elem).toUnsignedInt(mod)));
- const result = try allocator.alloc(u8, @as(usize, @intCast(ty.arrayLen(mod))));
+ const byte: u8 = @intCast(Value.fromInterned(elem).toUnsignedInt(mod));
+ const result = try allocator.alloc(u8, @intCast(ty.arrayLen(mod)));
@memset(result, byte);
return result;
},
@@ -85,10 +86,10 @@ pub fn toAllocatedBytes(val: Value, ty: Type, allocator: Allocator, mod: *Module
}
fn arrayToAllocatedBytes(val: Value, len: u64, allocator: Allocator, mod: *Module) ![]u8 {
- const result = try allocator.alloc(u8, @as(usize, @intCast(len)));
+ const result = try allocator.alloc(u8, @intCast(len));
for (result, 0..) |*elem, i| {
const elem_val = try val.elemValue(mod, i);
- elem.* = @as(u8, @intCast(elem_val.toUnsignedInt(mod)));
+ elem.* = @intCast(elem_val.toUnsignedInt(mod));
}
return result;
}
@@ -96,7 +97,7 @@ fn arrayToAllocatedBytes(val: Value, len: u64, allocator: Allocator, mod: *Modul
fn arrayToIpString(val: Value, len_u64: u64, mod: *Module) !InternPool.NullTerminatedString {
const gpa = mod.gpa;
const ip = &mod.intern_pool;
- const len = @as(usize, @intCast(len_u64));
+ const len: usize = @intCast(len_u64);
try ip.string_bytes.ensureUnusedCapacity(gpa, len);
for (0..len) |i| {
// I don't think elemValue has the possibility to affect ip.string_bytes. Let's
@@ -104,10 +105,10 @@ fn arrayToIpString(val: Value, len_u64: u64, mod: *Module) !InternPool.NullTermi
const prev = ip.string_bytes.items.len;
const elem_val = try val.elemValue(mod, i);
assert(ip.string_bytes.items.len == prev);
- const byte = @as(u8, @intCast(elem_val.toUnsignedInt(mod)));
+ const byte: u8 = @intCast(elem_val.toUnsignedInt(mod));
ip.string_bytes.appendAssumeCapacity(byte);
}
- return ip.getOrPutTrailingString(gpa, len);
+ return ip.getOrPutTrailingString(gpa, len, .no_embedded_nulls);
}
pub fn fromInterned(i: InternPool.Index) Value {
@@ -256,7 +257,7 @@ pub fn getUnsignedIntAdvanced(val: Value, mod: *Module, opt_sema: ?*Sema) !?u64
const base_addr = (try Value.fromInterned(field.base).getUnsignedIntAdvanced(mod, opt_sema)) orelse return null;
const struct_ty = Value.fromInterned(field.base).typeOf(mod).childType(mod);
if (opt_sema) |sema| try sema.resolveTypeLayout(struct_ty);
- return base_addr + struct_ty.structFieldOffset(@as(usize, @intCast(field.index)), mod);
+ return base_addr + struct_ty.structFieldOffset(@intCast(field.index), mod);
},
else => null,
},
@@ -351,17 +352,17 @@ pub fn writeToMemory(val: Value, ty: Type, mod: *Module, buffer: []u8) error{
bigint.writeTwosComplement(buffer[0..byte_count], endian);
},
.Float => switch (ty.floatBits(target)) {
- 16 => std.mem.writeInt(u16, buffer[0..2], @as(u16, @bitCast(val.toFloat(f16, mod))), endian),
- 32 => std.mem.writeInt(u32, buffer[0..4], @as(u32, @bitCast(val.toFloat(f32, mod))), endian),
- 64 => std.mem.writeInt(u64, buffer[0..8], @as(u64, @bitCast(val.toFloat(f64, mod))), endian),
- 80 => std.mem.writeInt(u80, buffer[0..10], @as(u80, @bitCast(val.toFloat(f80, mod))), endian),
- 128 => std.mem.writeInt(u128, buffer[0..16], @as(u128, @bitCast(val.toFloat(f128, mod))), endian),
+ 16 => std.mem.writeInt(u16, buffer[0..2], @bitCast(val.toFloat(f16, mod)), endian),
+ 32 => std.mem.writeInt(u32, buffer[0..4], @bitCast(val.toFloat(f32, mod)), endian),
+ 64 => std.mem.writeInt(u64, buffer[0..8], @bitCast(val.toFloat(f64, mod)), endian),
+ 80 => std.mem.writeInt(u80, buffer[0..10], @bitCast(val.toFloat(f80, mod)), endian),
+ 128 => std.mem.writeInt(u128, buffer[0..16], @bitCast(val.toFloat(f128, mod)), endian),
else => unreachable,
},
.Array => {
const len = ty.arrayLen(mod);
const elem_ty = ty.childType(mod);
- const elem_size = @as(usize, @intCast(elem_ty.abiSize(mod)));
+ const elem_size: usize = @intCast(elem_ty.abiSize(mod));
var elem_i: usize = 0;
var buf_off: usize = 0;
while (elem_i < len) : (elem_i += 1) {
@@ -380,17 +381,17 @@ pub fn writeToMemory(val: Value, ty: Type, mod: *Module, buffer: []u8) error{
const struct_type = mod.typeToStruct(ty) orelse return error.IllDefinedMemoryLayout;
switch (struct_type.layout) {
.auto => return error.IllDefinedMemoryLayout,
- .@"extern" => for (0..struct_type.field_types.len) |i| {
- const off: usize = @intCast(ty.structFieldOffset(i, mod));
+ .@"extern" => for (0..struct_type.field_types.len) |field_index| {
+ const off: usize = @intCast(ty.structFieldOffset(field_index, mod));
const field_val = Value.fromInterned(switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| {
- buffer[off] = bytes[i];
+ buffer[off] = bytes.at(field_index, ip);
continue;
},
- .elems => |elems| elems[i],
+ .elems => |elems| elems[field_index],
.repeated_elem => |elem| elem,
});
- const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[i]);
+ const field_ty = Type.fromInterned(struct_type.field_types.get(ip)[field_index]);
try writeToMemory(field_val, field_ty, mod, buffer[off..]);
},
.@"packed" => {
@@ -423,7 +424,7 @@ pub fn writeToMemory(val: Value, ty: Type, mod: *Module, buffer: []u8) error{
const field_index = mod.unionTagFieldIndex(union_obj, union_tag).?;
const field_type = Type.fromInterned(union_obj.field_types.get(&mod.intern_pool)[field_index]);
const field_val = try val.fieldValue(mod, field_index);
- const byte_count = @as(usize, @intCast(field_type.abiSize(mod)));
+ const byte_count: usize = @intCast(field_type.abiSize(mod));
return writeToMemory(field_val, field_type, mod, buffer[0..byte_count]);
} else {
const backing_ty = try ty.unionBackingType(mod);
@@ -471,7 +472,7 @@ pub fn writeToPackedMemory(
const target = mod.getTarget();
const endian = target.cpu.arch.endian();
if (val.isUndef(mod)) {
- const bit_size = @as(usize, @intCast(ty.bitSize(mod)));
+ const bit_size: usize = @intCast(ty.bitSize(mod));
std.mem.writeVarPackedInt(buffer, bit_offset, bit_size, @as(u1, 0), endian);
return;
}
@@ -507,17 +508,17 @@ pub fn writeToPackedMemory(
}
},
.Float => switch (ty.floatBits(target)) {
- 16 => std.mem.writePackedInt(u16, buffer, bit_offset, @as(u16, @bitCast(val.toFloat(f16, mod))), endian),
- 32 => std.mem.writePackedInt(u32, buffer, bit_offset, @as(u32, @bitCast(val.toFloat(f32, mod))), endian),
- 64 => std.mem.writePackedInt(u64, buffer, bit_offset, @as(u64, @bitCast(val.toFloat(f64, mod))), endian),
- 80 => std.mem.writePackedInt(u80, buffer, bit_offset, @as(u80, @bitCast(val.toFloat(f80, mod))), endian),
- 128 => std.mem.writePackedInt(u128, buffer, bit_offset, @as(u128, @bitCast(val.toFloat(f128, mod))), endian),
+ 16 => std.mem.writePackedInt(u16, buffer, bit_offset, @bitCast(val.toFloat(f16, mod)), endian),
+ 32 => std.mem.writePackedInt(u32, buffer, bit_offset, @bitCast(val.toFloat(f32, mod)), endian),
+ 64 => std.mem.writePackedInt(u64, buffer, bit_offset, @bitCast(val.toFloat(f64, mod)), endian),
+ 80 => std.mem.writePackedInt(u80, buffer, bit_offset, @bitCast(val.toFloat(f80, mod)), endian),
+ 128 => std.mem.writePackedInt(u128, buffer, bit_offset, @bitCast(val.toFloat(f128, mod)), endian),
else => unreachable,
},
.Vector => {
const elem_ty = ty.childType(mod);
- const elem_bit_size = @as(u16, @intCast(elem_ty.bitSize(mod)));
- const len = @as(usize, @intCast(ty.arrayLen(mod)));
+ const elem_bit_size: u16 = @intCast(elem_ty.bitSize(mod));
+ const len: usize = @intCast(ty.arrayLen(mod));
var bits: u16 = 0;
var elem_i: usize = 0;
@@ -644,22 +645,22 @@ pub fn readFromMemory(
.Float => return Value.fromInterned((try mod.intern(.{ .float = .{
.ty = ty.toIntern(),
.storage = switch (ty.floatBits(target)) {
- 16 => .{ .f16 = @as(f16, @bitCast(std.mem.readInt(u16, buffer[0..2], endian))) },
- 32 => .{ .f32 = @as(f32, @bitCast(std.mem.readInt(u32, buffer[0..4], endian))) },
- 64 => .{ .f64 = @as(f64, @bitCast(std.mem.readInt(u64, buffer[0..8], endian))) },
- 80 => .{ .f80 = @as(f80, @bitCast(std.mem.readInt(u80, buffer[0..10], endian))) },
- 128 => .{ .f128 = @as(f128, @bitCast(std.mem.readInt(u128, buffer[0..16], endian))) },
+ 16 => .{ .f16 = @bitCast(std.mem.readInt(u16, buffer[0..2], endian)) },
+ 32 => .{ .f32 = @bitCast(std.mem.readInt(u32, buffer[0..4], endian)) },
+ 64 => .{ .f64 = @bitCast(std.mem.readInt(u64, buffer[0..8], endian)) },
+ 80 => .{ .f80 = @bitCast(std.mem.readInt(u80, buffer[0..10], endian)) },
+ 128 => .{ .f128 = @bitCast(std.mem.readInt(u128, buffer[0..16], endian)) },
else => unreachable,
},
} }))),
.Array => {
const elem_ty = ty.childType(mod);
const elem_size = elem_ty.abiSize(mod);
- const elems = try arena.alloc(InternPool.Index, @as(usize, @intCast(ty.arrayLen(mod))));
+ const elems = try arena.alloc(InternPool.Index, @intCast(ty.arrayLen(mod)));
var offset: usize = 0;
for (elems) |*elem| {
elem.* = (try readFromMemory(elem_ty, mod, buffer[offset..], arena)).toIntern();
- offset += @as(usize, @intCast(elem_size));
+ offset += @intCast(elem_size);
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
@@ -795,7 +796,7 @@ pub fn readFromPackedMemory(
};
// Slow path, we have to construct a big-int
- const abi_size = @as(usize, @intCast(ty.abiSize(mod)));
+ const abi_size: usize = @intCast(ty.abiSize(mod));
const Limb = std.math.big.Limb;
const limb_count = (abi_size + @sizeOf(Limb) - 1) / @sizeOf(Limb);
const limbs_buffer = try arena.alloc(Limb, limb_count);
@@ -812,20 +813,20 @@ pub fn readFromPackedMemory(
.Float => return Value.fromInterned((try mod.intern(.{ .float = .{
.ty = ty.toIntern(),
.storage = switch (ty.floatBits(target)) {
- 16 => .{ .f16 = @as(f16, @bitCast(std.mem.readPackedInt(u16, buffer, bit_offset, endian))) },
- 32 => .{ .f32 = @as(f32, @bitCast(std.mem.readPackedInt(u32, buffer, bit_offset, endian))) },
- 64 => .{ .f64 = @as(f64, @bitCast(std.mem.readPackedInt(u64, buffer, bit_offset, endian))) },
- 80 => .{ .f80 = @as(f80, @bitCast(std.mem.readPackedInt(u80, buffer, bit_offset, endian))) },
- 128 => .{ .f128 = @as(f128, @bitCast(std.mem.readPackedInt(u128, buffer, bit_offset, endian))) },
+ 16 => .{ .f16 = @bitCast(std.mem.readPackedInt(u16, buffer, bit_offset, endian)) },
+ 32 => .{ .f32 = @bitCast(std.mem.readPackedInt(u32, buffer, bit_offset, endian)) },
+ 64 => .{ .f64 = @bitCast(std.mem.readPackedInt(u64, buffer, bit_offset, endian)) },
+ 80 => .{ .f80 = @bitCast(std.mem.readPackedInt(u80, buffer, bit_offset, endian)) },
+ 128 => .{ .f128 = @bitCast(std.mem.readPackedInt(u128, buffer, bit_offset, endian)) },
else => unreachable,
},
} }))),
.Vector => {
const elem_ty = ty.childType(mod);
- const elems = try arena.alloc(InternPool.Index, @as(usize, @intCast(ty.arrayLen(mod))));
+ const elems = try arena.alloc(InternPool.Index, @intCast(ty.arrayLen(mod)));
var bits: u16 = 0;
- const elem_bit_size = @as(u16, @intCast(elem_ty.bitSize(mod)));
+ const elem_bit_size: u16 = @intCast(elem_ty.bitSize(mod));
for (elems, 0..) |_, i| {
// On big-endian systems, LLVM reverses the element order of vectors by default
const tgt_elem_i = if (endian == .big) elems.len - i - 1 else i;
@@ -909,7 +910,7 @@ fn bigIntToFloat(limbs: []const std.math.big.Limb, positive: bool) f128 {
var i: usize = limbs.len;
while (i != 0) {
i -= 1;
- const limb: f128 = @as(f128, @floatFromInt(limbs[i]));
+ const limb: f128 = @floatFromInt(limbs[i]);
result = @mulAdd(f128, base, result, limb);
}
if (positive) {
@@ -934,7 +935,7 @@ pub fn ctz(val: Value, ty: Type, mod: *Module) u64 {
pub fn popCount(val: Value, ty: Type, mod: *Module) u64 {
var bigint_buf: BigIntSpace = undefined;
const bigint = val.toBigInt(&bigint_buf, mod);
- return @as(u64, @intCast(bigint.popCount(ty.intInfo(mod).bits)));
+ return @intCast(bigint.popCount(ty.intInfo(mod).bits));
}
pub fn bitReverse(val: Value, ty: Type, mod: *Module, arena: Allocator) !Value {
@@ -1191,7 +1192,7 @@ pub fn compareAllWithZeroAdvancedExtra(
inline else => |x| if (std.math.isNan(x)) return op == .neq,
},
.aggregate => |aggregate| return switch (aggregate.storage) {
- .bytes => |bytes| for (bytes) |byte| {
+ .bytes => |bytes| for (bytes.toSlice(lhs.typeOf(mod).arrayLenIncludingSentinel(mod), &mod.intern_pool)) |byte| {
if (!std.math.order(byte, 0).compare(op)) break false;
} else true,
.elems => |elems| for (elems) |elem| {
@@ -1279,7 +1280,7 @@ pub fn elemValue(val: Value, zcu: *Zcu, index: usize) Allocator.Error!Value {
if (index < len) return Value.fromInterned(switch (aggregate.storage) {
.bytes => |bytes| try zcu.intern(.{ .int = .{
.ty = .u8_type,
- .storage = .{ .u64 = bytes[index] },
+ .storage = .{ .u64 = bytes.at(index, ip) },
} }),
.elems => |elems| elems[index],
.repeated_elem => |elem| elem,
@@ -1318,28 +1319,37 @@ pub fn sliceArray(
start: usize,
end: usize,
) error{OutOfMemory}!Value {
- // TODO: write something like getCoercedInts to avoid needing to dupe
const mod = sema.mod;
- const aggregate = mod.intern_pool.indexToKey(val.toIntern()).aggregate;
- return Value.fromInterned(try mod.intern(.{ .aggregate = .{
- .ty = switch (mod.intern_pool.indexToKey(mod.intern_pool.typeOf(val.toIntern()))) {
- .array_type => |array_type| try mod.arrayType(.{
- .len = @as(u32, @intCast(end - start)),
- .child = array_type.child,
- .sentinel = if (end == array_type.len) array_type.sentinel else .none,
- }),
- .vector_type => |vector_type| try mod.vectorType(.{
- .len = @as(u32, @intCast(end - start)),
- .child = vector_type.child,
- }),
- else => unreachable,
- }.toIntern(),
- .storage = switch (aggregate.storage) {
- .bytes => .{ .bytes = try sema.arena.dupe(u8, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.bytes[start..end]) },
- .elems => .{ .elems = try sema.arena.dupe(InternPool.Index, mod.intern_pool.indexToKey(val.toIntern()).aggregate.storage.elems[start..end]) },
- .repeated_elem => |elem| .{ .repeated_elem = elem },
+ const ip = &mod.intern_pool;
+ return Value.fromInterned(try mod.intern(.{
+ .aggregate = .{
+ .ty = switch (mod.intern_pool.indexToKey(mod.intern_pool.typeOf(val.toIntern()))) {
+ .array_type => |array_type| try mod.arrayType(.{
+ .len = @intCast(end - start),
+ .child = array_type.child,
+ .sentinel = if (end == array_type.len) array_type.sentinel else .none,
+ }),
+ .vector_type => |vector_type| try mod.vectorType(.{
+ .len = @intCast(end - start),
+ .child = vector_type.child,
+ }),
+ else => unreachable,
+ }.toIntern(),
+ .storage = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
+ .bytes => |bytes| storage: {
+ try ip.string_bytes.ensureUnusedCapacity(sema.gpa, end - start + 1);
+ break :storage .{ .bytes = try ip.getOrPutString(
+ sema.gpa,
+ bytes.toSlice(end, ip)[start..],
+ .maybe_embedded_nulls,
+ ) };
+ },
+ // TODO: write something like getCoercedInts to avoid needing to dupe
+ .elems => |elems| .{ .elems = try sema.arena.dupe(InternPool.Index, elems[start..end]) },
+ .repeated_elem => |elem| .{ .repeated_elem = elem },
+ },
},
- } }));
+ }));
}
pub fn fieldValue(val: Value, mod: *Module, index: usize) !Value {
@@ -1350,7 +1360,7 @@ pub fn fieldValue(val: Value, mod: *Module, index: usize) !Value {
.aggregate => |aggregate| Value.fromInterned(switch (aggregate.storage) {
.bytes => |bytes| try mod.intern(.{ .int = .{
.ty = .u8_type,
- .storage = .{ .u64 = bytes[index] },
+ .storage = .{ .u64 = bytes.at(index, &mod.intern_pool) },
} }),
.elems => |elems| elems[index],
.repeated_elem => |elem| elem,
@@ -1461,7 +1471,7 @@ pub fn getErrorName(val: Value, mod: *const Module) InternPool.OptionalNullTermi
pub fn getErrorInt(val: Value, mod: *const Module) Module.ErrorInt {
return if (getErrorName(val, mod).unwrap()) |err_name|
- @as(Module.ErrorInt, @intCast(mod.global_error_set.getIndex(err_name).?))
+ @intCast(mod.global_error_set.getIndex(err_name).?)
else
0;
}
@@ -2413,14 +2423,14 @@ pub fn intTruncBitsAsValue(
for (result_data, 0..) |*scalar, i| {
const elem_val = try val.elemValue(mod, i);
const bits_elem = try bits.elemValue(mod, i);
- scalar.* = (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, @as(u16, @intCast(bits_elem.toUnsignedInt(mod))), mod)).toIntern();
+ scalar.* = (try intTruncScalar(elem_val, scalar_ty, allocator, signedness, @intCast(bits_elem.toUnsignedInt(mod)), mod)).toIntern();
}
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = ty.toIntern(),
.storage = .{ .elems = result_data },
} })));
}
- return intTruncScalar(val, ty, allocator, signedness, @as(u16, @intCast(bits.toUnsignedInt(mod))), mod);
+ return intTruncScalar(val, ty, allocator, signedness, @intCast(bits.toUnsignedInt(mod)), mod);
}
pub fn intTruncScalar(
@@ -2468,7 +2478,7 @@ pub fn shlScalar(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *M
// resorting to BigInt first.
var lhs_space: Value.BigIntSpace = undefined;
const lhs_bigint = lhs.toBigInt(&lhs_space, mod);
- const shift = @as(usize, @intCast(rhs.toUnsignedInt(mod)));
+ const shift: usize = @intCast(rhs.toUnsignedInt(mod));
const limbs = try allocator.alloc(
std.math.big.Limb,
lhs_bigint.limbs.len + (shift / (@sizeOf(std.math.big.Limb) * 8)) + 1,
@@ -2530,7 +2540,7 @@ pub fn shlWithOverflowScalar(
const info = ty.intInfo(mod);
var lhs_space: Value.BigIntSpace = undefined;
const lhs_bigint = lhs.toBigInt(&lhs_space, mod);
- const shift = @as(usize, @intCast(rhs.toUnsignedInt(mod)));
+ const shift: usize = @intCast(rhs.toUnsignedInt(mod));
const limbs = try allocator.alloc(
std.math.big.Limb,
lhs_bigint.limbs.len + (shift / (@sizeOf(std.math.big.Limb) * 8)) + 1,
@@ -2587,7 +2597,7 @@ pub fn shlSatScalar(
var lhs_space: Value.BigIntSpace = undefined;
const lhs_bigint = lhs.toBigInt(&lhs_space, mod);
- const shift = @as(usize, @intCast(rhs.toUnsignedInt(mod)));
+ const shift: usize = @intCast(rhs.toUnsignedInt(mod));
const limbs = try arena.alloc(
std.math.big.Limb,
std.math.big.int.calcTwosCompLimbCount(info.bits) + 1,
@@ -2659,7 +2669,7 @@ pub fn shrScalar(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *M
// resorting to BigInt first.
var lhs_space: Value.BigIntSpace = undefined;
const lhs_bigint = lhs.toBigInt(&lhs_space, mod);
- const shift = @as(usize, @intCast(rhs.toUnsignedInt(mod)));
+ const shift: usize = @intCast(rhs.toUnsignedInt(mod));
const result_limbs = lhs_bigint.limbs.len -| (shift / (@sizeOf(std.math.big.Limb) * 8));
if (result_limbs == 0) {
diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index b9f8259c05..ddde72345e 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -4345,8 +4345,8 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.data = .{ .reg = .x30 },
});
} else if (func_value.getExternFunc(mod)) |extern_func| {
- const decl_name = mod.intern_pool.stringToSlice(mod.declPtr(extern_func.decl).name);
- const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
+ const decl_name = mod.declPtr(extern_func.decl).name.toSlice(&mod.intern_pool);
+ const lib_name = extern_func.lib_name.toSlice(&mod.intern_pool);
if (self.bin_file.cast(link.File.MachO)) |macho_file| {
_ = macho_file;
@panic("TODO airCall");
diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig
index 022f2f9bee..83159ec80e 100644
--- a/src/arch/wasm/CodeGen.zig
+++ b/src/arch/wasm/CodeGen.zig
@@ -2199,9 +2199,9 @@ fn airCall(func: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
const atom = func.bin_file.getAtomPtr(atom_index);
const type_index = try func.bin_file.storeDeclType(extern_func.decl, func_type);
try func.bin_file.addOrUpdateImport(
- mod.intern_pool.stringToSlice(ext_decl.name),
+ ext_decl.name.toSlice(&mod.intern_pool),
atom.sym_index,
- mod.intern_pool.stringToSliceUnwrap(ext_decl.getOwnedExternFunc(mod).?.lib_name),
+ ext_decl.getOwnedExternFunc(mod).?.lib_name.toSlice(&mod.intern_pool),
type_index,
);
break :blk extern_func.decl;
@@ -7236,8 +7236,8 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
- const fqn = ip.stringToSlice(try mod.declPtr(enum_decl_index).fullyQualifiedName(mod));
- const func_name = try std.fmt.allocPrintZ(arena, "__zig_tag_name_{s}", .{fqn});
+ const fqn = try mod.declPtr(enum_decl_index).fullyQualifiedName(mod);
+ const func_name = try std.fmt.allocPrintZ(arena, "__zig_tag_name_{}", .{fqn.fmt(ip)});
// check if we already generated code for this.
if (func.bin_file.findGlobalSymbol(func_name)) |loc| {
@@ -7268,17 +7268,18 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
// generate an if-else chain for each tag value as well as constant.
const tag_names = enum_ty.enumFields(mod);
for (0..tag_names.len) |tag_index| {
- const tag_name = ip.stringToSlice(tag_names.get(ip)[tag_index]);
+ const tag_name = tag_names.get(ip)[tag_index];
+ const tag_name_len = tag_name.length(ip);
// for each tag name, create an unnamed const,
// and then get a pointer to its value.
const name_ty = try mod.arrayType(.{
- .len = tag_name.len,
+ .len = tag_name_len,
.child = .u8_type,
.sentinel = .zero_u8,
});
const name_val = try mod.intern(.{ .aggregate = .{
.ty = name_ty.toIntern(),
- .storage = .{ .bytes = tag_name },
+ .storage = .{ .bytes = tag_name.toString() },
} });
const tag_sym_index = try func.bin_file.lowerUnnamedConst(
Value.fromInterned(name_val),
@@ -7338,7 +7339,7 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
// store length
try writer.writeByte(std.wasm.opcode(.i32_const));
- try leb.writeULEB128(writer, @as(u32, @intCast(tag_name.len)));
+ try leb.writeULEB128(writer, @as(u32, @intCast(tag_name_len)));
try writer.writeByte(std.wasm.opcode(.i32_store));
try leb.writeULEB128(writer, encoded_alignment);
try leb.writeULEB128(writer, @as(u32, 4));
@@ -7359,7 +7360,7 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
// store length
try writer.writeByte(std.wasm.opcode(.i64_const));
- try leb.writeULEB128(writer, @as(u64, @intCast(tag_name.len)));
+ try leb.writeULEB128(writer, @as(u64, @intCast(tag_name_len)));
try writer.writeByte(std.wasm.opcode(.i64_store));
try leb.writeULEB128(writer, encoded_alignment);
try leb.writeULEB128(writer, @as(u32, 8));
diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig
index 7a90eacf54..c165baf7e8 100644
--- a/src/arch/x86_64/CodeGen.zig
+++ b/src/arch/x86_64/CodeGen.zig
@@ -2247,7 +2247,7 @@ fn genLazy(self: *Self, lazy_sym: link.File.LazySymbol) InnerError!void {
var data_off: i32 = 0;
const tag_names = enum_ty.enumFields(mod);
for (exitlude_jump_relocs, 0..) |*exitlude_jump_reloc, tag_index| {
- const tag_name_len = ip.stringToSlice(tag_names.get(ip)[tag_index]).len;
+ const tag_name_len = tag_names.get(ip)[tag_index].length(ip);
const tag_val = try mod.enumValueFieldIndex(enum_ty, @intCast(tag_index));
const tag_mcv = try self.genTypedValue(tag_val);
try self.genBinOpMir(.{ ._, .cmp }, enum_ty, enum_mcv, tag_mcv);
@@ -12314,8 +12314,8 @@ fn genCall(self: *Self, info: union(enum) {
},
.extern_func => |extern_func| {
const owner_decl = mod.declPtr(extern_func.decl);
- const lib_name = mod.intern_pool.stringToSliceUnwrap(extern_func.lib_name);
- const decl_name = mod.intern_pool.stringToSlice(owner_decl.name);
+ const lib_name = extern_func.lib_name.toSlice(&mod.intern_pool);
+ const decl_name = owner_decl.name.toSlice(&mod.intern_pool);
try self.genExternSymbolRef(.call, lib_name, decl_name);
},
else => return self.fail("TODO implement calling bitcasted functions", .{}),
diff --git a/src/codegen.zig b/src/codegen.zig
index 76be8be974..b45777564a 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -97,7 +97,7 @@ fn writeFloat(comptime F: type, f: F, target: Target, endian: std.builtin.Endian
_ = target;
const bits = @typeInfo(F).Float.bits;
const Int = @Type(.{ .Int = .{ .signedness = .unsigned, .bits = bits } });
- const int = @as(Int, @bitCast(f));
+ const int: Int = @bitCast(f);
mem.writeInt(Int, code[0..@divExact(bits, 8)], int, endian);
}
@@ -136,24 +136,24 @@ pub fn generateLazySymbol(
if (lazy_sym.ty.isAnyError(zcu)) {
alignment.* = .@"4";
const err_names = zcu.global_error_set.keys();
- mem.writeInt(u32, try code.addManyAsArray(4), @as(u32, @intCast(err_names.len)), endian);
+ mem.writeInt(u32, try code.addManyAsArray(4), @intCast(err_names.len), endian);
var offset = code.items.len;
try code.resize((1 + err_names.len + 1) * 4);
for (err_names) |err_name_nts| {
- const err_name = zcu.intern_pool.stringToSlice(err_name_nts);
- mem.writeInt(u32, code.items[offset..][0..4], @as(u32, @intCast(code.items.len)), endian);
+ const err_name = err_name_nts.toSlice(ip);
+ mem.writeInt(u32, code.items[offset..][0..4], @intCast(code.items.len), endian);
offset += 4;
try code.ensureUnusedCapacity(err_name.len + 1);
code.appendSliceAssumeCapacity(err_name);
code.appendAssumeCapacity(0);
}
- mem.writeInt(u32, code.items[offset..][0..4], @as(u32, @intCast(code.items.len)), endian);
+ mem.writeInt(u32, code.items[offset..][0..4], @intCast(code.items.len), endian);
return Result.ok;
} else if (lazy_sym.ty.zigTypeTag(zcu) == .Enum) {
alignment.* = .@"1";
const tag_names = lazy_sym.ty.enumFields(zcu);
for (0..tag_names.len) |tag_index| {
- const tag_name = zcu.intern_pool.stringToSlice(tag_names.get(ip)[tag_index]);
+ const tag_name = tag_names.get(ip)[tag_index].toSlice(ip);
try code.ensureUnusedCapacity(tag_name.len + 1);
code.appendSliceAssumeCapacity(tag_name);
code.appendAssumeCapacity(0);
@@ -241,13 +241,13 @@ pub fn generateSymbol(
},
.err => |err| {
const int = try mod.getErrorValue(err.name);
- try code.writer().writeInt(u16, @as(u16, @intCast(int)), endian);
+ try code.writer().writeInt(u16, @intCast(int), endian);
},
.error_union => |error_union| {
const payload_ty = ty.errorUnionPayload(mod);
- const err_val = switch (error_union.val) {
- .err_name => |err_name| @as(u16, @intCast(try mod.getErrorValue(err_name))),
- .payload => @as(u16, 0),
+ const err_val: u16 = switch (error_union.val) {
+ .err_name => |err_name| @intCast(try mod.getErrorValue(err_name)),
+ .payload => 0,
};
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
@@ -357,15 +357,13 @@ pub fn generateSymbol(
},
.aggregate => |aggregate| switch (ip.indexToKey(ty.toIntern())) {
.array_type => |array_type| switch (aggregate.storage) {
- .bytes => |bytes| try code.appendSlice(bytes),
+ .bytes => |bytes| try code.appendSlice(bytes.toSlice(array_type.lenIncludingSentinel(), ip)),
.elems, .repeated_elem => {
var index: u64 = 0;
- const len_including_sentinel =
- array_type.len + @intFromBool(array_type.sentinel != .none);
- while (index < len_including_sentinel) : (index += 1) {
+ while (index < array_type.lenIncludingSentinel()) : (index += 1) {
switch (try generateSymbol(bin_file, src_loc, Value.fromInterned(switch (aggregate.storage) {
.bytes => unreachable,
- .elems => |elems| elems[@as(usize, @intCast(index))],
+ .elems => |elems| elems[@intCast(index)],
.repeated_elem => |elem| if (index < array_type.len)
elem
else
@@ -399,7 +397,7 @@ pub fn generateSymbol(
}) {
.bool_true => true,
.bool_false => false,
- else => |elem| switch (mod.intern_pool.indexToKey(elem)) {
+ else => |elem| switch (ip.indexToKey(elem)) {
.undef => continue,
.int => |int| switch (int.storage) {
.u64 => |x| switch (x) {
@@ -420,7 +418,7 @@ pub fn generateSymbol(
}
} else {
switch (aggregate.storage) {
- .bytes => |bytes| try code.appendSlice(bytes),
+ .bytes => |bytes| try code.appendSlice(bytes.toSlice(vector_type.len, ip)),
.elems, .repeated_elem => {
var index: u64 = 0;
while (index < vector_type.len) : (index += 1) {
@@ -457,7 +455,7 @@ pub fn generateSymbol(
const field_val = switch (aggregate.storage) {
.bytes => |bytes| try ip.get(mod.gpa, .{ .int = .{
.ty = field_ty,
- .storage = .{ .u64 = bytes[index] },
+ .storage = .{ .u64 = bytes.at(index, ip) },
} }),
.elems => |elems| elems[index],
.repeated_elem => |elem| elem,
@@ -493,7 +491,7 @@ pub fn generateSymbol(
const field_val = switch (aggregate.storage) {
.bytes => |bytes| try ip.get(mod.gpa, .{ .int = .{
.ty = field_ty,
- .storage = .{ .u64 = bytes[index] },
+ .storage = .{ .u64 = bytes.at(index, ip) },
} }),
.elems => |elems| elems[index],
.repeated_elem => |elem| elem,
@@ -513,7 +511,7 @@ pub fn generateSymbol(
} else {
Value.fromInterned(field_val).writeToPackedMemory(Type.fromInterned(field_ty), mod, code.items[current_pos..], bits) catch unreachable;
}
- bits += @as(u16, @intCast(Type.fromInterned(field_ty).bitSize(mod)));
+ bits += @intCast(Type.fromInterned(field_ty).bitSize(mod));
}
},
.auto, .@"extern" => {
@@ -529,7 +527,7 @@ pub fn generateSymbol(
const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| try ip.get(mod.gpa, .{ .int = .{
.ty = field_ty,
- .storage = .{ .u64 = bytes[field_index] },
+ .storage = .{ .u64 = bytes.at(field_index, ip) },
} }),
.elems => |elems| elems[field_index],
.repeated_elem => |elem| elem,
@@ -625,7 +623,8 @@ fn lowerParentPtr(
reloc_info: RelocInfo,
) CodeGenError!Result {
const mod = bin_file.comp.module.?;
- const ptr = mod.intern_pool.indexToKey(parent_ptr).ptr;
+ const ip = &mod.intern_pool;
+ const ptr = ip.indexToKey(parent_ptr).ptr;
return switch (ptr.addr) {
.decl => |decl| try lowerDeclRef(bin_file, src_loc, decl, code, debug_output, reloc_info),
.anon_decl => |ad| try lowerAnonDeclRef(bin_file, src_loc, ad, code, debug_output, reloc_info),
@@ -636,10 +635,10 @@ fn lowerParentPtr(
eu_payload,
code,
debug_output,
- reloc_info.offset(@as(u32, @intCast(errUnionPayloadOffset(
- Type.fromInterned(mod.intern_pool.typeOf(eu_payload)),
+ reloc_info.offset(@intCast(errUnionPayloadOffset(
+ Type.fromInterned(ip.typeOf(eu_payload)),
mod,
- )))),
+ ))),
),
.opt_payload => |opt_payload| try lowerParentPtr(
bin_file,
@@ -655,19 +654,19 @@ fn lowerParentPtr(
elem.base,
code,
debug_output,
- reloc_info.offset(@as(u32, @intCast(elem.index *
- Type.fromInterned(mod.intern_pool.typeOf(elem.base)).elemType2(mod).abiSize(mod)))),
+ reloc_info.offset(@intCast(elem.index *
+ Type.fromInterned(ip.typeOf(elem.base)).elemType2(mod).abiSize(mod))),
),
.field => |field| {
- const base_ptr_ty = mod.intern_pool.typeOf(field.base);
- const base_ty = mod.intern_pool.indexToKey(base_ptr_ty).ptr_type.child;
+ const base_ptr_ty = ip.typeOf(field.base);
+ const base_ty = ip.indexToKey(base_ptr_ty).ptr_type.child;
return lowerParentPtr(
bin_file,
src_loc,
field.base,
code,
debug_output,
- reloc_info.offset(switch (mod.intern_pool.indexToKey(base_ty)) {
+ reloc_info.offset(switch (ip.indexToKey(base_ty)) {
.ptr_type => |ptr_type| switch (ptr_type.flags.size) {
.One, .Many, .C => unreachable,
.Slice => switch (field.index) {
@@ -723,11 +722,12 @@ fn lowerAnonDeclRef(
) CodeGenError!Result {
_ = debug_output;
const zcu = lf.comp.module.?;
+ const ip = &zcu.intern_pool;
const target = lf.comp.root_mod.resolved_target.result;
const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
const decl_val = anon_decl.val;
- const decl_ty = Type.fromInterned(zcu.intern_pool.typeOf(decl_val));
+ const decl_ty = Type.fromInterned(ip.typeOf(decl_val));
log.debug("lowerAnonDecl: ty = {}", .{decl_ty.fmt(zcu)});
const is_fn_body = decl_ty.zigTypeTag(zcu) == .Fn;
if (!is_fn_body and !decl_ty.hasRuntimeBits(zcu)) {
@@ -735,7 +735,7 @@ fn lowerAnonDeclRef(
return Result.ok;
}
- const decl_align = zcu.intern_pool.indexToKey(anon_decl.orig_ty).ptr_type.flags.alignment;
+ const decl_align = ip.indexToKey(anon_decl.orig_ty).ptr_type.flags.alignment;
const res = try lf.lowerAnonDecl(decl_val, decl_align, src_loc);
switch (res) {
.ok => {},
@@ -787,8 +787,8 @@ fn lowerDeclRef(
});
const endian = target.cpu.arch.endian();
switch (ptr_width) {
- 16 => mem.writeInt(u16, try code.addManyAsArray(2), @as(u16, @intCast(vaddr)), endian),
- 32 => mem.writeInt(u32, try code.addManyAsArray(4), @as(u32, @intCast(vaddr)), endian),
+ 16 => mem.writeInt(u16, try code.addManyAsArray(2), @intCast(vaddr), endian),
+ 32 => mem.writeInt(u32, try code.addManyAsArray(4), @intCast(vaddr), endian),
64 => mem.writeInt(u64, try code.addManyAsArray(8), vaddr, endian),
else => unreachable,
}
@@ -859,6 +859,7 @@ fn genDeclRef(
ptr_decl_index: InternPool.DeclIndex,
) CodeGenError!GenResult {
const zcu = lf.comp.module.?;
+ const ip = &zcu.intern_pool;
const ty = val.typeOf(zcu);
log.debug("genDeclRef: val = {}", .{val.fmtValue(zcu)});
@@ -869,7 +870,7 @@ fn genDeclRef(
const ptr_bits = target.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);
- const decl_index = switch (zcu.intern_pool.indexToKey(ptr_decl.val.toIntern())) {
+ const decl_index = switch (ip.indexToKey(ptr_decl.val.toIntern())) {
.func => |func| func.owner_decl,
.extern_func => |extern_func| extern_func.decl,
else => ptr_decl_index,
@@ -909,12 +910,9 @@ fn genDeclRef(
if (lf.cast(link.File.Elf)) |elf_file| {
if (is_extern) {
- const name = zcu.intern_pool.stringToSlice(decl.name);
+ const name = decl.name.toSlice(ip);
// TODO audit this
- const lib_name = if (decl.getOwnedVariable(zcu)) |ov|
- zcu.intern_pool.stringToSliceUnwrap(ov.lib_name)
- else
- null;
+ const lib_name = if (decl.getOwnedVariable(zcu)) |ov| ov.lib_name.toSlice(ip) else null;
const sym_index = try elf_file.getGlobalSymbol(name, lib_name);
elf_file.symbol(elf_file.zigObjectPtr().?.symbol(sym_index)).flags.needs_got = true;
return GenResult.mcv(.{ .load_symbol = sym_index });
@@ -927,11 +925,8 @@ fn genDeclRef(
return GenResult.mcv(.{ .load_symbol = sym.esym_index });
} else if (lf.cast(link.File.MachO)) |macho_file| {
if (is_extern) {
- const name = zcu.intern_pool.stringToSlice(decl.name);
- const lib_name = if (decl.getOwnedVariable(zcu)) |ov|
- zcu.intern_pool.stringToSliceUnwrap(ov.lib_name)
- else
- null;
+ const name = decl.name.toSlice(ip);
+ const lib_name = if (decl.getOwnedVariable(zcu)) |ov| ov.lib_name.toSlice(ip) else null;
const sym_index = try macho_file.getGlobalSymbol(name, lib_name);
macho_file.getSymbol(macho_file.getZigObject().?.symbols.items[sym_index]).flags.needs_got = true;
return GenResult.mcv(.{ .load_symbol = sym_index });
@@ -944,12 +939,9 @@ fn genDeclRef(
return GenResult.mcv(.{ .load_symbol = sym.nlist_idx });
} else if (lf.cast(link.File.Coff)) |coff_file| {
if (is_extern) {
- const name = zcu.intern_pool.stringToSlice(decl.name);
+ const name = decl.name.toSlice(ip);
// TODO audit this
- const lib_name = if (decl.getOwnedVariable(zcu)) |ov|
- zcu.intern_pool.stringToSliceUnwrap(ov.lib_name)
- else
- null;
+ const lib_name = if (decl.getOwnedVariable(zcu)) |ov| ov.lib_name.toSlice(ip) else null;
const global_index = try coff_file.getGlobalSymbol(name, lib_name);
try coff_file.need_got_table.put(gpa, global_index, {}); // needs GOT
return GenResult.mcv(.{ .load_got = link.File.Coff.global_symbol_bit | global_index });
@@ -1012,6 +1004,7 @@ pub fn genTypedValue(
owner_decl_index: InternPool.DeclIndex,
) CodeGenError!GenResult {
const zcu = lf.comp.module.?;
+ const ip = &zcu.intern_pool;
const ty = val.typeOf(zcu);
log.debug("genTypedValue: val = {}", .{val.fmtValue(zcu)});
@@ -1024,7 +1017,7 @@ pub fn genTypedValue(
const target = namespace.file_scope.mod.resolved_target.result;
const ptr_bits = target.ptrBitWidth();
- if (!ty.isSlice(zcu)) switch (zcu.intern_pool.indexToKey(val.toIntern())) {
+ if (!ty.isSlice(zcu)) switch (ip.indexToKey(val.toIntern())) {
.ptr => |ptr| switch (ptr.addr) {
.decl => |decl| return genDeclRef(lf, src_loc, val, decl),
else => {},
@@ -1041,7 +1034,7 @@ pub fn genTypedValue(
return GenResult.mcv(.{ .immediate = 0 });
},
.none => {},
- else => switch (zcu.intern_pool.indexToKey(val.toIntern())) {
+ else => switch (ip.indexToKey(val.toIntern())) {
.int => {
return GenResult.mcv(.{ .immediate = val.toUnsignedInt(zcu) });
},
@@ -1052,8 +1045,8 @@ pub fn genTypedValue(
.Int => {
const info = ty.intInfo(zcu);
if (info.bits <= ptr_bits) {
- const unsigned = switch (info.signedness) {
- .signed => @as(u64, @bitCast(val.toSignedInt(zcu))),
+ const unsigned: u64 = switch (info.signedness) {
+ .signed => @bitCast(val.toSignedInt(zcu)),
.unsigned => val.toUnsignedInt(zcu),
};
return GenResult.mcv(.{ .immediate = unsigned });
@@ -1075,7 +1068,7 @@ pub fn genTypedValue(
}
},
.Enum => {
- const enum_tag = zcu.intern_pool.indexToKey(val.toIntern()).enum_tag;
+ const enum_tag = ip.indexToKey(val.toIntern()).enum_tag;
return genTypedValue(
lf,
src_loc,
@@ -1084,7 +1077,7 @@ pub fn genTypedValue(
);
},
.ErrorSet => {
- const err_name = zcu.intern_pool.indexToKey(val.toIntern()).err.name;
+ const err_name = ip.indexToKey(val.toIntern()).err.name;
const error_index = zcu.global_error_set.getIndex(err_name).?;
return GenResult.mcv(.{ .immediate = error_index });
},
@@ -1094,7 +1087,7 @@ pub fn genTypedValue(
if (!payload_type.hasRuntimeBitsIgnoreComptime(zcu)) {
// We use the error type directly as the type.
const err_int_ty = try zcu.errorIntType();
- switch (zcu.intern_pool.indexToKey(val.toIntern()).error_union.val) {
+ switch (ip.indexToKey(val.toIntern()).error_union.val) {
.err_name => |err_name| return genTypedValue(
lf,
src_loc,
diff --git a/src/codegen/c.zig b/src/codegen/c.zig
index 95f4ecc4ac..818267a8b8 100644
--- a/src/codegen/c.zig
+++ b/src/codegen/c.zig
@@ -43,10 +43,12 @@ pub const CValue = union(enum) {
decl_ref: InternPool.DeclIndex,
/// An undefined value (cannot be dereferenced)
undef: Type,
- /// Render the slice as an identifier (using fmtIdent)
+ /// Rendered as an identifier (using fmtIdent)
identifier: []const u8,
- /// Render the slice as an payload.identifier (using fmtIdent)
+ /// Rendered as "payload." followed by as identifier (using fmtIdent)
payload_identifier: []const u8,
+ /// Rendered with fmtCTypePoolString
+ ctype_pool_string: CType.Pool.String,
};
const BlockData = struct {
@@ -62,10 +64,10 @@ pub const LazyFnKey = union(enum) {
never_inline: InternPool.DeclIndex,
};
pub const LazyFnValue = struct {
- fn_name: CType.String,
+ fn_name: CType.Pool.String,
data: Data,
- pub const Data = union {
+ const Data = union {
tag_name: Type,
never_tail: void,
never_inline: void,
@@ -80,7 +82,7 @@ const Local = struct {
_: u20 = undefined,
},
- pub fn getType(local: Local) LocalType {
+ fn getType(local: Local) LocalType {
return .{ .ctype = local.ctype, .alignas = local.flags.alignas };
}
};
@@ -96,12 +98,20 @@ const ValueRenderLocation = enum {
StaticInitializer,
Other,
- pub fn isInitializer(self: ValueRenderLocation) bool {
- return switch (self) {
+ fn isInitializer(loc: ValueRenderLocation) bool {
+ return switch (loc) {
.Initializer, .StaticInitializer => true,
else => false,
};
}
+
+ fn toCTypeKind(loc: ValueRenderLocation) CType.Kind {
+ return switch (loc) {
+ .FunctionArgument => .parameter,
+ .Initializer, .Other => .complete,
+ .StaticInitializer => .global,
+ };
+ }
};
const BuiltinInfo = enum { none, bits };
@@ -234,12 +244,11 @@ fn isReservedIdent(ident: []const u8) bool {
fn formatIdent(
ident: []const u8,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
+ comptime fmt_str: []const u8,
+ _: std.fmt.FormatOptions,
writer: anytype,
-) !void {
- _ = options;
- const solo = fmt.len != 0 and fmt[0] == ' '; // space means solo; not part of a bigger ident.
+) @TypeOf(writer).Error!void {
+ const solo = fmt_str.len != 0 and fmt_str[0] == ' '; // space means solo; not part of a bigger ident.
if (solo and isReservedIdent(ident)) {
try writer.writeAll("zig_e_");
}
@@ -256,11 +265,32 @@ fn formatIdent(
}
}
}
-
pub fn fmtIdent(ident: []const u8) std.fmt.Formatter(formatIdent) {
return .{ .data = ident };
}
+const CTypePoolStringFormatData = struct {
+ ctype_pool_string: CType.Pool.String,
+ ctype_pool: *const CType.Pool,
+};
+fn formatCTypePoolString(
+ data: CTypePoolStringFormatData,
+ comptime fmt_str: []const u8,
+ fmt_opts: std.fmt.FormatOptions,
+ writer: anytype,
+) @TypeOf(writer).Error!void {
+ if (data.ctype_pool_string.toSlice(data.ctype_pool)) |slice|
+ try formatIdent(slice, fmt_str, fmt_opts, writer)
+ else
+ try writer.print("{}", .{data.ctype_pool_string.fmt(data.ctype_pool)});
+}
+pub fn fmtCTypePoolString(
+ ctype_pool_string: CType.Pool.String,
+ ctype_pool: *const CType.Pool,
+) std.fmt.Formatter(formatCTypePoolString) {
+ return .{ .data = .{ .ctype_pool_string = ctype_pool_string, .ctype_pool = ctype_pool } };
+}
+
// Returns true if `formatIdent` would make any edits to ident.
// This must be kept in sync with `formatIdent`.
pub fn isMangledIdent(ident: []const u8, solo: bool) bool {
@@ -321,7 +351,7 @@ pub const Function = struct {
try writer.writeAll(" = ");
try f.object.dg.renderValue(writer, val, .StaticInitializer);
try writer.writeAll(";\n ");
- break :result decl_c_value;
+ break :result .{ .local = decl_c_value.new_local };
} else .{ .constant = val };
gop.value_ptr.* = result;
@@ -377,27 +407,7 @@ pub const Function = struct {
switch (c_value) {
.none => unreachable,
.new_local, .local => |i| try w.print("t{d}", .{i}),
- .local_ref => |i| {
- const local = &f.locals.items[i];
- if (local.flags.alignas.abiOrder().compare(.lt)) {
- const gpa = f.object.dg.gpa;
- const mod = f.object.dg.mod;
- const ctype_pool = &f.object.dg.ctype_pool;
-
- try w.writeByte('(');
- try f.renderCType(w, try ctype_pool.getPointer(gpa, .{
- .elem_ctype = try ctype_pool.fromIntInfo(gpa, .{
- .signedness = .unsigned,
- .bits = @min(
- local.flags.alignas.toByteUnits(),
- mod.resolved_target.result.maxIntAlignment(),
- ) * 8,
- }, mod, .forward),
- }));
- try w.writeByte(')');
- }
- try w.print("&t{d}", .{i});
- },
+ .local_ref => |i| try w.print("&t{d}", .{i}),
.constant => |val| try f.object.dg.renderValue(w, val, location),
.arg => |i| try w.print("a{d}", .{i}),
.arg_array => |i| try f.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" }),
@@ -505,7 +515,7 @@ pub const Function = struct {
.never_inline,
=> |owner_decl| try ctype_pool.fmt(gpa, "zig_{s}_{}__{d}", .{
@tagName(key),
- fmtIdent(zcu.intern_pool.stringToSlice(zcu.declPtr(owner_decl).name)),
+ fmtIdent(zcu.declPtr(owner_decl).name.toSlice(&zcu.intern_pool)),
@intFromEnum(owner_decl),
}),
},
@@ -516,7 +526,7 @@ pub const Function = struct {
},
};
}
- return gop.value_ptr.fn_name.slice(ctype_pool);
+ return gop.value_ptr.fn_name.toSlice(ctype_pool).?;
}
pub fn deinit(f: *Function) void {
@@ -538,6 +548,43 @@ pub const Function = struct {
const zcu = f.object.dg.zcu;
return f.air.typeOfIndex(inst, &zcu.intern_pool);
}
+
+ fn copyCValue(f: *Function, ctype: CType, dst: CValue, src: CValue) !void {
+ switch (dst) {
+ .new_local, .local => |dst_local_index| switch (src) {
+ .new_local, .local => |src_local_index| if (dst_local_index == src_local_index) return,
+ else => {},
+ },
+ else => {},
+ }
+ const writer = f.object.writer();
+ const a = try Assignment.start(f, writer, ctype);
+ try f.writeCValue(writer, dst, .Other);
+ try a.assign(f, writer);
+ try f.writeCValue(writer, src, .Initializer);
+ try a.end(f, writer);
+ }
+
+ fn moveCValue(f: *Function, inst: Air.Inst.Index, ty: Type, src: CValue) !CValue {
+ switch (src) {
+ // Move the freshly allocated local to be owned by this instruction,
+ // by returning it here instead of freeing it.
+ .new_local => return src,
+ else => {
+ try freeCValue(f, inst, src);
+ const dst = try f.allocLocal(inst, ty);
+ try f.copyCValue(try f.ctypeFromType(ty, .complete), dst, src);
+ return dst;
+ },
+ }
+ }
+
+ fn freeCValue(f: *Function, inst: ?Air.Inst.Index, val: CValue) !void {
+ switch (val) {
+ .new_local => |local_index| try freeLocal(f, inst, local_index, null),
+ else => {},
+ }
+ }
};
/// This data is available when outputting .c code for a `Zcu`.
@@ -627,13 +674,14 @@ pub const DeclGen = struct {
// them). The analysis until now should ensure that the C function
// pointers are compatible. If they are not, then there is a bug
// somewhere and we should let the C compiler tell us about it.
- const elem_ctype = (try dg.ctypeFromType(ptr_ty, .complete)).info(ctype_pool).pointer.elem_ctype;
+ const ptr_ctype = try dg.ctypeFromType(ptr_ty, .complete);
+ const elem_ctype = ptr_ctype.info(ctype_pool).pointer.elem_ctype;
const decl_ctype = try dg.ctypeFromType(decl_ty, .complete);
const need_cast = !elem_ctype.eql(decl_ctype) and
(elem_ctype.info(ctype_pool) != .function or decl_ctype.info(ctype_pool) != .function);
if (need_cast) {
try writer.writeAll("((");
- try dg.renderType(writer, ptr_ty);
+ try dg.renderCType(writer, ptr_ctype);
try writer.writeByte(')');
}
try writer.writeByte('&');
@@ -692,13 +740,14 @@ pub const DeclGen = struct {
// them). The analysis until now should ensure that the C function
// pointers are compatible. If they are not, then there is a bug
// somewhere and we should let the C compiler tell us about it.
- const elem_ctype = (try dg.ctypeFromType(ty, .complete)).info(ctype_pool).pointer.elem_ctype;
+ const ctype = try dg.ctypeFromType(ty, .complete);
+ const elem_ctype = ctype.info(ctype_pool).pointer.elem_ctype;
const decl_ctype = try dg.ctypeFromType(decl_ty, .complete);
const need_cast = !elem_ctype.eql(decl_ctype) and
(elem_ctype.info(ctype_pool) != .function or decl_ctype.info(ctype_pool) != .function);
if (need_cast) {
try writer.writeAll("((");
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
try writer.writeByte('&');
@@ -822,25 +871,18 @@ pub const DeclGen = struct {
try dg.fmtIntLiteral(try zcu.intValue(Type.usize, byte_offset), .Other),
});
},
- .end => {
- const ptr_base_ctype = try dg.ctypeFromType(ptr_base_ty, .complete);
- if (!ptr_ctype.eql(ptr_base_ctype)) {
- try writer.writeByte('(');
- try dg.renderCType(writer, ptr_ctype);
- try writer.writeByte(')');
- }
- try writer.writeAll("((");
- try dg.renderParentPtr(writer, field.base, location);
- try writer.print(") + {})", .{
- try dg.fmtIntLiteral(try zcu.intValue(Type.usize, 1), .Other),
- });
- },
}
},
.comptime_field, .comptime_alloc => unreachable,
}
}
+ fn renderErrorName(dg: *DeclGen, writer: anytype, err_name: InternPool.NullTerminatedString) !void {
+ const zcu = dg.zcu;
+ const ip = &zcu.intern_pool;
+ try writer.print("zig_error_{}", .{fmtIdent(err_name.toSlice(ip))});
+ }
+
fn renderValue(
dg: *DeclGen,
writer: anytype,
@@ -850,6 +892,7 @@ pub const DeclGen = struct {
const zcu = dg.zcu;
const ip = &zcu.intern_pool;
const target = &dg.mod.resolved_target.result;
+ const ctype_pool = &dg.ctype_pool;
const initializer_type: ValueRenderLocation = switch (location) {
.StaticInitializer => .StaticInitializer,
@@ -858,6 +901,7 @@ pub const DeclGen = struct {
const ty = val.typeOf(zcu);
if (val.isUndefDeep(zcu)) return dg.renderUndefValue(writer, ty, location);
+ const ctype = try dg.ctypeFromType(ty, location.toCTypeKind());
switch (ip.indexToKey(val.toIntern())) {
// types, not values
.int_type,
@@ -903,76 +947,53 @@ pub const DeclGen = struct {
.u64, .i64, .big_int => try writer.print("{}", .{try dg.fmtIntLiteral(val, location)}),
.lazy_align, .lazy_size => {
try writer.writeAll("((");
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.print("){x})", .{try dg.fmtIntLiteral(
try zcu.intValue(Type.usize, val.toUnsignedInt(zcu)),
.Other,
)});
},
},
- .err => |err| try writer.print("zig_error_{}", .{
- fmtIdent(ip.stringToSlice(err.name)),
- }),
- .error_union => |error_union| {
- const payload_ty = ty.errorUnionPayload(zcu);
- const error_ty = ty.errorUnionSet(zcu);
- const err_int_ty = try zcu.errorIntType();
- if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
- switch (error_union.val) {
- .err_name => |err_name| return dg.renderValue(
- writer,
- Value.fromInterned((try zcu.intern(.{ .err = .{
- .ty = error_ty.toIntern(),
- .name = err_name,
- } }))),
- location,
- ),
- .payload => return dg.renderValue(
- writer,
- try zcu.intValue(err_int_ty, 0),
- location,
- ),
+ .err => |err| try dg.renderErrorName(writer, err.name),
+ .error_union => |error_union| switch (ctype.info(ctype_pool)) {
+ .basic => switch (error_union.val) {
+ .err_name => |err_name| try dg.renderErrorName(writer, err_name),
+ .payload => try writer.writeAll("0"),
+ },
+ .pointer, .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| {
+ if (!location.isInitializer()) {
+ try writer.writeByte('(');
+ try dg.renderCType(writer, ctype);
+ try writer.writeByte(')');
}
- }
-
- if (!location.isInitializer()) {
- try writer.writeByte('(');
- try dg.renderType(writer, ty);
- try writer.writeByte(')');
- }
-
- try writer.writeAll("{ .payload = ");
- try dg.renderValue(
- writer,
- Value.fromInterned(switch (error_union.val) {
- .err_name => (try zcu.undefValue(payload_ty)).toIntern(),
- .payload => |payload| payload,
- }),
- initializer_type,
- );
- try writer.writeAll(", .error = ");
- switch (error_union.val) {
- .err_name => |err_name| try dg.renderValue(
- writer,
- Value.fromInterned((try zcu.intern(.{ .err = .{
- .ty = error_ty.toIntern(),
- .name = err_name,
- } }))),
- location,
- ),
- .payload => try dg.renderValue(
- writer,
- try zcu.intValue(err_int_ty, 0),
- location,
- ),
- }
- try writer.writeAll(" }");
+ try writer.writeByte('{');
+ for (0..aggregate.fields.len) |field_index| {
+ if (field_index > 0) try writer.writeByte(',');
+ switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
+ .@"error" => switch (error_union.val) {
+ .err_name => |err_name| try dg.renderErrorName(writer, err_name),
+ .payload => try writer.writeByte('0'),
+ },
+ .payload => switch (error_union.val) {
+ .err_name => try dg.renderUndefValue(
+ writer,
+ ty.errorUnionPayload(zcu),
+ initializer_type,
+ ),
+ .payload => |payload| try dg.renderValue(
+ writer,
+ Value.fromInterned(payload),
+ initializer_type,
+ ),
+ },
+ else => unreachable,
+ }
+ }
+ try writer.writeByte('}');
+ },
},
- .enum_tag => |enum_tag| try dg.renderValue(
- writer,
- Value.fromInterned(enum_tag.int),
- location,
- ),
+ .enum_tag => |enum_tag| try dg.renderValue(writer, Value.fromInterned(enum_tag.int), location),
.float => {
const bits = ty.floatBits(target.*);
const f128_val = val.toFloat(f128, zcu);
@@ -1063,15 +1084,23 @@ pub const DeclGen = struct {
if (!empty) try writer.writeByte(')');
},
.slice => |slice| {
+ const aggregate = ctype.info(ctype_pool).aggregate;
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
try writer.writeByte('{');
- try dg.renderValue(writer, Value.fromInterned(slice.ptr), initializer_type);
- try writer.writeAll(", ");
- try dg.renderValue(writer, Value.fromInterned(slice.len), initializer_type);
+ for (0..aggregate.fields.len) |field_index| {
+ if (field_index > 0) try writer.writeByte(',');
+ try dg.renderValue(writer, Value.fromInterned(
+ switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
+ .ptr => slice.ptr,
+ .len => slice.len,
+ else => unreachable,
+ },
+ ), initializer_type);
+ }
try writer.writeByte('}');
},
.ptr => |ptr| switch (ptr.addr) {
@@ -1079,7 +1108,7 @@ pub const DeclGen = struct {
.anon_decl => |decl_val| try dg.renderAnonDeclValue(writer, val, decl_val, location),
.int => |int| {
try writer.writeAll("((");
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.print("){x})", .{try dg.fmtIntLiteral(Value.fromInterned(int), location)});
},
.eu_payload,
@@ -1089,54 +1118,80 @@ pub const DeclGen = struct {
=> try dg.renderParentPtr(writer, val.toIntern(), location),
.comptime_field, .comptime_alloc => unreachable,
},
- .opt => |opt| {
- const payload_ty = ty.optionalChild(zcu);
-
- const is_null_val = Value.makeBool(opt.val == .none);
- if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu))
- return dg.renderValue(writer, is_null_val, location);
-
- if (ty.optionalReprIsPayload(zcu)) return dg.renderValue(
- writer,
+ .opt => |opt| switch (ctype.info(ctype_pool)) {
+ .basic => if (ctype.isBool()) try writer.writeAll(switch (opt.val) {
+ .none => "true",
+ else => "false",
+ }) else switch (opt.val) {
+ .none => try writer.writeAll("0"),
+ else => |payload| switch (ip.indexToKey(payload)) {
+ .undef => |err_ty| try dg.renderUndefValue(
+ writer,
+ Type.fromInterned(err_ty),
+ location,
+ ),
+ .err => |err| try dg.renderErrorName(writer, err.name),
+ else => unreachable,
+ },
+ },
+ .pointer => switch (opt.val) {
+ .none => try writer.writeAll("NULL"),
+ else => |payload| try dg.renderValue(writer, Value.fromInterned(payload), location),
+ },
+ .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| {
switch (opt.val) {
- .none => switch (payload_ty.zigTypeTag(zcu)) {
- .ErrorSet => try zcu.intValue(try zcu.errorIntType(), 0),
- .Pointer => try zcu.getCoerced(val, payload_ty),
+ .none => {},
+ else => |payload| switch (aggregate.fields.at(0, ctype_pool).name.index) {
+ .is_null, .payload => {},
+ .ptr, .len => return dg.renderValue(
+ writer,
+ Value.fromInterned(payload),
+ location,
+ ),
else => unreachable,
},
- else => |payload| Value.fromInterned(payload),
- },
- location,
- );
-
- if (!location.isInitializer()) {
- try writer.writeByte('(');
- try dg.renderType(writer, ty);
- try writer.writeByte(')');
- }
-
- try writer.writeAll("{ .payload = ");
- switch (opt.val) {
- .none => try dg.renderUndefValue(writer, payload_ty, initializer_type),
- else => |payload| try dg.renderValue(
- writer,
- Value.fromInterned(payload),
- initializer_type,
- ),
- }
- try writer.writeAll(", .is_null = ");
- try dg.renderValue(writer, is_null_val, initializer_type);
- try writer.writeAll(" }");
+ }
+ if (!location.isInitializer()) {
+ try writer.writeByte('(');
+ try dg.renderCType(writer, ctype);
+ try writer.writeByte(')');
+ }
+ try writer.writeByte('{');
+ for (0..aggregate.fields.len) |field_index| {
+ if (field_index > 0) try writer.writeByte(',');
+ switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
+ .is_null => try writer.writeAll(switch (opt.val) {
+ .none => "true",
+ else => "false",
+ }),
+ .payload => switch (opt.val) {
+ .none => try dg.renderUndefValue(
+ writer,
+ ty.optionalChild(zcu),
+ initializer_type,
+ ),
+ else => |payload| try dg.renderValue(
+ writer,
+ Value.fromInterned(payload),
+ initializer_type,
+ ),
+ },
+ .ptr => try writer.writeAll("NULL"),
+ .len => try dg.renderUndefValue(writer, Type.usize, initializer_type),
+ else => unreachable,
+ }
+ }
+ try writer.writeByte('}');
+ },
},
.aggregate => switch (ip.indexToKey(ty.toIntern())) {
.array_type, .vector_type => {
if (location == .FunctionArgument) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
- // Fall back to generic implementation.
-
const ai = ty.arrayInfo(zcu);
if (ai.elem_type.eql(Type.u8, zcu)) {
var literal = stringLiteral(writer, ty.arrayLenIncludingSentinel(zcu));
@@ -1173,7 +1228,7 @@ pub const DeclGen = struct {
.anon_struct_type => |tuple| {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
@@ -1191,7 +1246,7 @@ pub const DeclGen = struct {
switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| try ip.get(zcu.gpa, .{ .int = .{
.ty = field_ty.toIntern(),
- .storage = .{ .u64 = bytes[field_index] },
+ .storage = .{ .u64 = bytes.at(field_index, ip) },
} }),
.elems => |elems| elems[field_index],
.repeated_elem => |elem| elem,
@@ -1209,7 +1264,7 @@ pub const DeclGen = struct {
.auto, .@"extern" => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
@@ -1225,7 +1280,7 @@ pub const DeclGen = struct {
const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| try ip.get(zcu.gpa, .{ .int = .{
.ty = field_ty.toIntern(),
- .storage = .{ .u64 = bytes[field_index] },
+ .storage = .{ .u64 = bytes.at(field_index, ip) },
} }),
.elems => |elems| elems[field_index],
.repeated_elem => |elem| elem,
@@ -1251,7 +1306,7 @@ pub const DeclGen = struct {
if (eff_num_fields == 0) {
try writer.writeByte('(');
- try dg.renderUndefValue(writer, ty, initializer_type);
+ try dg.renderUndefValue(writer, ty, location);
try writer.writeByte(')');
} else if (ty.bitSize(zcu) > 64) {
// zig_or_u128(zig_or_u128(zig_shl_u128(a, a_off), zig_shl_u128(b, b_off)), zig_shl_u128(c, c_off))
@@ -1271,7 +1326,7 @@ pub const DeclGen = struct {
const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| try ip.get(zcu.gpa, .{ .int = .{
.ty = field_ty.toIntern(),
- .storage = .{ .u64 = bytes[field_index] },
+ .storage = .{ .u64 = bytes.at(field_index, ip) },
} }),
.elems => |elems| elems[field_index],
.repeated_elem => |elem| elem,
@@ -1306,13 +1361,13 @@ pub const DeclGen = struct {
if (!empty) try writer.writeAll(" | ");
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
.bytes => |bytes| try ip.get(zcu.gpa, .{ .int = .{
.ty = field_ty.toIntern(),
- .storage = .{ .u64 = bytes[field_index] },
+ .storage = .{ .u64 = bytes.at(field_index, ip) },
} }),
.elems => |elems| elems[field_index],
.repeated_elem => |elem| elem,
@@ -1347,7 +1402,7 @@ pub const DeclGen = struct {
try dg.renderType(writer, backing_ty);
try writer.writeByte(')');
}
- try dg.renderValue(writer, Value.fromInterned(un.val), initializer_type);
+ try dg.renderValue(writer, Value.fromInterned(un.val), location);
},
.@"extern" => {
if (location == .StaticInitializer) {
@@ -1360,7 +1415,7 @@ pub const DeclGen = struct {
try writer.writeAll(")(");
try dg.renderType(writer, backing_ty);
try writer.writeAll("){");
- try dg.renderValue(writer, Value.fromInterned(un.val), initializer_type);
+ try dg.renderValue(writer, Value.fromInterned(un.val), location);
try writer.writeAll("})");
},
else => unreachable,
@@ -1368,7 +1423,7 @@ pub const DeclGen = struct {
} else {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
@@ -1379,43 +1434,56 @@ pub const DeclGen = struct {
if (field_ty.hasRuntimeBits(zcu)) {
if (field_ty.isPtrAtRuntime(zcu)) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
} else if (field_ty.zigTypeTag(zcu) == .Float) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
- try dg.renderValue(writer, Value.fromInterned(un.val), initializer_type);
- } else {
- try writer.writeAll("0");
- }
+ try dg.renderValue(writer, Value.fromInterned(un.val), location);
+ } else try writer.writeAll("0");
return;
}
- try writer.writeByte('{');
- if (ty.unionTagTypeSafety(zcu)) |_| {
- const layout = zcu.getUnionLayout(loaded_union);
- if (layout.tag_size != 0) {
- try writer.writeAll(" .tag = ");
- try dg.renderValue(writer, Value.fromInterned(un.tag), initializer_type);
+ const has_tag = loaded_union.hasTag(ip);
+ if (has_tag) try writer.writeByte('{');
+ const aggregate = ctype.info(ctype_pool).aggregate;
+ for (0..if (has_tag) aggregate.fields.len else 1) |outer_field_index| {
+ if (outer_field_index > 0) try writer.writeByte(',');
+ switch (if (has_tag)
+ aggregate.fields.at(outer_field_index, ctype_pool).name.index
+ else
+ .payload) {
+ .tag => try dg.renderValue(
+ writer,
+ Value.fromInterned(un.tag),
+ initializer_type,
+ ),
+ .payload => {
+ try writer.writeByte('{');
+ if (field_ty.hasRuntimeBits(zcu)) {
+ try writer.print(" .{ } = ", .{fmtIdent(field_name.toSlice(ip))});
+ try dg.renderValue(
+ writer,
+ Value.fromInterned(un.val),
+ initializer_type,
+ );
+ try writer.writeByte(' ');
+ } else for (0..loaded_union.field_types.len) |inner_field_index| {
+ const inner_field_ty = Type.fromInterned(
+ loaded_union.field_types.get(ip)[inner_field_index],
+ );
+ if (!inner_field_ty.hasRuntimeBits(zcu)) continue;
+ try dg.renderUndefValue(writer, inner_field_ty, initializer_type);
+ break;
+ }
+ try writer.writeByte('}');
+ },
+ else => unreachable,
}
- if (ty.unionHasAllZeroBitFieldTypes(zcu)) return try writer.writeByte('}');
- if (layout.tag_size != 0) try writer.writeByte(',');
- try writer.writeAll(" .payload = {");
- }
- if (field_ty.hasRuntimeBits(zcu)) {
- try writer.print(" .{ } = ", .{fmtIdent(ip.stringToSlice(field_name))});
- try dg.renderValue(writer, Value.fromInterned(un.val), initializer_type);
- try writer.writeByte(' ');
- } else for (0..loaded_union.field_types.len) |this_field_index| {
- const this_field_ty = Type.fromInterned(loaded_union.field_types.get(ip)[this_field_index]);
- if (!this_field_ty.hasRuntimeBits(zcu)) continue;
- try dg.renderUndefValue(writer, this_field_ty, initializer_type);
- break;
}
- if (ty.unionTagTypeSafety(zcu)) |_| try writer.writeByte('}');
- try writer.writeByte('}');
+ if (has_tag) try writer.writeByte('}');
}
},
}
@@ -1430,6 +1498,7 @@ pub const DeclGen = struct {
const zcu = dg.zcu;
const ip = &zcu.intern_pool;
const target = &dg.mod.resolved_target.result;
+ const ctype_pool = &dg.ctype_pool;
const initializer_type: ValueRenderLocation = switch (location) {
.StaticInitializer => .StaticInitializer,
@@ -1441,6 +1510,7 @@ pub const DeclGen = struct {
.ReleaseFast, .ReleaseSmall => false,
};
+ const ctype = try dg.ctypeFromType(ty, location.toCTypeKind());
switch (ty.toIntern()) {
.c_longdouble_type,
.f16_type,
@@ -1478,48 +1548,64 @@ pub const DeclGen = struct {
=> return writer.print("{x}", .{
try dg.fmtIntLiteral(try zcu.undefValue(ty), location),
}),
- .ptr_type => if (ty.isSlice(zcu)) {
- if (!location.isInitializer()) {
- try writer.writeByte('(');
- try dg.renderType(writer, ty);
- try writer.writeByte(')');
- }
+ .ptr_type => |ptr_type| switch (ptr_type.flags.size) {
+ .One, .Many, .C => {
+ try writer.writeAll("((");
+ try dg.renderCType(writer, ctype);
+ return writer.print("){x})", .{
+ try dg.fmtIntLiteral(try zcu.undefValue(Type.usize), .Other),
+ });
+ },
+ .Slice => {
+ if (!location.isInitializer()) {
+ try writer.writeByte('(');
+ try dg.renderCType(writer, ctype);
+ try writer.writeByte(')');
+ }
- try writer.writeAll("{(");
- const ptr_ty = ty.slicePtrFieldType(zcu);
- try dg.renderType(writer, ptr_ty);
- return writer.print("){x}, {0x}}}", .{
- try dg.fmtIntLiteral(try zcu.undefValue(Type.usize), .Other),
- });
- } else {
- try writer.writeAll("((");
- try dg.renderType(writer, ty);
- return writer.print("){x})", .{
- try dg.fmtIntLiteral(try zcu.undefValue(Type.usize), .Other),
- });
+ try writer.writeAll("{(");
+ const ptr_ty = ty.slicePtrFieldType(zcu);
+ try dg.renderType(writer, ptr_ty);
+ return writer.print("){x}, {0x}}}", .{
+ try dg.fmtIntLiteral(try zcu.undefValue(Type.usize), .Other),
+ });
+ },
},
- .opt_type => {
- const payload_ty = ty.optionalChild(zcu);
-
- if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
- return dg.renderUndefValue(writer, Type.bool, location);
- }
-
- if (ty.optionalReprIsPayload(zcu)) {
- return dg.renderUndefValue(writer, payload_ty, location);
- }
-
- if (!location.isInitializer()) {
- try writer.writeByte('(');
- try dg.renderType(writer, ty);
- try writer.writeByte(')');
- }
-
- try writer.writeAll("{ .payload = ");
- try dg.renderUndefValue(writer, payload_ty, initializer_type);
- try writer.writeAll(", .is_null = ");
- try dg.renderUndefValue(writer, Type.bool, initializer_type);
- return writer.writeAll(" }");
+ .opt_type => |child_type| switch (ctype.info(ctype_pool)) {
+ .basic, .pointer => try dg.renderUndefValue(
+ writer,
+ Type.fromInterned(if (ctype.isBool()) .bool_type else child_type),
+ location,
+ ),
+ .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| {
+ switch (aggregate.fields.at(0, ctype_pool).name.index) {
+ .is_null, .payload => {},
+ .ptr, .len => return dg.renderUndefValue(
+ writer,
+ Type.fromInterned(child_type),
+ location,
+ ),
+ else => unreachable,
+ }
+ if (!location.isInitializer()) {
+ try writer.writeByte('(');
+ try dg.renderCType(writer, ctype);
+ try writer.writeByte(')');
+ }
+ try writer.writeByte('{');
+ for (0..aggregate.fields.len) |field_index| {
+ if (field_index > 0) try writer.writeByte(',');
+ try dg.renderUndefValue(writer, Type.fromInterned(
+ switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
+ .is_null => .bool_type,
+ .payload => child_type,
+ else => unreachable,
+ },
+ ), initializer_type);
+ }
+ try writer.writeByte('}');
+ },
},
.struct_type => {
const loaded_struct = ip.loadStructType(ty.toIntern());
@@ -1527,7 +1613,7 @@ pub const DeclGen = struct {
.auto, .@"extern" => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
@@ -1552,7 +1638,7 @@ pub const DeclGen = struct {
.anon_struct_type => |anon_struct_info| {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
@@ -1575,54 +1661,80 @@ pub const DeclGen = struct {
.auto, .@"extern" => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
- try writer.writeByte('{');
- if (ty.unionTagTypeSafety(zcu)) |tag_ty| {
- const layout = ty.unionGetLayout(zcu);
- if (layout.tag_size != 0) {
- try writer.writeAll(" .tag = ");
- try dg.renderUndefValue(writer, tag_ty, initializer_type);
+ const has_tag = loaded_union.hasTag(ip);
+ if (has_tag) try writer.writeByte('{');
+ const aggregate = ctype.info(ctype_pool).aggregate;
+ for (0..if (has_tag) aggregate.fields.len else 1) |outer_field_index| {
+ if (outer_field_index > 0) try writer.writeByte(',');
+ switch (if (has_tag)
+ aggregate.fields.at(outer_field_index, ctype_pool).name.index
+ else
+ .payload) {
+ .tag => try dg.renderUndefValue(
+ writer,
+ Type.fromInterned(loaded_union.enum_tag_ty),
+ initializer_type,
+ ),
+ .payload => {
+ try writer.writeByte('{');
+ for (0..loaded_union.field_types.len) |inner_field_index| {
+ const inner_field_ty = Type.fromInterned(
+ loaded_union.field_types.get(ip)[inner_field_index],
+ );
+ if (!inner_field_ty.hasRuntimeBits(zcu)) continue;
+ try dg.renderUndefValue(
+ writer,
+ inner_field_ty,
+ initializer_type,
+ );
+ break;
+ }
+ try writer.writeByte('}');
+ },
+ else => unreachable,
}
- if (ty.unionHasAllZeroBitFieldTypes(zcu)) return try writer.writeByte('}');
- if (layout.tag_size != 0) try writer.writeByte(',');
- try writer.writeAll(" .payload = {");
}
- for (0..loaded_union.field_types.len) |field_index| {
- const field_ty = Type.fromInterned(loaded_union.field_types.get(ip)[field_index]);
- if (!field_ty.hasRuntimeBits(zcu)) continue;
- try dg.renderUndefValue(writer, field_ty, initializer_type);
- break;
- }
- if (ty.unionTagTypeSafety(zcu)) |_| try writer.writeByte('}');
- return writer.writeByte('}');
+ if (has_tag) try writer.writeByte('}');
},
.@"packed" => return writer.print("{x}", .{
try dg.fmtIntLiteral(try zcu.undefValue(ty), .Other),
}),
}
},
- .error_union_type => {
- const payload_ty = ty.errorUnionPayload(zcu);
- const error_ty = ty.errorUnionSet(zcu);
-
- if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
- return dg.renderUndefValue(writer, error_ty, location);
- }
-
- if (!location.isInitializer()) {
- try writer.writeByte('(');
- try dg.renderType(writer, ty);
- try writer.writeByte(')');
- }
-
- try writer.writeAll("{ .payload = ");
- try dg.renderUndefValue(writer, payload_ty, initializer_type);
- try writer.writeAll(", .error = ");
- try dg.renderUndefValue(writer, error_ty, initializer_type);
- return writer.writeAll(" }");
+ .error_union_type => |error_union_type| switch (ctype.info(ctype_pool)) {
+ .basic => try dg.renderUndefValue(
+ writer,
+ Type.fromInterned(error_union_type.error_set_type),
+ location,
+ ),
+ .pointer, .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| {
+ if (!location.isInitializer()) {
+ try writer.writeByte('(');
+ try dg.renderCType(writer, ctype);
+ try writer.writeByte(')');
+ }
+ try writer.writeByte('{');
+ for (0..aggregate.fields.len) |field_index| {
+ if (field_index > 0) try writer.writeByte(',');
+ try dg.renderUndefValue(
+ writer,
+ Type.fromInterned(
+ switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
+ .@"error" => error_union_type.error_set_type,
+ .payload => error_union_type.payload_type,
+ else => unreachable,
+ },
+ ),
+ initializer_type,
+ );
+ }
+ try writer.writeByte('}');
+ },
},
.array_type, .vector_type => {
const ai = ty.arrayInfo(zcu);
@@ -1637,7 +1749,7 @@ pub const DeclGen = struct {
} else {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderType(writer, ty);
+ try dg.renderCType(writer, ctype);
try writer.writeByte(')');
}
@@ -1687,6 +1799,7 @@ pub const DeclGen = struct {
name: union(enum) {
export_index: u32,
ident: []const u8,
+ fmt_ctype_pool_string: std.fmt.Formatter(formatCTypePoolString),
},
) !void {
const zcu = dg.zcu;
@@ -1730,6 +1843,7 @@ pub const DeclGen = struct {
try dg.renderDeclName(w, fn_decl_index, export_index);
},
.ident => |ident| try w.print("{}{ }", .{ trailing, fmtIdent(ident) }),
+ .fmt_ctype_pool_string => |fmt| try w.print("{}{ }", .{ trailing, fmt }),
}
try renderTypeSuffix(
@@ -1754,14 +1868,12 @@ pub const DeclGen = struct {
switch (name) {
.export_index => |export_index| mangled: {
const maybe_exports = zcu.decl_exports.get(fn_decl_index);
- const external_name = ip.stringToSlice(
- if (maybe_exports) |exports|
- exports.items[export_index].opts.name
- else if (fn_decl.isExtern(zcu))
- fn_decl.name
- else
- break :mangled,
- );
+ const external_name = (if (maybe_exports) |exports|
+ exports.items[export_index].opts.name
+ else if (fn_decl.isExtern(zcu))
+ fn_decl.name
+ else
+ break :mangled).toSlice(ip);
const is_mangled = isMangledIdent(external_name, true);
const is_export = export_index > 0;
if (is_mangled and is_export) {
@@ -1769,7 +1881,7 @@ pub const DeclGen = struct {
fmtIdent(external_name),
fmtStringLiteral(external_name, null),
fmtStringLiteral(
- ip.stringToSlice(maybe_exports.?.items[0].opts.name),
+ maybe_exports.?.items[0].opts.name.toSlice(ip),
null,
),
});
@@ -1780,14 +1892,14 @@ pub const DeclGen = struct {
} else if (is_export) {
try w.print(" zig_export({s}, {s})", .{
fmtStringLiteral(
- ip.stringToSlice(maybe_exports.?.items[0].opts.name),
+ maybe_exports.?.items[0].opts.name.toSlice(ip),
null,
),
fmtStringLiteral(external_name, null),
});
}
},
- .ident => {},
+ .ident, .fmt_ctype_pool_string => {},
}
},
.complete => {},
@@ -1815,11 +1927,11 @@ pub const DeclGen = struct {
/// | `renderTypeAndName` | "uint8_t *name" | "uint8_t *name[10]" |
/// | `renderType` | "uint8_t *" | "uint8_t *[10]" |
///
- fn renderType(dg: *DeclGen, w: anytype, t: Type) error{ OutOfMemory, AnalysisFail }!void {
+ fn renderType(dg: *DeclGen, w: anytype, t: Type) error{OutOfMemory}!void {
try dg.renderCType(w, try dg.ctypeFromType(t, .complete));
}
- fn renderCType(dg: *DeclGen, w: anytype, ctype: CType) error{ OutOfMemory, AnalysisFail }!void {
+ fn renderCType(dg: *DeclGen, w: anytype, ctype: CType) error{OutOfMemory}!void {
_ = try renderTypePrefix(dg.pass, &dg.ctype_pool, dg.zcu, w, ctype, .suffix, .{});
try renderTypeSuffix(dg.pass, &dg.ctype_pool, dg.zcu, w, ctype, .suffix, .{});
}
@@ -1844,7 +1956,26 @@ pub const DeclGen = struct {
}
}
};
+ fn intCastIsNoop(dg: *DeclGen, dest_ty: Type, src_ty: Type) bool {
+ const zcu = dg.zcu;
+ const dest_bits = dest_ty.bitSize(zcu);
+ const dest_int_info = dest_ty.intInfo(zcu);
+
+ const src_is_ptr = src_ty.isPtrAtRuntime(zcu);
+ const src_eff_ty: Type = if (src_is_ptr) switch (dest_int_info.signedness) {
+ .unsigned => Type.usize,
+ .signed => Type.isize,
+ } else src_ty;
+ const src_bits = src_eff_ty.bitSize(zcu);
+ const src_int_info = if (src_eff_ty.isAbiInt(zcu)) src_eff_ty.intInfo(zcu) else null;
+ if (dest_bits <= 64 and src_bits <= 64) {
+ const needs_cast = src_int_info == null or
+ (toCIntBits(dest_int_info.bits) != toCIntBits(src_int_info.?.bits) or
+ dest_int_info.signedness != src_int_info.?.signedness);
+ return !needs_cast and !src_is_ptr;
+ } else return false;
+ }
/// Renders a cast to an int type, from either an int or a pointer.
///
/// Some platforms don't have 128 bit integers, so we need to use
@@ -1858,7 +1989,14 @@ pub const DeclGen = struct {
/// | > 64 bit integer | pointer | zig_make_<dest_ty>(0, (zig_<u|i>size)src)
/// | > 64 bit integer | < 64 bit integer | zig_make_<dest_ty>(0, src)
/// | > 64 bit integer | > 64 bit integer | zig_make_<dest_ty>(zig_hi_<src_ty>(src), zig_lo_<src_ty>(src))
- fn renderIntCast(dg: *DeclGen, w: anytype, dest_ty: Type, context: IntCastContext, src_ty: Type, location: ValueRenderLocation) !void {
+ fn renderIntCast(
+ dg: *DeclGen,
+ w: anytype,
+ dest_ty: Type,
+ context: IntCastContext,
+ src_ty: Type,
+ location: ValueRenderLocation,
+ ) !void {
const zcu = dg.zcu;
const dest_bits = dest_ty.bitSize(zcu);
const dest_int_info = dest_ty.intInfo(zcu);
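
To make the cast table above concrete, a hedged plain-C sketch of the two-word composition performed when the target has no native 128-bit integer; the u128_words struct and the make_* helpers are illustrative stand-ins, not the actual zig.h definitions.

#include <stdint.h>

/* Illustrative two-word stand-in for a 128-bit unsigned integer. */
typedef struct { uint64_t hi; uint64_t lo; } u128_words;

/* "> 64 bit integer <- <= 64 bit integer": the high word is zero. */
static u128_words make_from_u64(uint64_t src) {
    return (u128_words){ .hi = 0, .lo = src };
}

/* "> 64 bit integer <- pointer": the address is widened through uintptr_t first. */
static u128_words make_from_ptr(const void *src) {
    return (u128_words){ .hi = 0, .lo = (uintptr_t)src };
}

/* "> 64 bit integer <- > 64 bit integer": rebuilt from the high and low words. */
static u128_words make_from_u128(u128_words src) {
    return (u128_words){ .hi = src.hi, .lo = src.lo };
}
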
@@ -2013,12 +2151,23 @@ pub const DeclGen = struct {
fmtIdent("payload"),
fmtIdent(ident),
}),
+ .ctype_pool_string => |string| try w.print("{ }", .{
+ fmtCTypePoolString(string, &dg.ctype_pool),
+ }),
}
}
fn writeCValueDeref(dg: *DeclGen, w: anytype, c_value: CValue) !void {
switch (c_value) {
- .none, .new_local, .local, .local_ref, .constant, .arg, .arg_array => unreachable,
+ .none,
+ .new_local,
+ .local,
+ .local_ref,
+ .constant,
+ .arg,
+ .arg_array,
+ .ctype_pool_string,
+ => unreachable,
.field => |i| try w.print("f{d}", .{i}),
.decl => |decl| {
try w.writeAll("(*");
@@ -2048,7 +2197,17 @@ pub const DeclGen = struct {
fn writeCValueDerefMember(dg: *DeclGen, writer: anytype, c_value: CValue, member: CValue) !void {
switch (c_value) {
- .none, .new_local, .local, .local_ref, .constant, .field, .undef, .arg, .arg_array => unreachable,
+ .none,
+ .new_local,
+ .local,
+ .local_ref,
+ .constant,
+ .field,
+ .undef,
+ .arg,
+ .arg_array,
+ .ctype_pool_string,
+ => unreachable,
.decl, .identifier, .payload_identifier => {
try dg.writeCValue(writer, c_value);
try writer.writeAll("->");
@@ -2088,12 +2247,12 @@ pub const DeclGen = struct {
.complete,
);
mangled: {
- const external_name = zcu.intern_pool.stringToSlice(if (maybe_exports) |exports|
+ const external_name = (if (maybe_exports) |exports|
exports.items[0].opts.name
else if (variable.is_extern)
decl.name
else
- break :mangled);
+ break :mangled).toSlice(&zcu.intern_pool);
if (isMangledIdent(external_name, true)) {
try fwd.print(" zig_mangled_{s}({ }, {s})", .{
@tagName(fwd_kind),
@@ -2107,15 +2266,16 @@ pub const DeclGen = struct {
fn renderDeclName(dg: *DeclGen, writer: anytype, decl_index: InternPool.DeclIndex, export_index: u32) !void {
const zcu = dg.zcu;
+ const ip = &zcu.intern_pool;
const decl = zcu.declPtr(decl_index);
if (zcu.decl_exports.get(decl_index)) |exports| {
try writer.print("{ }", .{
- fmtIdent(zcu.intern_pool.stringToSlice(exports.items[export_index].opts.name)),
+ fmtIdent(exports.items[export_index].opts.name.toSlice(ip)),
});
} else if (decl.getExternDecl(zcu).unwrap()) |extern_decl_index| {
try writer.print("{ }", .{
- fmtIdent(zcu.intern_pool.stringToSlice(zcu.declPtr(extern_decl_index).name)),
+ fmtIdent(zcu.declPtr(extern_decl_index).name.toSlice(ip)),
});
} else {
            // MSVC has a 4095 character token length limit, and fmtIdent can (worst case),
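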
@@ -2186,11 +2346,7 @@ pub const DeclGen = struct {
loc: ValueRenderLocation,
) !std.fmt.Formatter(formatIntLiteral) {
const zcu = dg.zcu;
- const kind: CType.Kind = switch (loc) {
- .FunctionArgument => .parameter,
- .Initializer, .Other => .complete,
- .StaticInitializer => .global,
- };
+ const kind = loc.toCTypeKind();
const ty = val.typeOf(zcu);
return std.fmt.Formatter(formatIntLiteral){ .data = .{
.dg = dg,
@@ -2239,7 +2395,7 @@ fn renderFwdDeclTypeName(
switch (fwd_decl.name) {
.anon => try w.print("anon__lazy_{d}", .{@intFromEnum(ctype.index)}),
.owner_decl => |owner_decl| try w.print("{}__{d}", .{
- fmtIdent(zcu.intern_pool.stringToSlice(zcu.declPtr(owner_decl).name)),
+ fmtIdent(zcu.declPtr(owner_decl).name.toSlice(&zcu.intern_pool)),
@intFromEnum(owner_decl),
}),
}
@@ -2453,7 +2609,7 @@ fn renderFields(
.suffix,
.{},
);
- try writer.print("{}{ }", .{ trailing, fmtIdent(field_info.name.slice(ctype_pool)) });
+ try writer.print("{}{ }", .{ trailing, fmtCTypePoolString(field_info.name, ctype_pool) });
try renderTypeSuffix(.flush, ctype_pool, zcu, writer, field_info.ctype, .suffix, .{});
try writer.writeAll(";\n");
}
@@ -2561,7 +2717,7 @@ pub fn genErrDecls(o: *Object) !void {
try writer.writeAll("enum {\n");
o.indent_writer.pushIndent();
for (zcu.global_error_set.keys()[1..], 1..) |name_nts, value| {
- const name = ip.stringToSlice(name_nts);
+ const name = name_nts.toSlice(ip);
max_name_len = @max(name.len, max_name_len);
const err_val = try zcu.intern(.{ .err = .{
.ty = .anyerror_type,
@@ -2579,19 +2735,19 @@ pub fn genErrDecls(o: *Object) !void {
defer o.dg.gpa.free(name_buf);
@memcpy(name_buf[0..name_prefix.len], name_prefix);
- for (zcu.global_error_set.keys()) |name_ip| {
- const name = ip.stringToSlice(name_ip);
- @memcpy(name_buf[name_prefix.len..][0..name.len], name);
- const identifier = name_buf[0 .. name_prefix.len + name.len];
+ for (zcu.global_error_set.keys()) |name| {
+ const name_slice = name.toSlice(ip);
+ @memcpy(name_buf[name_prefix.len..][0..name_slice.len], name_slice);
+ const identifier = name_buf[0 .. name_prefix.len + name_slice.len];
const name_ty = try zcu.arrayType(.{
- .len = name.len,
+ .len = name_slice.len,
.child = .u8_type,
.sentinel = .zero_u8,
});
const name_val = try zcu.intern(.{ .aggregate = .{
.ty = name_ty.toIntern(),
- .storage = .{ .bytes = name },
+ .storage = .{ .bytes = name.toString() },
} });
try writer.writeAll("static ");
@@ -2624,7 +2780,7 @@ pub fn genErrDecls(o: *Object) !void {
);
try writer.writeAll(" = {");
for (zcu.global_error_set.keys(), 0..) |name_nts, value| {
- const name = ip.stringToSlice(name_nts);
+ const name = name_nts.toSlice(ip);
if (value != 0) try writer.writeByte(',');
try writer.print("{{" ++ name_prefix ++ "{}, {}}}", .{
fmtIdent(name),
@@ -2672,7 +2828,7 @@ fn genExports(o: *Object) !void {
for (exports.items[1..]) |@"export"| {
try fwd.writeAll("zig_extern ");
if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage ");
- const export_name = ip.stringToSlice(@"export".opts.name);
+ const export_name = @"export".opts.name.toSlice(ip);
try o.dg.renderTypeAndName(
fwd,
decl.typeOf(zcu),
@@ -2685,11 +2841,11 @@ fn genExports(o: *Object) !void {
try fwd.print(" zig_mangled_export({ }, {s}, {s})", .{
fmtIdent(export_name),
fmtStringLiteral(export_name, null),
- fmtStringLiteral(ip.stringToSlice(exports.items[0].opts.name), null),
+ fmtStringLiteral(exports.items[0].opts.name.toSlice(ip), null),
});
} else {
try fwd.print(" zig_export({s}, {s})", .{
- fmtStringLiteral(ip.stringToSlice(exports.items[0].opts.name), null),
+ fmtStringLiteral(exports.items[0].opts.name.toSlice(ip), null),
fmtStringLiteral(export_name, null),
});
}
@@ -2712,24 +2868,23 @@ pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFn
try w.writeAll("static ");
try o.dg.renderType(w, name_slice_ty);
- try w.writeByte(' ');
- try w.writeAll(val.fn_name.slice(lazy_ctype_pool));
- try w.writeByte('(');
+ try w.print(" {}(", .{val.fn_name.fmt(lazy_ctype_pool)});
try o.dg.renderTypeAndName(w, enum_ty, .{ .identifier = "tag" }, Const, .none, .complete);
try w.writeAll(") {\n switch (tag) {\n");
const tag_names = enum_ty.enumFields(zcu);
for (0..tag_names.len) |tag_index| {
- const tag_name = ip.stringToSlice(tag_names.get(ip)[tag_index]);
+ const tag_name = tag_names.get(ip)[tag_index];
+ const tag_name_len = tag_name.length(ip);
const tag_val = try zcu.enumValueFieldIndex(enum_ty, @intCast(tag_index));
const name_ty = try zcu.arrayType(.{
- .len = tag_name.len,
+ .len = tag_name_len,
.child = .u8_type,
.sentinel = .zero_u8,
});
const name_val = try zcu.intern(.{ .aggregate = .{
.ty = name_ty.toIntern(),
- .storage = .{ .bytes = tag_name },
+ .storage = .{ .bytes = tag_name.toString() },
} });
try w.print(" case {}: {{\n static ", .{
@@ -2742,7 +2897,7 @@ pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFn
try o.dg.renderType(w, name_slice_ty);
try w.print("){{{}, {}}};\n", .{
fmtIdent("name"),
- try o.dg.fmtIntLiteral(try zcu.intValue(Type.usize, tag_name.len), .Other),
+ try o.dg.fmtIntLiteral(try zcu.intValue(Type.usize, tag_name_len), .Other),
});
try w.writeAll(" }\n");
@@ -2757,21 +2912,18 @@ pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFn
const fn_decl = zcu.declPtr(fn_decl_index);
const fn_ctype = try o.dg.ctypeFromType(fn_decl.typeOf(zcu), .complete);
const fn_info = fn_ctype.info(ctype_pool).function;
- const fn_name = val.fn_name.slice(lazy_ctype_pool);
+ const fn_name = fmtCTypePoolString(val.fn_name, lazy_ctype_pool);
const fwd_decl_writer = o.dg.fwdDeclWriter();
try fwd_decl_writer.print("static zig_{s} ", .{@tagName(key)});
- try o.dg.renderFunctionSignature(
- fwd_decl_writer,
- fn_decl_index,
- .forward,
- .{ .ident = fn_name },
- );
+ try o.dg.renderFunctionSignature(fwd_decl_writer, fn_decl_index, .forward, .{
+ .fmt_ctype_pool_string = fn_name,
+ });
try fwd_decl_writer.writeAll(";\n");
try w.print("static zig_{s} ", .{@tagName(key)});
try o.dg.renderFunctionSignature(w, fn_decl_index, .complete, .{
- .ident = fn_name,
+ .fmt_ctype_pool_string = fn_name,
});
try w.writeAll(" {\n return ");
try o.dg.renderDeclName(w, fn_decl_index, 0);
@@ -2810,7 +2962,7 @@ pub fn genFunc(f: *Function) !void {
try o.indent_writer.insertNewline();
if (!is_global) try o.writer().writeAll("static ");
- if (zcu.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s|
+ if (decl.@"linksection".toSlice(&zcu.intern_pool)) |s|
try o.writer().print("zig_linksection_fn({s}) ", .{fmtStringLiteral(s, null)});
try o.dg.renderFunctionSignature(o.writer(), decl_index, .complete, .{ .export_index = 0 });
try o.writer().writeByte(' ');
@@ -2900,7 +3052,7 @@ pub fn genDecl(o: *Object) !void {
if (!is_global) try w.writeAll("static ");
if (variable.is_weak_linkage) try w.writeAll("zig_weak_linkage ");
if (variable.is_threadlocal and !o.dg.mod.single_threaded) try w.writeAll("zig_threadlocal ");
- if (zcu.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |s|
+ if (decl.@"linksection".toSlice(&zcu.intern_pool)) |s|
try w.print("zig_linksection({s}) ", .{fmtStringLiteral(s, null)});
const decl_c_value = .{ .decl = decl_index };
try o.dg.renderTypeAndName(w, decl_ty, decl_c_value, .{}, decl.alignment, .complete);
@@ -2933,7 +3085,7 @@ pub fn genDeclValue(
switch (o.dg.pass) {
.decl => |decl_index| {
if (zcu.decl_exports.get(decl_index)) |exports| {
- const export_name = zcu.intern_pool.stringToSlice(exports.items[0].opts.name);
+ const export_name = exports.items[0].opts.name.toSlice(&zcu.intern_pool);
if (isMangledIdent(export_name, true)) {
try fwd_decl_writer.print(" zig_mangled_final({ }, {s})", .{
fmtIdent(export_name), fmtStringLiteral(export_name, null),
@@ -2949,7 +3101,7 @@ pub fn genDeclValue(
const w = o.writer();
if (!is_global) try w.writeAll("static ");
- if (zcu.intern_pool.stringToSliceUnwrap(@"linksection")) |s|
+ if (@"linksection".toSlice(&zcu.intern_pool)) |s|
try w.print("zig_linksection({s}) ", .{fmtStringLiteral(s, null)});
try o.dg.renderTypeAndName(w, ty, decl_c_value, Const, alignment, .complete);
try w.writeAll(" = ");
@@ -3156,8 +3308,8 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
.shl_exact => try airBinOp(f, inst, "<<", "shl", .none),
.not => try airNot (f, inst),
- .optional_payload => try airOptionalPayload(f, inst),
- .optional_payload_ptr => try airOptionalPayloadPtr(f, inst),
+ .optional_payload => try airOptionalPayload(f, inst, false),
+ .optional_payload_ptr => try airOptionalPayload(f, inst, true),
.optional_payload_ptr_set => try airOptionalPayloadPtrSet(f, inst),
.wrap_optional => try airWrapOptional(f, inst),
@@ -3166,10 +3318,10 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
.is_err_ptr => try airIsErr(f, inst, true, "!="),
.is_non_err_ptr => try airIsErr(f, inst, true, "=="),
- .is_null => try airIsNull(f, inst, "==", false),
- .is_non_null => try airIsNull(f, inst, "!=", false),
- .is_null_ptr => try airIsNull(f, inst, "==", true),
- .is_non_null_ptr => try airIsNull(f, inst, "!=", true),
+ .is_null => try airIsNull(f, inst, .eq, false),
+ .is_non_null => try airIsNull(f, inst, .neq, false),
+ .is_null_ptr => try airIsNull(f, inst, .eq, true),
+ .is_non_null_ptr => try airIsNull(f, inst, .neq, true),
.alloc => try airAlloc(f, inst),
.ret_ptr => try airRetPtr(f, inst),
@@ -3252,8 +3404,8 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
.slice_ptr => try airSliceField(f, inst, false, "ptr"),
.slice_len => try airSliceField(f, inst, false, "len"),
- .ptr_slice_len_ptr => try airSliceField(f, inst, true, "len"),
.ptr_slice_ptr_ptr => try airSliceField(f, inst, true, "ptr"),
+ .ptr_slice_len_ptr => try airSliceField(f, inst, true, "len"),
.ptr_elem_val => try airPtrElemVal(f, inst),
.ptr_elem_ptr => try airPtrElemPtr(f, inst),
@@ -3321,7 +3473,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
}
try f.value_map.putNoClobber(inst.toRef(), switch (result_value) {
.none => continue,
- .new_local => |i| .{ .local = i },
+ .new_local => |local_index| .{ .local = local_index },
else => result_value,
});
}
@@ -3336,7 +3488,7 @@ fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: [
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
if (is_ptr) {
@@ -3362,7 +3514,7 @@ fn airPtrElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try f.writeCValue(writer, ptr, .Other);
@@ -3388,7 +3540,7 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try writer.writeByte('(');
@@ -3423,7 +3575,7 @@ fn airSliceElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try f.writeCValueMember(writer, slice, .{ .identifier = "ptr" });
@@ -3450,7 +3602,7 @@ fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
if (elem_has_bits) try writer.writeByte('&');
@@ -3479,7 +3631,7 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try f.writeCValue(writer, array, .Other);
@@ -3704,17 +3856,18 @@ fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue {
const operand_ty = f.typeOf(ty_op.operand);
const scalar_ty = operand_ty.scalarType(zcu);
+ if (f.object.dg.intCastIsNoop(inst_scalar_ty, scalar_ty)) return f.moveCValue(inst, inst_ty, operand);
+
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
const v = try Vectorize.start(f, inst, writer, operand_ty);
- const a = try Assignment.start(f, writer, scalar_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(scalar_ty, .complete));
try f.writeCValue(writer, local, .Other);
try v.elem(f, writer);
try a.assign(f, writer);
try f.renderIntCast(writer, inst_scalar_ty, operand, v, scalar_ty, .Other);
try a.end(f, writer);
try v.end(f, inst, writer);
-
return local;
}
@@ -3724,38 +3877,40 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
+
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(zcu);
const dest_int_info = inst_scalar_ty.intInfo(zcu);
const dest_bits = dest_int_info.bits;
- const dest_c_bits = toCIntBits(dest_int_info.bits) orelse
+ const dest_c_bits = toCIntBits(dest_bits) orelse
return f.fail("TODO: C backend: implement integer types larger than 128 bits", .{});
const operand_ty = f.typeOf(ty_op.operand);
const scalar_ty = operand_ty.scalarType(zcu);
const scalar_int_info = scalar_ty.intInfo(zcu);
+ const need_cast = dest_c_bits < 64;
+ const need_lo = scalar_int_info.bits > 64 and dest_bits <= 64;
+ const need_mask = dest_bits < 8 or !std.math.isPowerOfTwo(dest_bits);
+ if (!need_cast and !need_lo and !need_mask) return f.moveCValue(inst, inst_ty, operand);
+
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
const v = try Vectorize.start(f, inst, writer, operand_ty);
-
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_scalar_ty, .complete));
try f.writeCValue(writer, local, .Other);
try v.elem(f, writer);
- try writer.writeAll(" = ");
-
- if (dest_c_bits < 64) {
+ try a.assign(f, writer);
+ if (need_cast) {
try writer.writeByte('(');
try f.renderType(writer, inst_scalar_ty);
try writer.writeByte(')');
}
-
- const needs_lo = scalar_int_info.bits > 64 and dest_bits <= 64;
- if (needs_lo) {
+ if (need_lo) {
try writer.writeAll("zig_lo_");
try f.object.dg.renderTypeForBuiltinFnName(writer, scalar_ty);
try writer.writeByte('(');
}
-
- if (dest_bits >= 8 and std.math.isPowerOfTwo(dest_bits)) {
+ if (!need_mask) {
try f.writeCValue(writer, operand, .Other);
try v.elem(f, writer);
} else switch (dest_int_info.signedness) {
@@ -3795,11 +3950,9 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.print(", {})", .{try f.fmtIntLiteral(shift_val)});
},
}
-
- if (needs_lo) try writer.writeByte(')');
- try writer.writeAll(";\n");
+ if (need_lo) try writer.writeByte(')');
+ try a.end(f, writer);
try v.end(f, inst, writer);
-
return local;
}
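
A hedged plain-C sketch of the truncation cases handled above; the exact emission goes through the backend's helper macros, but the effect is: power-of-two widths need only a cast, other widths need a mask, and signed results additionally need sign extension. The u3/i3 widths and function names are made up for the example.

#include <stdint.h>

/* Power-of-two destination width: a plain cast suffices. */
static uint8_t trunc_to_u8(uint32_t x) {
    return (uint8_t)x;
}

/* u3 destination (not a power of two): mask off the high bits. */
static uint8_t trunc_to_u3(uint32_t x) {
    return (uint8_t)(x & 0x7u);
}

/* i3 destination: keep the low 3 bits and sign-extend them (written portably
   here; the backend reaches the same result via its shift/mask helpers). */
static int8_t trunc_to_i3(uint32_t x) {
    uint32_t low = x & 0x7u;
    return (int8_t)(low >= 0x4u ? (int32_t)low - 8 : (int32_t)low);
}
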
@@ -3810,7 +3963,7 @@ fn airIntFromBool(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const inst_ty = f.typeOfIndex(inst);
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try f.writeCValue(writer, operand, .Other);
@@ -3855,9 +4008,8 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
const src_val = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
+ const src_scalar_ctype = try f.ctypeFromType(src_ty.scalarType(zcu), .complete);
const writer = f.object.writer();
- const v = try Vectorize.start(f, inst, writer, ptr_ty);
-
if (need_memcpy) {
// For this memcpy to safely work we need the rhs to have the same
// underlying type as the lhs (i.e. they must both be arrays of the same underlying type).
@@ -3876,6 +4028,7 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
break :blk new_local;
} else src_val;
+ const v = try Vectorize.start(f, inst, writer, ptr_ty);
try writer.writeAll("memcpy((char *)");
try f.writeCValue(writer, ptr_val, .FunctionArgument);
try v.elem(f, writer);
@@ -3886,9 +4039,9 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
try writer.writeAll(", sizeof(");
try f.renderType(writer, src_ty);
try writer.writeAll("))");
- if (src_val == .constant) {
- try freeLocal(f, inst, array_src.new_local, null);
- }
+ try f.freeCValue(inst, array_src);
+ try writer.writeAll(";\n");
+ try v.end(f, inst, writer);
} else if (ptr_info.packed_offset.host_size > 0 and ptr_info.flags.vector_index == .none) {
const host_bits = ptr_info.packed_offset.host_size * 8;
const host_ty = try zcu.intType(.unsigned, host_bits);
@@ -3911,9 +4064,12 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
const mask_val = try zcu.intValue_big(host_ty, mask.toConst());
+ const v = try Vectorize.start(f, inst, writer, ptr_ty);
+ const a = try Assignment.start(f, writer, src_scalar_ctype);
try f.writeCValueDeref(writer, ptr_val);
try v.elem(f, writer);
- try writer.writeAll(" = zig_or_");
+ try a.assign(f, writer);
+ try writer.writeAll("zig_or_");
try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty);
try writer.writeAll("(zig_and_");
try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty);
@@ -3944,16 +4100,27 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
try v.elem(f, writer);
if (cant_cast) try writer.writeByte(')');
try writer.print(", {}))", .{try f.fmtIntLiteral(bit_offset_val)});
+ try a.end(f, writer);
+ try v.end(f, inst, writer);
} else {
+ switch (ptr_val) {
+ .local_ref => |ptr_local_index| switch (src_val) {
+ .new_local, .local => |src_local_index| if (ptr_local_index == src_local_index)
+ return .none,
+ else => {},
+ },
+ else => {},
+ }
+ const v = try Vectorize.start(f, inst, writer, ptr_ty);
+ const a = try Assignment.start(f, writer, src_scalar_ctype);
try f.writeCValueDeref(writer, ptr_val);
try v.elem(f, writer);
- try writer.writeAll(" = ");
+ try a.assign(f, writer);
try f.writeCValue(writer, src_val, .Other);
try v.elem(f, writer);
+ try a.end(f, writer);
+ try v.end(f, inst, writer);
}
- try writer.writeAll(";\n");
- try v.end(f, inst, writer);
-
return .none;
}
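
A hedged plain-C sketch of the memcpy store path above: both operands are addressed through char pointers, the byte count is taken from sizeof the source type, and a constant right-hand side is first copied into an addressable local. The array_like type and function name are invented for the example.

#include <stdint.h>
#include <string.h>

typedef struct { uint32_t elems[4]; } array_like;

static void store_via_memcpy(array_like *dest_ptr, array_like src_val) {
    /* Materialize the value so it has an address, as the backend does when
       the source is a constant rather than an existing local. */
    array_like tmp = src_val;
    memcpy((char *)dest_ptr, (const char *)&tmp, sizeof(array_like));
}
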
@@ -4116,6 +4283,7 @@ fn airEquality(
operator: std.math.CompareOperator,
) !CValue {
const zcu = f.object.dg.zcu;
+ const ctype_pool = &f.object.dg.ctype_pool;
const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
const operand_ty = f.typeOf(bin_op.lhs);
@@ -4137,28 +4305,47 @@ fn airEquality(
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const writer = f.object.writer();
- const inst_ty = f.typeOfIndex(inst);
- const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const local = try f.allocLocal(inst, Type.bool);
+ const a = try Assignment.start(f, writer, CType.bool);
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
- if (operand_ty.zigTypeTag(zcu) == .Optional and !operand_ty.optionalReprIsPayload(zcu)) {
- try f.writeCValueMember(writer, lhs, .{ .identifier = "is_null" });
- try writer.writeAll(" || ");
- try f.writeCValueMember(writer, rhs, .{ .identifier = "is_null" });
- try writer.writeAll(" ? ");
- try f.writeCValueMember(writer, lhs, .{ .identifier = "is_null" });
- try writer.writeAll(compareOperatorC(operator));
- try f.writeCValueMember(writer, rhs, .{ .identifier = "is_null" });
- try writer.writeAll(" : ");
- try f.writeCValueMember(writer, lhs, .{ .identifier = "payload" });
- try writer.writeAll(compareOperatorC(operator));
- try f.writeCValueMember(writer, rhs, .{ .identifier = "payload" });
- } else {
- try f.writeCValue(writer, lhs, .Other);
- try writer.writeAll(compareOperatorC(operator));
- try f.writeCValue(writer, rhs, .Other);
+ const operand_ctype = try f.ctypeFromType(operand_ty, .complete);
+ switch (operand_ctype.info(ctype_pool)) {
+ .basic, .pointer => {
+ try f.writeCValue(writer, lhs, .Other);
+ try writer.writeAll(compareOperatorC(operator));
+ try f.writeCValue(writer, rhs, .Other);
+ },
+ .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| if (aggregate.fields.len == 2 and
+ (aggregate.fields.at(0, ctype_pool).name.index == .is_null or
+ aggregate.fields.at(1, ctype_pool).name.index == .is_null))
+ {
+ try f.writeCValueMember(writer, lhs, .{ .identifier = "is_null" });
+ try writer.writeAll(" || ");
+ try f.writeCValueMember(writer, rhs, .{ .identifier = "is_null" });
+ try writer.writeAll(" ? ");
+ try f.writeCValueMember(writer, lhs, .{ .identifier = "is_null" });
+ try writer.writeAll(compareOperatorC(operator));
+ try f.writeCValueMember(writer, rhs, .{ .identifier = "is_null" });
+ try writer.writeAll(" : ");
+ try f.writeCValueMember(writer, lhs, .{ .identifier = "payload" });
+ try writer.writeAll(compareOperatorC(operator));
+ try f.writeCValueMember(writer, rhs, .{ .identifier = "payload" });
+ } else for (0..aggregate.fields.len) |field_index| {
+ if (field_index > 0) try writer.writeAll(switch (operator) {
+ .lt, .lte, .gte, .gt => unreachable,
+ .eq => " && ",
+ .neq => " || ",
+ });
+ const field_name: CValue = .{
+ .ctype_pool_string = aggregate.fields.at(field_index, ctype_pool).name,
+ };
+ try f.writeCValueMember(writer, lhs, field_name);
+ try writer.writeAll(compareOperatorC(operator));
+ try f.writeCValueMember(writer, rhs, field_name);
+ },
}
try a.end(f, writer);
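
A hedged plain-C sketch of the comparisons generated above. For an optional carried as an is_null/payload pair, a null on either side reduces the comparison to the null flags; otherwise the payloads are compared. Other aggregates (slices, for instance) fall back to a field-by-field chain joined with && for == (|| would be used for !=). All type and function names below are invented for the example.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef struct { bool is_null; uint32_t payload; } opt_u32;
typedef struct { const uint8_t *ptr; size_t len; } slice_u8;

static bool opt_u32_eq(opt_u32 lhs, opt_u32 rhs) {
    return lhs.is_null || rhs.is_null
        ? lhs.is_null == rhs.is_null
        : lhs.payload == rhs.payload;
}

static bool slice_u8_eq(slice_u8 lhs, slice_u8 rhs) {
    /* Field-by-field chain over the aggregate's members. */
    return lhs.ptr == rhs.ptr && lhs.len == rhs.len;
}
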
@@ -4168,12 +4355,11 @@ fn airEquality(
fn airCmpLtErrorsLen(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
- const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
const writer = f.object.writer();
- const local = try f.allocLocal(inst, inst_ty);
+ const local = try f.allocLocal(inst, Type.bool);
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = ");
try f.writeCValue(writer, operand, .Other);
@@ -4193,39 +4379,34 @@ fn airPtrAddSub(f: *Function, inst: Air.Inst.Index, operator: u8) !CValue {
const inst_ty = f.typeOfIndex(inst);
const inst_scalar_ty = inst_ty.scalarType(zcu);
const elem_ty = inst_scalar_ty.elemType2(zcu);
+ if (!elem_ty.hasRuntimeBitsIgnoreComptime(zcu)) return f.moveCValue(inst, inst_ty, lhs);
+ const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete);
const local = try f.allocLocal(inst, inst_ty);
const writer = f.object.writer();
const v = try Vectorize.start(f, inst, writer, inst_ty);
+ const a = try Assignment.start(f, writer, inst_scalar_ctype);
try f.writeCValue(writer, local, .Other);
try v.elem(f, writer);
- try writer.writeAll(" = ");
-
- if (elem_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
- // We must convert to and from integer types to prevent UB if the operation
- // results in a NULL pointer, or if LHS is NULL. The operation is only UB
- // if the result is NULL and then dereferenced.
- try writer.writeByte('(');
- try f.renderType(writer, inst_scalar_ty);
- try writer.writeAll(")(((uintptr_t)");
- try f.writeCValue(writer, lhs, .Other);
- try v.elem(f, writer);
- try writer.writeAll(") ");
- try writer.writeByte(operator);
- try writer.writeAll(" (");
- try f.writeCValue(writer, rhs, .Other);
- try v.elem(f, writer);
- try writer.writeAll("*sizeof(");
- try f.renderType(writer, elem_ty);
- try writer.writeAll(")))");
- } else {
- try f.writeCValue(writer, lhs, .Other);
- try v.elem(f, writer);
- }
-
- try writer.writeAll(";\n");
+ try a.assign(f, writer);
+ // We must convert to and from integer types to prevent UB if the operation
+ // results in a NULL pointer, or if LHS is NULL. The operation is only UB
+ // if the result is NULL and then dereferenced.
+ try writer.writeByte('(');
+ try f.renderCType(writer, inst_scalar_ctype);
+ try writer.writeAll(")(((uintptr_t)");
+ try f.writeCValue(writer, lhs, .Other);
+ try v.elem(f, writer);
+ try writer.writeAll(") ");
+ try writer.writeByte(operator);
+ try writer.writeAll(" (");
+ try f.writeCValue(writer, rhs, .Other);
+ try v.elem(f, writer);
+ try writer.writeAll("*sizeof(");
+ try f.renderType(writer, elem_ty);
+ try writer.writeAll(")))");
+ try a.end(f, writer);
try v.end(f, inst, writer);
-
return local;
}
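
A hedged plain-C sketch of the pattern emitted above; the function name is invented. The arithmetic is done on uintptr_t so that merely producing a null (or otherwise invalid) address is not undefined behaviour, and the result is cast back to the pointer type afterwards.

#include <stdint.h>

static uint32_t *ptr_add_elems(uint32_t *lhs, uintptr_t rhs) {
    /* lhs advanced by rhs elements, computed through integers rather than
       pointer arithmetic, matching the cast sequence written out above. */
    return (uint32_t *)(((uintptr_t)lhs) + (rhs * sizeof(uint32_t)));
}
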
@@ -4286,14 +4467,14 @@ fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
{
- const a = try Assignment.start(f, writer, ptr_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(ptr_ty, .complete));
try f.writeCValueMember(writer, local, .{ .identifier = "ptr" });
try a.assign(f, writer);
try f.writeCValue(writer, ptr, .Other);
try a.end(f, writer);
}
{
- const a = try Assignment.start(f, writer, Type.usize);
+ const a = try Assignment.start(f, writer, CType.usize);
try f.writeCValueMember(writer, local, .{ .identifier = "len" });
try a.assign(f, writer);
try f.writeCValue(writer, len, .Initializer);
@@ -4360,7 +4541,7 @@ fn airCall(
}).?;
const ret_ty = Type.fromInterned(fn_info.return_type);
const ret_ctype: CType = if (ret_ty.isNoReturn(zcu))
- .{ .index = .void }
+ CType.void
else
try f.ctypeFromType(ret_ty, .parameter);
@@ -4372,7 +4553,7 @@ fn airCall(
break :result .none;
} else if (f.liveness.isUnused(inst)) {
try writer.writeByte('(');
- try f.renderCType(writer, .{ .index = .void });
+ try f.renderCType(writer, CType.void);
try writer.writeByte(')');
break :result .none;
} else {
@@ -4427,10 +4608,7 @@ fn airCall(
if (need_comma) try writer.writeAll(", ");
need_comma = true;
try f.writeCValue(writer, resolved_arg, .FunctionArgument);
- switch (resolved_arg) {
- .new_local => |local| try freeLocal(f, inst, local, null),
- else => {},
- }
+ try f.freeCValue(inst, resolved_arg);
}
try writer.writeAll(");\n");
@@ -4614,7 +4792,7 @@ fn lowerTry(
}
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
if (is_ptr) {
@@ -4637,7 +4815,7 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(branch.operand);
try reap(f, inst, &.{branch.operand});
- const a = try Assignment.start(f, writer, operand_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(operand_ty, .complete));
try f.writeCValue(writer, result, .Other);
try a.assign(f, writer);
try f.writeCValue(writer, operand, .Other);
@@ -4650,53 +4828,17 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !CValue {
fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
- const dest_ty = f.typeOfIndex(inst);
+ const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(ty_op.operand);
const operand_ty = f.typeOf(ty_op.operand);
- const bitcasted = try bitcast(f, dest_ty, operand, operand_ty);
+ const bitcasted = try bitcast(f, inst_ty, operand, operand_ty);
try reap(f, inst, &.{ty_op.operand});
- return bitcasted.move(f, inst, dest_ty);
+ return f.moveCValue(inst, inst_ty, bitcasted);
}
-const LocalResult = struct {
- c_value: CValue,
- need_free: bool,
-
- fn move(lr: LocalResult, f: *Function, inst: Air.Inst.Index, dest_ty: Type) !CValue {
- const zcu = f.object.dg.zcu;
-
- if (lr.need_free) {
- // Move the freshly allocated local to be owned by this instruction,
- // by returning it here instead of freeing it.
- return lr.c_value;
- }
-
- const local = try f.allocLocal(inst, dest_ty);
- try lr.free(f);
- const writer = f.object.writer();
- try f.writeCValue(writer, local, .Other);
- if (dest_ty.isAbiInt(zcu)) {
- try writer.writeAll(" = ");
- } else {
- try writer.writeAll(" = (");
- try f.renderType(writer, dest_ty);
- try writer.writeByte(')');
- }
- try f.writeCValue(writer, lr.c_value, .Initializer);
- try writer.writeAll(";\n");
- return local;
- }
-
- fn free(lr: LocalResult, f: *Function) !void {
- if (lr.need_free) {
- try freeLocal(f, null, lr.c_value.new_local, null);
- }
- }
-};
-
-fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !LocalResult {
+fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !CValue {
const zcu = f.object.dg.zcu;
const target = &f.object.dg.mod.resolved_target.result;
const ctype_pool = &f.object.dg.ctype_pool;
@@ -4706,13 +4848,7 @@ fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !Loca
const src_info = dest_ty.intInfo(zcu);
const dest_info = operand_ty.intInfo(zcu);
if (src_info.signedness == dest_info.signedness and
- src_info.bits == dest_info.bits)
- {
- return .{
- .c_value = operand,
- .need_free = false,
- };
- }
+ src_info.bits == dest_info.bits) return operand;
}
if (dest_ty.isPtrAtRuntime(zcu) and operand_ty.isPtrAtRuntime(zcu)) {
@@ -4723,10 +4859,7 @@ fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !Loca
try writer.writeByte(')');
try f.writeCValue(writer, operand, .Other);
try writer.writeAll(";\n");
- return .{
- .c_value = local,
- .need_free = true,
- };
+ return local;
}
const operand_lval = if (operand == .constant) blk: {
@@ -4813,14 +4946,8 @@ fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !Loca
try writer.writeAll(");\n");
}
- if (operand == .constant) {
- try freeLocal(f, null, operand_lval.new_local, null);
- }
-
- return .{
- .c_value = local,
- .need_free = true,
- };
+ try f.freeCValue(null, operand_lval);
+ return local;
}
fn airTrap(f: *Function, writer: anytype) !CValue {
@@ -4931,13 +5058,16 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
try writer.writeAll("switch (");
- if (condition_ty.zigTypeTag(zcu) == .Bool) {
- try writer.writeByte('(');
- try f.renderType(writer, Type.u1);
- try writer.writeByte(')');
- } else if (condition_ty.isPtrAtRuntime(zcu)) {
+
+ const lowered_condition_ty = if (condition_ty.toIntern() == .bool_type)
+ Type.u1
+ else if (condition_ty.isPtrAtRuntime(zcu))
+ Type.usize
+ else
+ condition_ty;
+ if (condition_ty.toIntern() != lowered_condition_ty.toIntern()) {
try writer.writeByte('(');
- try f.renderType(writer, Type.usize);
+ try f.renderType(writer, lowered_condition_ty);
try writer.writeByte(')');
}
try f.writeCValue(writer, condition, .Other);
@@ -4956,18 +5086,24 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
for (0..switch_br.data.cases_len) |case_i| {
const case = f.air.extraData(Air.SwitchBr.Case, extra_index);
const items = @as([]const Air.Inst.Ref, @ptrCast(f.air.extra[case.end..][0..case.data.items_len]));
- const case_body: []const Air.Inst.Index = @ptrCast(f.air.extra[case.end + items.len ..][0..case.data.body_len]);
+ const case_body: []const Air.Inst.Index =
+ @ptrCast(f.air.extra[case.end + items.len ..][0..case.data.body_len]);
extra_index = case.end + case.data.items_len + case_body.len;
for (items) |item| {
try f.object.indent_writer.insertNewline();
try writer.writeAll("case ");
- if (condition_ty.isPtrAtRuntime(zcu)) {
- try writer.writeByte('(');
- try f.renderType(writer, Type.usize);
- try writer.writeByte(')');
+ const item_value = try f.air.value(item, zcu);
+ if (item_value.?.getUnsignedInt(zcu)) |item_int| try writer.print("{}\n", .{
+ try f.fmtIntLiteral(try zcu.intValue(lowered_condition_ty, item_int)),
+ }) else {
+ if (condition_ty.isPtrAtRuntime(zcu)) {
+ try writer.writeByte('(');
+ try f.renderType(writer, Type.usize);
+ try writer.writeByte(')');
+ }
+ try f.object.dg.renderValue(writer, (try f.air.value(item, zcu)).?, .Other);
}
- try f.object.dg.renderValue(writer, (try f.air.value(item, zcu)).?, .Other);
try writer.writeByte(':');
}
try writer.writeByte(' ');
@@ -5289,10 +5425,11 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
fn airIsNull(
f: *Function,
inst: Air.Inst.Index,
- operator: []const u8,
+ operator: std.math.CompareOperator,
is_ptr: bool,
) !CValue {
const zcu = f.object.dg.zcu;
+ const ctype_pool = &f.object.dg.ctype_pool;
const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
const writer = f.object.writer();
@@ -5300,106 +5437,84 @@ fn airIsNull(
try reap(f, inst, &.{un_op});
const local = try f.allocLocal(inst, Type.bool);
+ const a = try Assignment.start(f, writer, CType.bool);
try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = ");
- if (is_ptr) {
- try f.writeCValueDeref(writer, operand);
- } else {
- try f.writeCValue(writer, operand, .Other);
- }
+ try a.assign(f, writer);
const operand_ty = f.typeOf(un_op);
const optional_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty;
- const payload_ty = optional_ty.optionalChild(zcu);
- const err_int_ty = try zcu.errorIntType();
-
- const rhs = if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu))
- Value.true
- else if (optional_ty.isPtrLikeOptional(zcu))
- // operand is a regular pointer, test `operand !=/== NULL`
- try zcu.getCoerced(Value.null, optional_ty)
- else if (payload_ty.zigTypeTag(zcu) == .ErrorSet)
- try zcu.intValue(err_int_ty, 0)
- else if (payload_ty.isSlice(zcu) and optional_ty.optionalReprIsPayload(zcu)) rhs: {
- try writer.writeAll(".ptr");
- const slice_ptr_ty = payload_ty.slicePtrFieldType(zcu);
- const opt_slice_ptr_ty = try zcu.optionalType(slice_ptr_ty.toIntern());
- break :rhs try zcu.nullValue(opt_slice_ptr_ty);
- } else rhs: {
- try writer.writeAll(".is_null");
- break :rhs Value.true;
+ const opt_ctype = try f.ctypeFromType(optional_ty, .complete);
+ const rhs = switch (opt_ctype.info(ctype_pool)) {
+ .basic, .pointer => rhs: {
+ if (is_ptr)
+ try f.writeCValueDeref(writer, operand)
+ else
+ try f.writeCValue(writer, operand, .Other);
+ break :rhs if (opt_ctype.isBool())
+ "true"
+ else if (opt_ctype.isInteger())
+ "0"
+ else
+ "NULL";
+ },
+ .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) {
+ .is_null, .payload => rhs: {
+ if (is_ptr)
+ try f.writeCValueDerefMember(writer, operand, .{ .identifier = "is_null" })
+ else
+ try f.writeCValueMember(writer, operand, .{ .identifier = "is_null" });
+ break :rhs "true";
+ },
+ .ptr, .len => rhs: {
+ if (is_ptr)
+ try f.writeCValueDerefMember(writer, operand, .{ .identifier = "ptr" })
+ else
+ try f.writeCValueMember(writer, operand, .{ .identifier = "ptr" });
+ break :rhs "NULL";
+ },
+ else => unreachable,
+ },
};
- try writer.writeByte(' ');
- try writer.writeAll(operator);
- try writer.writeByte(' ');
- try f.object.dg.renderValue(writer, rhs, .Other);
- try writer.writeAll(";\n");
- return local;
-}
-
-fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
- const zcu = f.object.dg.zcu;
- const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
-
- const operand = try f.resolveInst(ty_op.operand);
- try reap(f, inst, &.{ty_op.operand});
- const opt_ty = f.typeOf(ty_op.operand);
-
- const payload_ty = opt_ty.optionalChild(zcu);
-
- if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
- return .none;
- }
-
- const inst_ty = f.typeOfIndex(inst);
- const writer = f.object.writer();
- const local = try f.allocLocal(inst, inst_ty);
-
- if (opt_ty.optionalReprIsPayload(zcu)) {
- try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = ");
- try f.writeCValue(writer, operand, .Other);
- try writer.writeAll(";\n");
- return local;
- }
-
- const a = try Assignment.start(f, writer, inst_ty);
- try f.writeCValue(writer, local, .Other);
- try a.assign(f, writer);
- try f.writeCValueMember(writer, operand, .{ .identifier = "payload" });
+ try writer.writeAll(compareOperatorC(operator));
+ try writer.writeAll(rhs);
try a.end(f, writer);
return local;
}
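
A hedged plain-C sketch of the null tests selected above, one per optional representation; every type, width, and function name here is illustrative only.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef struct { bool is_null; uint32_t payload; } opt_u32;        /* is_null/payload pair */
typedef struct { const uint8_t *ptr; size_t len; } opt_slice_u8;   /* repr is the payload slice */

static bool void_opt_is_null(bool opt)          { return opt == true; }  /* zero-bit payload: the bool is the is_null flag */
static bool err_opt_is_null(uint16_t opt)       { return opt == 0; }     /* error-set payload: 0 means null */
static bool ptr_opt_is_null(const int *opt)     { return opt == NULL; }  /* pointer-like optional */
static bool slice_opt_is_null(opt_slice_u8 opt) { return opt.ptr == NULL; }
static bool struct_opt_is_null(opt_u32 opt)     { return opt.is_null == true; }
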
-fn airOptionalPayloadPtr(f: *Function, inst: Air.Inst.Index) !CValue {
+fn airOptionalPayload(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
const zcu = f.object.dg.zcu;
+ const ctype_pool = &f.object.dg.ctype_pool;
const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
- const writer = f.object.writer();
- const operand = try f.resolveInst(ty_op.operand);
- try reap(f, inst, &.{ty_op.operand});
- const ptr_ty = f.typeOf(ty_op.operand);
- const opt_ty = ptr_ty.childType(zcu);
const inst_ty = f.typeOfIndex(inst);
+ const operand_ty = f.typeOf(ty_op.operand);
+ const opt_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty;
+ const opt_ctype = try f.ctypeFromType(opt_ty, .complete);
+ if (opt_ctype.isBool()) return if (is_ptr) .{ .undef = inst_ty } else .none;
- if (!inst_ty.childType(zcu).hasRuntimeBitsIgnoreComptime(zcu)) {
- return .{ .undef = inst_ty };
- }
-
- const local = try f.allocLocal(inst, inst_ty);
- try f.writeCValue(writer, local, .Other);
-
- if (opt_ty.optionalReprIsPayload(zcu)) {
- // the operand is just a regular pointer, no need to do anything special.
- // *?*T -> **T and ?*T -> *T are **T -> **T and *T -> *T in C
- try writer.writeAll(" = ");
- try f.writeCValue(writer, operand, .Other);
- } else {
- try writer.writeAll(" = &");
- try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" });
+ const operand = try f.resolveInst(ty_op.operand);
+ switch (opt_ctype.info(ctype_pool)) {
+ .basic, .pointer => return f.moveCValue(inst, inst_ty, operand),
+ .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) {
+ .is_null, .payload => {
+ const writer = f.object.writer();
+ const local = try f.allocLocal(inst, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
+ try f.writeCValue(writer, local, .Other);
+ try a.assign(f, writer);
+ if (is_ptr) {
+ try writer.writeByte('&');
+ try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" });
+ } else try f.writeCValueMember(writer, operand, .{ .identifier = "payload" });
+ try a.end(f, writer);
+ return local;
+ },
+ .ptr, .len => return f.moveCValue(inst, inst_ty, operand),
+ else => unreachable,
+ },
}
- try writer.writeAll(";\n");
- return local;
}
fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -5410,38 +5525,46 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
try reap(f, inst, &.{ty_op.operand});
const operand_ty = f.typeOf(ty_op.operand);
- const opt_ty = operand_ty.childType(zcu);
-
const inst_ty = f.typeOfIndex(inst);
-
- if (opt_ty.optionalReprIsPayload(zcu)) {
- if (f.liveness.isUnused(inst)) {
- return .none;
- }
- const local = try f.allocLocal(inst, inst_ty);
- // The payload and the optional are the same value.
- // Setting to non-null will be done when the payload is set.
- try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = ");
- try f.writeCValue(writer, operand, .Other);
- try writer.writeAll(";\n");
- return local;
- } else {
- try f.writeCValueDeref(writer, operand);
- try writer.writeAll(".is_null = ");
- try f.object.dg.renderValue(writer, Value.false, .Initializer);
- try writer.writeAll(";\n");
-
- if (f.liveness.isUnused(inst)) {
+ const opt_ctype = try f.ctypeFromType(operand_ty.childType(zcu), .complete);
+ switch (opt_ctype.info(&f.object.dg.ctype_pool)) {
+ .basic => {
+ const a = try Assignment.start(f, writer, opt_ctype);
+ try f.writeCValueDeref(writer, operand);
+ try a.assign(f, writer);
+ try f.object.dg.renderValue(writer, Value.false, .Initializer);
+ try a.end(f, writer);
return .none;
- }
-
- const local = try f.allocLocal(inst, inst_ty);
- try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = &");
- try f.writeCValueDeref(writer, operand);
- try writer.writeAll(".payload;\n");
- return local;
+ },
+ .pointer => {
+ if (f.liveness.isUnused(inst)) return .none;
+ const local = try f.allocLocal(inst, inst_ty);
+ const a = try Assignment.start(f, writer, opt_ctype);
+ try f.writeCValue(writer, local, .Other);
+ try a.assign(f, writer);
+ try f.writeCValue(writer, operand, .Other);
+ try a.end(f, writer);
+ return local;
+ },
+ .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => {
+ {
+ const a = try Assignment.start(f, writer, opt_ctype);
+ try f.writeCValueDerefMember(writer, operand, .{ .identifier = "is_null" });
+ try a.assign(f, writer);
+ try f.object.dg.renderValue(writer, Value.false, .Initializer);
+ try a.end(f, writer);
+ }
+ if (f.liveness.isUnused(inst)) return .none;
+ const local = try f.allocLocal(inst, inst_ty);
+ const a = try Assignment.start(f, writer, opt_ctype);
+ try f.writeCValue(writer, local, .Other);
+ try a.assign(f, writer);
+ try writer.writeByte('&');
+ try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" });
+ try a.end(f, writer);
+ return local;
+ },
}
}
@@ -5453,49 +5576,39 @@ fn fieldLocation(
) union(enum) {
begin: void,
field: CValue,
- byte_offset: u32,
- end: void,
+ byte_offset: u64,
} {
const ip = &zcu.intern_pool;
const container_ty = Type.fromInterned(ip.indexToKey(container_ptr_ty.toIntern()).ptr_type.child);
switch (ip.indexToKey(container_ty.toIntern())) {
.struct_type => {
const loaded_struct = ip.loadStructType(container_ty.toIntern());
- switch (loaded_struct.layout) {
- .auto, .@"extern" => {
- var field_it = loaded_struct.iterateRuntimeOrder(ip);
- var before = true;
- while (field_it.next()) |next_field_index| {
- if (next_field_index == field_index) before = false;
- if (before) continue;
- const field_type = Type.fromInterned(loaded_struct.field_types.get(ip)[next_field_index]);
- if (!field_type.hasRuntimeBitsIgnoreComptime(zcu)) continue;
- return .{ .field = if (loaded_struct.fieldName(ip, next_field_index).unwrap()) |field_name|
- .{ .identifier = ip.stringToSlice(field_name) }
- else
- .{ .field = next_field_index } };
- }
- return if (container_ty.hasRuntimeBitsIgnoreComptime(zcu)) .end else .begin;
- },
- .@"packed" => return if (field_ptr_ty.ptrInfo(zcu).packed_offset.host_size == 0)
+ return switch (loaded_struct.layout) {
+ .auto, .@"extern" => if (!container_ty.hasRuntimeBitsIgnoreComptime(zcu))
+ .begin
+ else if (!field_ptr_ty.childType(zcu).hasRuntimeBitsIgnoreComptime(zcu))
+ .{ .byte_offset = loaded_struct.offsets.get(ip)[field_index] }
+ else
+ .{ .field = if (loaded_struct.fieldName(ip, field_index).unwrap()) |field_name|
+ .{ .identifier = field_name.toSlice(ip) }
+ else
+ .{ .field = field_index } },
+ .@"packed" => if (field_ptr_ty.ptrInfo(zcu).packed_offset.host_size == 0)
.{ .byte_offset = @divExact(zcu.structPackedFieldBitOffset(loaded_struct, field_index) +
container_ptr_ty.ptrInfo(zcu).packed_offset.bit_offset, 8) }
else
.begin,
- }
- },
- .anon_struct_type => |anon_struct_info| {
- for (field_index..anon_struct_info.types.len) |next_field_index| {
- if (anon_struct_info.values.get(ip)[next_field_index] != .none) continue;
- const field_type = Type.fromInterned(anon_struct_info.types.get(ip)[next_field_index]);
- if (!field_type.hasRuntimeBitsIgnoreComptime(zcu)) continue;
- return .{ .field = if (anon_struct_info.fieldName(ip, next_field_index).unwrap()) |field_name|
- .{ .identifier = ip.stringToSlice(field_name) }
- else
- .{ .field = next_field_index } };
- }
- return if (container_ty.hasRuntimeBitsIgnoreComptime(zcu)) .end else .begin;
+ };
},
+ .anon_struct_type => |anon_struct_info| return if (!container_ty.hasRuntimeBitsIgnoreComptime(zcu))
+ .begin
+ else if (!field_ptr_ty.childType(zcu).hasRuntimeBitsIgnoreComptime(zcu))
+ .{ .byte_offset = container_ty.structFieldOffset(field_index, zcu) }
+ else
+ .{ .field = if (anon_struct_info.fieldName(ip, field_index).unwrap()) |field_name|
+ .{ .identifier = field_name.toSlice(ip) }
+ else
+ .{ .field = field_index } },
.union_type => {
const loaded_union = ip.loadUnionType(container_ty.toIntern());
switch (loaded_union.getLayout(ip)) {
@@ -5508,9 +5621,9 @@ fn fieldLocation(
.begin;
const field_name = loaded_union.loadTagType(ip).names.get(ip)[field_index];
return .{ .field = if (loaded_union.hasTag(ip))
- .{ .payload_identifier = ip.stringToSlice(field_name) }
+ .{ .payload_identifier = field_name.toSlice(ip) }
else
- .{ .identifier = ip.stringToSlice(field_name) } };
+ .{ .identifier = field_name.toSlice(ip) } };
},
.@"packed" => return .begin,
}
@@ -5591,10 +5704,6 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
try f.fmtIntLiteral(try zcu.intValue(Type.usize, byte_offset)),
});
},
- .end => {
- try f.writeCValue(writer, field_ptr_val, .Other);
- try writer.print(" - {}", .{try f.fmtIntLiteral(try zcu.intValue(Type.usize, 1))});
- },
}
try writer.writeAll(";\n");
@@ -5639,11 +5748,6 @@ fn fieldPtr(
try f.fmtIntLiteral(try zcu.intValue(Type.usize, byte_offset)),
});
},
- .end => {
- try writer.writeByte('(');
- try f.writeCValue(writer, container_ptr_val, .Other);
- try writer.print(" + {})", .{try f.fmtIntLiteral(try zcu.intValue(Type.usize, 1))});
- },
}
try writer.writeAll(";\n");
@@ -5675,7 +5779,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const loaded_struct = ip.loadStructType(struct_ty.toIntern());
switch (loaded_struct.layout) {
.auto, .@"extern" => break :field_name if (loaded_struct.fieldName(ip, extra.field_index).unwrap()) |field_name|
- .{ .identifier = ip.stringToSlice(field_name) }
+ .{ .identifier = field_name.toSlice(ip) }
else
.{ .field = extra.field_index },
.@"packed" => {
@@ -5720,20 +5824,22 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
if (inst_ty.eql(field_int_ty, f.object.dg.zcu)) return temp_local;
const local = try f.allocLocal(inst, inst_ty);
- try writer.writeAll("memcpy(");
- try f.writeCValue(writer, .{ .local_ref = local.new_local }, .FunctionArgument);
- try writer.writeAll(", ");
- try f.writeCValue(writer, .{ .local_ref = temp_local.new_local }, .FunctionArgument);
- try writer.writeAll(", sizeof(");
- try f.renderType(writer, inst_ty);
- try writer.writeAll("));\n");
+ if (local.new_local != temp_local.new_local) {
+ try writer.writeAll("memcpy(");
+ try f.writeCValue(writer, .{ .local_ref = local.new_local }, .FunctionArgument);
+ try writer.writeAll(", ");
+ try f.writeCValue(writer, .{ .local_ref = temp_local.new_local }, .FunctionArgument);
+ try writer.writeAll(", sizeof(");
+ try f.renderType(writer, inst_ty);
+ try writer.writeAll("));\n");
+ }
try freeLocal(f, inst, temp_local.new_local, null);
return local;
},
}
},
.anon_struct_type => |anon_struct_info| if (anon_struct_info.fieldName(ip, extra.field_index).unwrap()) |field_name|
- .{ .identifier = ip.stringToSlice(field_name) }
+ .{ .identifier = field_name.toSlice(ip) }
else
.{ .field = extra.field_index },
.union_type => field_name: {
@@ -5742,9 +5848,9 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
.auto, .@"extern" => {
const name = loaded_union.loadTagType(ip).names.get(ip)[extra.field_index];
break :field_name if (loaded_union.hasTag(ip))
- .{ .payload_identifier = ip.stringToSlice(name) }
+ .{ .payload_identifier = name.toSlice(ip) }
else
- .{ .identifier = ip.stringToSlice(name) };
+ .{ .identifier = name.toSlice(ip) };
},
.@"packed" => {
const operand_lval = if (struct_byval == .constant) blk: {
@@ -5755,20 +5861,23 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
break :blk operand_local;
} else struct_byval;
-
const local = try f.allocLocal(inst, inst_ty);
- try writer.writeAll("memcpy(&");
- try f.writeCValue(writer, local, .Other);
- try writer.writeAll(", &");
- try f.writeCValue(writer, operand_lval, .Other);
- try writer.writeAll(", sizeof(");
- try f.renderType(writer, inst_ty);
- try writer.writeAll("));\n");
-
- if (struct_byval == .constant) {
- try freeLocal(f, inst, operand_lval.new_local, null);
+ if (switch (local) {
+ .new_local, .local => |local_index| switch (operand_lval) {
+ .new_local, .local => |operand_local_index| local_index != operand_local_index,
+ else => true,
+ },
+ else => true,
+ }) {
+ try writer.writeAll("memcpy(&");
+ try f.writeCValue(writer, local, .Other);
+ try writer.writeAll(", &");
+ try f.writeCValue(writer, operand_lval, .Other);
+ try writer.writeAll(", sizeof(");
+ try f.renderType(writer, inst_ty);
+ try writer.writeAll("));\n");
}
-
+ try f.freeCValue(inst, operand_lval);
return local;
},
}
@@ -5777,7 +5886,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
};
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try f.writeCValueMember(writer, struct_byval, field_name);
@@ -5850,7 +5959,7 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu
}
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
if (is_ptr) {
@@ -5862,35 +5971,42 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu
}
fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue {
- const zcu = f.object.dg.zcu;
+ const ctype_pool = &f.object.dg.ctype_pool;
const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
const inst_ty = f.typeOfIndex(inst);
- const repr_is_payload = inst_ty.optionalReprIsPayload(zcu);
- const payload_ty = f.typeOf(ty_op.operand);
- const payload = try f.resolveInst(ty_op.operand);
- try reap(f, inst, &.{ty_op.operand});
+ const inst_ctype = try f.ctypeFromType(inst_ty, .complete);
+ if (inst_ctype.isBool()) return .{ .constant = Value.true };
- const writer = f.object.writer();
- const local = try f.allocLocal(inst, inst_ty);
- {
- const a = try Assignment.start(f, writer, payload_ty);
- if (repr_is_payload)
- try f.writeCValue(writer, local, .Other)
- else
- try f.writeCValueMember(writer, local, .{ .identifier = "payload" });
- try a.assign(f, writer);
- try f.writeCValue(writer, payload, .Other);
- try a.end(f, writer);
- }
- if (!repr_is_payload) {
- const a = try Assignment.start(f, writer, Type.bool);
- try f.writeCValueMember(writer, local, .{ .identifier = "is_null" });
- try a.assign(f, writer);
- try f.object.dg.renderValue(writer, Value.false, .Other);
- try a.end(f, writer);
+ const operand = try f.resolveInst(ty_op.operand);
+ switch (inst_ctype.info(ctype_pool)) {
+ .basic, .pointer => return f.moveCValue(inst, inst_ty, operand),
+ .aligned, .array, .vector, .fwd_decl, .function => unreachable,
+ .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) {
+ .is_null, .payload => {
+ const operand_ctype = try f.ctypeFromType(f.typeOf(ty_op.operand), .complete);
+ const writer = f.object.writer();
+ const local = try f.allocLocal(inst, inst_ty);
+ {
+ const a = try Assignment.start(f, writer, CType.bool);
+ try f.writeCValueMember(writer, local, .{ .identifier = "is_null" });
+ try a.assign(f, writer);
+ try writer.writeAll("false");
+ try a.end(f, writer);
+ }
+ {
+ const a = try Assignment.start(f, writer, operand_ctype);
+ try f.writeCValueMember(writer, local, .{ .identifier = "payload" });
+ try a.assign(f, writer);
+ try f.writeCValue(writer, operand, .Initializer);
+ try a.end(f, writer);
+ }
+ return local;
+ },
+ .ptr, .len => return f.moveCValue(inst, inst_ty, operand),
+ else => unreachable,
+ },
}
- return local;
}
fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -5913,14 +6029,14 @@ fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
}
if (!repr_is_err) {
- const a = try Assignment.start(f, writer, payload_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(payload_ty, .complete));
try f.writeCValueMember(writer, local, .{ .identifier = "payload" });
try a.assign(f, writer);
try f.object.dg.renderUndefValue(writer, payload_ty, .Other);
try a.end(f, writer);
}
{
- const a = try Assignment.start(f, writer, err_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(err_ty, .complete));
if (repr_is_err)
try f.writeCValue(writer, local, .Other)
else
@@ -5936,31 +6052,43 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
const zcu = f.object.dg.zcu;
const writer = f.object.writer();
const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
+ const inst_ty = f.typeOfIndex(inst);
const operand = try f.resolveInst(ty_op.operand);
- const error_union_ty = f.typeOf(ty_op.operand).childType(zcu);
+ const operand_ty = f.typeOf(ty_op.operand);
+ const error_union_ty = operand_ty.childType(zcu);
const payload_ty = error_union_ty.errorUnionPayload(zcu);
const err_int_ty = try zcu.errorIntType();
const no_err = try zcu.intValue(err_int_ty, 0);
+ try reap(f, inst, &.{ty_op.operand});
// First, set the non-error value.
if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(operand_ty, .complete));
try f.writeCValueDeref(writer, operand);
- try writer.print(" = {};\n", .{try f.fmtIntLiteral(no_err)});
- return operand;
+ try a.assign(f, writer);
+ try writer.print("{}", .{try f.fmtIntLiteral(no_err)});
+ try a.end(f, writer);
+ return .none;
+ }
+ {
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(err_int_ty, .complete));
+ try f.writeCValueDerefMember(writer, operand, .{ .identifier = "error" });
+ try a.assign(f, writer);
+ try writer.print("{}", .{try f.fmtIntLiteral(no_err)});
+ try a.end(f, writer);
}
- try reap(f, inst, &.{ty_op.operand});
- try f.writeCValueDeref(writer, operand);
- try writer.print(".error = {};\n", .{try f.fmtIntLiteral(no_err)});
// Then return the payload pointer (only if it is used)
if (f.liveness.isUnused(inst)) return .none;
- const local = try f.allocLocal(inst, f.typeOfIndex(inst));
+ const local = try f.allocLocal(inst, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = &(");
- try f.writeCValueDeref(writer, operand);
- try writer.writeAll(").payload;\n");
+ try a.assign(f, writer);
+ try writer.writeByte('&');
+ try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" });
+ try a.end(f, writer);
return local;
}
@@ -5993,14 +6121,14 @@ fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
if (!repr_is_err) {
- const a = try Assignment.start(f, writer, payload_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(payload_ty, .complete));
try f.writeCValueMember(writer, local, .{ .identifier = "payload" });
try a.assign(f, writer);
try f.writeCValue(writer, payload, .Other);
try a.end(f, writer);
}
{
- const a = try Assignment.start(f, writer, err_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(err_ty, .complete));
if (repr_is_err)
try f.writeCValue(writer, local, .Other)
else
@@ -6025,7 +6153,7 @@ fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const
const payload_ty = err_union_ty.errorUnionPayload(zcu);
const error_ty = err_union_ty.errorUnionSet(zcu);
- const a = try Assignment.start(f, writer, Type.bool);
+ const a = try Assignment.start(f, writer, CType.bool);
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
const err_int_ty = try zcu.errorIntType();
@@ -6062,7 +6190,7 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
const array_ty = operand_ty.childType(zcu);
{
- const a = try Assignment.start(f, writer, ptr_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(ptr_ty, .complete));
try f.writeCValueMember(writer, local, .{ .identifier = "ptr" });
try a.assign(f, writer);
if (operand == .undef) {
@@ -6088,7 +6216,7 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
try a.end(f, writer);
}
{
- const a = try Assignment.start(f, writer, Type.usize);
+ const a = try Assignment.start(f, writer, CType.usize);
try f.writeCValueMember(writer, local, .{ .identifier = "len" });
try a.assign(f, writer);
try writer.print("{}", .{
@@ -6123,7 +6251,7 @@ fn airFloatCast(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
const v = try Vectorize.start(f, inst, writer, operand_ty);
- const a = try Assignment.start(f, writer, scalar_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(scalar_ty, .complete));
try f.writeCValue(writer, local, .Other);
try v.elem(f, writer);
try a.assign(f, writer);
@@ -6165,11 +6293,10 @@ fn airIntFromPtr(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(" = (");
try f.renderType(writer, inst_ty);
try writer.writeByte(')');
- if (operand_ty.isSlice(zcu)) {
- try f.writeCValueMember(writer, operand, .{ .identifier = "ptr" });
- } else {
+ if (operand_ty.isSlice(zcu))
+ try f.writeCValueMember(writer, operand, .{ .identifier = "ptr" })
+ else
try f.writeCValue(writer, operand, .Other);
- }
try writer.writeAll(";\n");
return local;
}
@@ -6338,9 +6465,10 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
const new_value = try f.resolveInst(extra.new_value);
const ptr_ty = f.typeOf(extra.ptr);
const ty = ptr_ty.childType(zcu);
+ const ctype = try f.ctypeFromType(ty, .complete);
const writer = f.object.writer();
- const new_value_mat = try Materialize.start(f, inst, writer, ty, new_value);
+ const new_value_mat = try Materialize.start(f, inst, ty, new_value);
try reap(f, inst, &.{ extra.ptr, extra.expected_value, extra.new_value });
const repr_ty = if (ty.isRuntimeFloat())
@@ -6351,7 +6479,7 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
const local = try f.allocLocal(inst, inst_ty);
if (inst_ty.isPtrLikeOptional(zcu)) {
{
- const a = try Assignment.start(f, writer, ty);
+ const a = try Assignment.start(f, writer, ctype);
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try f.writeCValue(writer, expected_value, .Other);
@@ -6381,7 +6509,7 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
try writer.writeAll(") {\n");
f.object.indent_writer.pushIndent();
{
- const a = try Assignment.start(f, writer, ty);
+ const a = try Assignment.start(f, writer, ctype);
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try writer.writeAll("NULL");
@@ -6391,14 +6519,14 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
try writer.writeAll("}\n");
} else {
{
- const a = try Assignment.start(f, writer, ty);
+ const a = try Assignment.start(f, writer, ctype);
try f.writeCValueMember(writer, local, .{ .identifier = "payload" });
try a.assign(f, writer);
try f.writeCValue(writer, expected_value, .Other);
try a.end(f, writer);
}
{
- const a = try Assignment.start(f, writer, Type.bool);
+ const a = try Assignment.start(f, writer, CType.bool);
try f.writeCValueMember(writer, local, .{ .identifier = "is_null" });
try a.assign(f, writer);
try writer.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor});
@@ -6444,7 +6572,7 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(extra.operand);
const writer = f.object.writer();
- const operand_mat = try Materialize.start(f, inst, writer, ty, operand);
+ const operand_mat = try Materialize.start(f, inst, ty, operand);
try reap(f, inst, &.{ pl_op.operand, extra.operand });
const repr_bits = @as(u16, @intCast(ty.abiSize(zcu) * 8));
@@ -6533,7 +6661,7 @@ fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CVa
const element = try f.resolveInst(bin_op.rhs);
const writer = f.object.writer();
- const element_mat = try Materialize.start(f, inst, writer, ty, element);
+ const element_mat = try Materialize.start(f, inst, ty, element);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const repr_ty = if (ty.isRuntimeFloat())
@@ -6644,7 +6772,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
try f.writeCValue(writer, index, .Other);
try writer.writeAll(") ");
- const a = try Assignment.start(f, writer, elem_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(elem_ty, .complete));
try writer.writeAll("((");
try f.renderType(writer, elem_ptr_ty);
try writer.writeByte(')');
@@ -6669,7 +6797,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
.Slice => {
try f.writeCValueMember(writer, dest_slice, .{ .identifier = "ptr" });
try writer.writeAll(", ");
- try f.writeCValue(writer, bitcasted.c_value, .FunctionArgument);
+ try f.writeCValue(writer, bitcasted, .FunctionArgument);
try writer.writeAll(", ");
try f.writeCValueMember(writer, dest_slice, .{ .identifier = "len" });
try writer.writeAll(");\n");
@@ -6680,12 +6808,12 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
try f.writeCValue(writer, dest_slice, .FunctionArgument);
try writer.writeAll(", ");
- try f.writeCValue(writer, bitcasted.c_value, .FunctionArgument);
+ try f.writeCValue(writer, bitcasted, .FunctionArgument);
try writer.print(", {d});\n", .{len});
},
.Many, .C => unreachable,
}
- try bitcasted.free(f);
+ try f.freeCValue(inst, bitcasted);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
return .none;
}
@@ -6732,7 +6860,7 @@ fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
const tag_ty = union_ty.unionTagTypeSafety(zcu).?;
const writer = f.object.writer();
- const a = try Assignment.start(f, writer, tag_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(tag_ty, .complete));
try f.writeCValueDerefMember(writer, union_ptr, .{ .identifier = "tag" });
try a.assign(f, writer);
try f.writeCValue(writer, new_tag, .Other);
@@ -6754,7 +6882,7 @@ fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- const a = try Assignment.start(f, writer, inst_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
try f.writeCValue(writer, local, .Other);
try a.assign(f, writer);
try f.writeCValueMember(writer, operand, .{ .identifier = "tag" });
@@ -6812,7 +6940,7 @@ fn airSplat(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
const v = try Vectorize.start(f, inst, writer, inst_ty);
- const a = try Assignment.init(f, inst_scalar_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_scalar_ty, .complete));
try f.writeCValue(writer, local, .Other);
try v.elem(f, writer);
try a.assign(f, writer);
@@ -7065,7 +7193,9 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const local = try f.allocLocal(inst, inst_ty);
switch (ip.indexToKey(inst_ty.toIntern())) {
inline .array_type, .vector_type => |info, tag| {
- const a = try Assignment.init(f, Type.fromInterned(info.child));
+ const a: Assignment = .{
+ .ctype = try f.ctypeFromType(Type.fromInterned(info.child), .complete),
+ };
for (resolved_elements, 0..) |element, i| {
try a.restart(f, writer);
try f.writeCValue(writer, local, .Other);
@@ -7092,9 +7222,9 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const field_ty = Type.fromInterned(loaded_struct.field_types.get(ip)[field_index]);
if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;
- const a = try Assignment.start(f, writer, field_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(field_ty, .complete));
try f.writeCValueMember(writer, local, if (loaded_struct.fieldName(ip, field_index).unwrap()) |field_name|
- .{ .identifier = ip.stringToSlice(field_name) }
+ .{ .identifier = field_name.toSlice(ip) }
else
.{ .field = field_index });
try a.assign(f, writer);
@@ -7172,9 +7302,9 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const field_ty = Type.fromInterned(anon_struct_info.types.get(ip)[field_index]);
if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;
- const a = try Assignment.start(f, writer, field_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(field_ty, .complete));
try f.writeCValueMember(writer, local, if (anon_struct_info.fieldName(ip, field_index).unwrap()) |field_name|
- .{ .identifier = ip.stringToSlice(field_name) }
+ .{ .identifier = field_name.toSlice(ip) }
else
.{ .field = field_index });
try a.assign(f, writer);
@@ -7202,13 +7332,7 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, union_ty);
- if (loaded_union.getLayout(ip) == .@"packed") {
- try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = ");
- try f.writeCValue(writer, payload, .Initializer);
- try writer.writeAll(";\n");
- return local;
- }
+ if (loaded_union.getLayout(ip) == .@"packed") return f.moveCValue(inst, union_ty, payload);
const field: CValue = if (union_ty.unionTagTypeSafety(zcu)) |tag_ty| field: {
const layout = union_ty.unionGetLayout(zcu);
@@ -7216,16 +7340,16 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
const field_index = tag_ty.enumFieldIndex(field_name, zcu).?;
const tag_val = try zcu.enumValueFieldIndex(tag_ty, field_index);
- const a = try Assignment.start(f, writer, tag_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(tag_ty, .complete));
try f.writeCValueMember(writer, local, .{ .identifier = "tag" });
try a.assign(f, writer);
try writer.print("{}", .{try f.fmtIntLiteral(try tag_val.intFromEnum(tag_ty, zcu))});
try a.end(f, writer);
}
- break :field .{ .payload_identifier = ip.stringToSlice(field_name) };
- } else .{ .identifier = ip.stringToSlice(field_name) };
+ break :field .{ .payload_identifier = field_name.toSlice(ip) };
+ } else .{ .identifier = field_name.toSlice(ip) };
- const a = try Assignment.start(f, writer, payload_ty);
+ const a = try Assignment.start(f, writer, try f.ctypeFromType(payload_ty, .complete));
try f.writeCValueMember(writer, local, field);
try a.assign(f, writer);
try f.writeCValue(writer, payload, .Other);
@@ -7680,11 +7804,13 @@ fn StringLiteral(comptime WriterType: type) type {
// MSVC throws C2078 if an array of size 65536 or greater is initialized with a string literal,
// regardless of the length of the string literal initializing it. Array initializer syntax is
// used instead.
- const max_string_initializer_len = 65535;
+ // C99 only requires 4095.
+ const max_string_initializer_len = @min(65535, 4095);
// MSVC has a length limit of 16380 per string literal (before concatenation)
+ // C99 only requires 4095.
const max_char_len = 4;
- const max_literal_len = 16380 - max_char_len;
+ const max_literal_len = @min(16380 - max_char_len, 4095);
return struct {
len: u64,
@@ -7856,13 +7982,13 @@ fn formatIntLiteral(
} = switch (data.ctype.info(ctype_pool)) {
.basic => |basic_info| switch (basic_info) {
else => .{
- .ctype = .{ .index = .void },
+ .ctype = CType.void,
.count = 1,
.endian = .little,
.homogeneous = true,
},
.zig_u128, .zig_i128 => .{
- .ctype = .{ .index = .uint64_t },
+ .ctype = CType.u64,
.count = 2,
.endian = .big,
.homogeneous = false,
@@ -7978,28 +8104,12 @@ fn formatIntLiteral(
const Materialize = struct {
local: CValue,
- pub fn start(
- f: *Function,
- inst: Air.Inst.Index,
- writer: anytype,
- ty: Type,
- value: CValue,
- ) !Materialize {
- switch (value) {
- .local_ref, .constant, .decl_ref, .undef => {
- const local = try f.allocLocal(inst, ty);
-
- const a = try Assignment.start(f, writer, ty);
- try f.writeCValue(writer, local, .Other);
- try a.assign(f, writer);
- try f.writeCValue(writer, value, .Other);
- try a.end(f, writer);
-
- return .{ .local = local };
- },
- .new_local => |local| return .{ .local = .{ .local = local } },
- else => return .{ .local = value },
- }
+ pub fn start(f: *Function, inst: Air.Inst.Index, ty: Type, value: CValue) !Materialize {
+ return .{ .local = switch (value) {
+ .local_ref, .constant, .decl_ref, .undef => try f.moveCValue(inst, ty, value),
+ .new_local => |local| .{ .local = local },
+ else => value,
+ } };
}
pub fn mat(self: Materialize, f: *Function, writer: anytype) !void {
@@ -8007,22 +8117,15 @@ const Materialize = struct {
}
pub fn end(self: Materialize, f: *Function, inst: Air.Inst.Index) !void {
- switch (self.local) {
- .new_local => |local| try freeLocal(f, inst, local, null),
- else => {},
- }
+ try f.freeCValue(inst, self.local);
}
};
const Assignment = struct {
ctype: CType,
- pub fn init(f: *Function, ty: Type) !Assignment {
- return .{ .ctype = try f.ctypeFromType(ty, .complete) };
- }
-
- pub fn start(f: *Function, writer: anytype, ty: Type) !Assignment {
- const self = try init(f, ty);
+ pub fn start(f: *Function, writer: anytype, ctype: CType) !Assignment {
+ const self: Assignment = .{ .ctype = ctype };
try self.restart(f, writer);
return self;
}
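Aside: the common thread in the codegen/c.zig hunks above is that Assignment.start now takes an already-resolved CType (and Materialize.start no longer needs a writer), so callers either call try f.ctypeFromType(ty, .complete) once or use a named constant such as CType.bool. A minimal standalone sketch of the start/assign/end calling convention under that assumption (Ctype and Assignment here are simplified stand-ins, not the real types):

const std = @import("std");

const Ctype = enum { @"bool", uint32_t, uintptr_t };

const Assignment = struct {
    ctype: Ctype,

    // The caller resolves the C type once (or picks a named constant) and passes it in.
    fn start(writer: anytype, ctype: Ctype) !Assignment {
        _ = writer; // a real implementation could emit a prologue for array copies here
        return .{ .ctype = ctype };
    }
    fn assign(_: Assignment, writer: anytype) !void {
        try writer.writeAll(" = ");
    }
    fn end(_: Assignment, writer: anytype) !void {
        try writer.writeAll(";\n");
    }
};

test "emit t0 = a1; through start/assign/end" {
    var buf: [16]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    const w = fbs.writer();

    const a = try Assignment.start(w, .uint32_t);
    try w.writeAll("t0");
    try a.assign(w);
    try w.writeAll("a1");
    try a.end(w);
    try std.testing.expectEqualStrings("t0 = a1;\n", fbs.getWritten());
}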
diff --git a/src/codegen/c/Type.zig b/src/codegen/c/Type.zig
index 1c460acc6b..357677522c 100644
--- a/src/codegen/c/Type.zig
+++ b/src/codegen/c/Type.zig
@@ -1,13 +1,33 @@
index: CType.Index,
+pub const @"void": CType = .{ .index = .void };
+pub const @"bool": CType = .{ .index = .bool };
+pub const @"i8": CType = .{ .index = .int8_t };
+pub const @"u8": CType = .{ .index = .uint8_t };
+pub const @"i16": CType = .{ .index = .int16_t };
+pub const @"u16": CType = .{ .index = .uint16_t };
+pub const @"i32": CType = .{ .index = .int32_t };
+pub const @"u32": CType = .{ .index = .uint32_t };
+pub const @"i64": CType = .{ .index = .int64_t };
+pub const @"u64": CType = .{ .index = .uint64_t };
+pub const @"i128": CType = .{ .index = .zig_i128 };
+pub const @"u128": CType = .{ .index = .zig_u128 };
+pub const @"isize": CType = .{ .index = .intptr_t };
+pub const @"usize": CType = .{ .index = .uintptr_t };
+pub const @"f16": CType = .{ .index = .zig_f16 };
+pub const @"f32": CType = .{ .index = .zig_f32 };
+pub const @"f64": CType = .{ .index = .zig_f64 };
+pub const @"f80": CType = .{ .index = .zig_f80 };
+pub const @"f128": CType = .{ .index = .zig_f128 };
+
pub fn fromPoolIndex(pool_index: usize) CType {
return .{ .index = @enumFromInt(CType.Index.first_pool_index + pool_index) };
}
pub fn toPoolIndex(ctype: CType) ?u32 {
- const pool_index, const is_basic =
+ const pool_index, const is_null =
@subWithOverflow(@intFromEnum(ctype.index), CType.Index.first_pool_index);
- return switch (is_basic) {
+ return switch (is_null) {
0 => pool_index,
1 => null,
};
@@ -710,20 +730,6 @@ pub const Kind = enum {
}
};
-pub const String = struct {
- index: String.Index,
-
- const Index = enum(u32) {
- _,
- };
-
- pub fn slice(string: String, pool: *const Pool) []const u8 {
- const start = pool.string_indices.items[@intFromEnum(string.index)];
- const end = pool.string_indices.items[@intFromEnum(string.index) + 1];
- return pool.string_bytes.items[start..end];
- }
-};
-
pub const Info = union(enum) {
basic: CType.Index,
pointer: Pointer,
@@ -766,7 +772,7 @@ pub const Info = union(enum) {
pub const AggregateTag = enum { @"enum", @"struct", @"union" };
pub const Field = struct {
- name: String,
+ name: Pool.String,
ctype: CType,
alignas: AlignAs,
@@ -812,12 +818,15 @@ pub const Info = union(enum) {
rhs_pool: *const Pool,
pool_adapter: anytype,
) bool {
- return std.meta.eql(lhs_field.alignas, rhs_field.alignas) and
- pool_adapter.eql(lhs_field.ctype, rhs_field.ctype) and std.mem.eql(
- u8,
- lhs_field.name.slice(lhs_pool),
- rhs_field.name.slice(rhs_pool),
- );
+ if (!std.meta.eql(lhs_field.alignas, rhs_field.alignas)) return false;
+ if (!pool_adapter.eql(lhs_field.ctype, rhs_field.ctype)) return false;
+ return if (lhs_field.name.toPoolSlice(lhs_pool)) |lhs_name|
+ if (rhs_field.name.toPoolSlice(rhs_pool)) |rhs_name|
+ std.mem.eql(u8, lhs_name, rhs_name)
+ else
+ false
+ else
+ lhs_field.name.index == rhs_field.name.index;
}
};
@@ -918,6 +927,86 @@ pub const Pool = struct {
const Map = std.AutoArrayHashMapUnmanaged(void, void);
+ pub const String = struct {
+ index: String.Index,
+
+ const FormatData = struct { string: String, pool: *const Pool };
+ fn format(
+ data: FormatData,
+ comptime fmt_str: []const u8,
+ _: std.fmt.FormatOptions,
+ writer: anytype,
+ ) @TypeOf(writer).Error!void {
+ if (fmt_str.len > 0) @compileError("invalid format string '" ++ fmt_str ++ "'");
+ if (data.string.toSlice(data.pool)) |slice|
+ try writer.writeAll(slice)
+ else
+ try writer.print("f{d}", .{@intFromEnum(data.string.index)});
+ }
+ pub fn fmt(str: String, pool: *const Pool) std.fmt.Formatter(format) {
+ return .{ .data = .{ .string = str, .pool = pool } };
+ }
+
+ fn fromUnnamed(index: u31) String {
+ return .{ .index = @enumFromInt(index) };
+ }
+
+ fn isNamed(str: String) bool {
+ return @intFromEnum(str.index) >= String.Index.first_named_index;
+ }
+
+ pub fn toSlice(str: String, pool: *const Pool) ?[]const u8 {
+ return str.toPoolSlice(pool) orelse if (str.isNamed()) @tagName(str.index) else null;
+ }
+
+ fn toPoolSlice(str: String, pool: *const Pool) ?[]const u8 {
+ if (str.toPoolIndex()) |pool_index| {
+ const start = pool.string_indices.items[pool_index + 0];
+ const end = pool.string_indices.items[pool_index + 1];
+ return pool.string_bytes.items[start..end];
+ } else return null;
+ }
+
+ fn fromPoolIndex(pool_index: usize) String {
+ return .{ .index = @enumFromInt(String.Index.first_pool_index + pool_index) };
+ }
+
+ fn toPoolIndex(str: String) ?u32 {
+ const pool_index, const is_null =
+ @subWithOverflow(@intFromEnum(str.index), String.Index.first_pool_index);
+ return switch (is_null) {
+ 0 => pool_index,
+ 1 => null,
+ };
+ }
+
+ const Index = enum(u32) {
+ array = first_named_index,
+ @"error",
+ is_null,
+ len,
+ payload,
+ ptr,
+ tag,
+ _,
+
+ const first_named_index: u32 = 1 << 31;
+ const first_pool_index: u32 = first_named_index + @typeInfo(String.Index).Enum.fields.len;
+ };
+
+ const Adapter = struct {
+ pool: *const Pool,
+ pub fn hash(_: @This(), slice: []const u8) Map.Hash {
+ return @truncate(Hasher.Impl.hash(1, slice));
+ }
+ pub fn eql(string_adapter: @This(), lhs_slice: []const u8, _: void, rhs_index: usize) bool {
+ const rhs_string = String.fromPoolIndex(rhs_index);
+ const rhs_slice = rhs_string.toPoolSlice(string_adapter.pool).?;
+ return std.mem.eql(u8, lhs_slice, rhs_slice);
+ }
+ };
+ };
+
pub const empty: Pool = .{
.map = .{},
.items = .{},
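Aside: the String.Index space introduced above is split into three bands: values below 1 << 31 encode unnamed fields ("fN"), a short run starting at first_named_index covers the predefined member names, and everything from first_pool_index up is an offset into the pool's string bytes; @subWithOverflow turns the band test into one subtraction plus an overflow bit. A self-contained sketch of that encoding with made-up field names:

const std = @import("std");

const Index = enum(u32) {
    is_null = first_named_index,
    payload,
    ptr,
    len,
    _,

    const first_named_index: u32 = 1 << 31;
    const first_pool_index: u32 = first_named_index + @typeInfo(Index).Enum.fields.len;
};

fn toPoolIndex(index: Index) ?u32 {
    const pool_index, const overflow =
        @subWithOverflow(@intFromEnum(index), Index.first_pool_index);
    return switch (overflow) {
        0 => pool_index, // at or past the pool band: a real pool offset
        1 => null, // below it: an unnamed or named index, no pool storage
    };
}

test toPoolIndex {
    try std.testing.expectEqual(@as(?u32, null), toPoolIndex(.payload));
    try std.testing.expectEqual(@as(?u32, null), toPoolIndex(@enumFromInt(7))); // unnamed "f7"
    try std.testing.expectEqual(@as(?u32, 0), toPoolIndex(@enumFromInt(Index.first_pool_index)));
}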
@@ -1200,26 +1289,26 @@ pub const Pool = struct {
kind: Kind,
) !CType {
switch (int_info.bits) {
- 0 => return .{ .index = .void },
+ 0 => return CType.void,
1...8 => switch (int_info.signedness) {
- .unsigned => return .{ .index = .uint8_t },
- .signed => return .{ .index = .int8_t },
+ .signed => return CType.i8,
+ .unsigned => return CType.u8,
},
9...16 => switch (int_info.signedness) {
- .unsigned => return .{ .index = .uint16_t },
- .signed => return .{ .index = .int16_t },
+ .signed => return CType.i16,
+ .unsigned => return CType.u16,
},
17...32 => switch (int_info.signedness) {
- .unsigned => return .{ .index = .uint32_t },
- .signed => return .{ .index = .int32_t },
+ .signed => return CType.i32,
+ .unsigned => return CType.u32,
},
33...64 => switch (int_info.signedness) {
- .unsigned => return .{ .index = .uint64_t },
- .signed => return .{ .index = .int64_t },
+ .signed => return CType.i64,
+ .unsigned => return CType.u64,
},
65...128 => switch (int_info.signedness) {
- .unsigned => return .{ .index = .zig_u128 },
- .signed => return .{ .index = .zig_i128 },
+ .signed => return CType.i128,
+ .unsigned => return CType.u128,
},
else => {
const target = &mod.resolved_target.result;
@@ -1235,7 +1324,7 @@ pub const Pool = struct {
if (!kind.isParameter()) return array_ctype;
var fields = [_]Info.Field{
.{
- .name = try pool.string(allocator, "array"),
+ .name = .{ .index = .array },
.ctype = array_ctype,
.alignas = AlignAs.fromAbiAlignment(abi_align),
},
@@ -1267,19 +1356,19 @@ pub const Pool = struct {
.null_type,
.undefined_type,
.enum_literal_type,
- => return .{ .index = .void },
- .u1_type, .u8_type => return .{ .index = .uint8_t },
- .i8_type => return .{ .index = .int8_t },
- .u16_type => return .{ .index = .uint16_t },
- .i16_type => return .{ .index = .int16_t },
- .u29_type, .u32_type => return .{ .index = .uint32_t },
- .i32_type => return .{ .index = .int32_t },
- .u64_type => return .{ .index = .uint64_t },
- .i64_type => return .{ .index = .int64_t },
- .u80_type, .u128_type => return .{ .index = .zig_u128 },
- .i128_type => return .{ .index = .zig_i128 },
- .usize_type => return .{ .index = .uintptr_t },
- .isize_type => return .{ .index = .intptr_t },
+ => return CType.void,
+ .u1_type, .u8_type => return CType.u8,
+ .i8_type => return CType.i8,
+ .u16_type => return CType.u16,
+ .i16_type => return CType.i16,
+ .u29_type, .u32_type => return CType.u32,
+ .i32_type => return CType.i32,
+ .u64_type => return CType.u64,
+ .i64_type => return CType.i64,
+ .u80_type, .u128_type => return CType.u128,
+ .i128_type => return CType.i128,
+ .usize_type => return CType.usize,
+ .isize_type => return CType.isize,
.c_char_type => return .{ .index = .char },
.c_short_type => return .{ .index = .short },
.c_ushort_type => return .{ .index = .@"unsigned short" },
@@ -1290,12 +1379,12 @@ pub const Pool = struct {
.c_longlong_type => return .{ .index = .@"long long" },
.c_ulonglong_type => return .{ .index = .@"unsigned long long" },
.c_longdouble_type => return .{ .index = .@"long double" },
- .f16_type => return .{ .index = .zig_f16 },
- .f32_type => return .{ .index = .zig_f32 },
- .f64_type => return .{ .index = .zig_f64 },
- .f80_type => return .{ .index = .zig_f80 },
- .f128_type => return .{ .index = .zig_f128 },
- .bool_type, .optional_noreturn_type => return .{ .index = .bool },
+ .f16_type => return CType.f16,
+ .f32_type => return CType.f32,
+ .f64_type => return CType.f64,
+ .f80_type => return CType.f80,
+ .f128_type => return CType.f128,
+ .bool_type, .optional_noreturn_type => return CType.bool,
.noreturn_type,
.anyframe_type,
.generic_poison_type,
@@ -1324,17 +1413,17 @@ pub const Pool = struct {
}, mod, kind),
.manyptr_u8_type,
=> return pool.getPointer(allocator, .{
- .elem_ctype = .{ .index = .uint8_t },
+ .elem_ctype = CType.u8,
}),
.manyptr_const_u8_type,
.manyptr_const_u8_sentinel_0_type,
=> return pool.getPointer(allocator, .{
- .elem_ctype = .{ .index = .uint8_t },
+ .elem_ctype = CType.u8,
.@"const" = true,
}),
.single_const_pointer_to_comptime_int_type,
=> return pool.getPointer(allocator, .{
- .elem_ctype = .{ .index = .void },
+ .elem_ctype = CType.void,
.@"const" = true,
}),
.slice_const_u8_type,
@@ -1343,16 +1432,16 @@ pub const Pool = struct {
const target = &mod.resolved_target.result;
var fields = [_]Info.Field{
.{
- .name = try pool.string(allocator, "ptr"),
+ .name = .{ .index = .ptr },
.ctype = try pool.getPointer(allocator, .{
- .elem_ctype = .{ .index = .uint8_t },
+ .elem_ctype = CType.u8,
.@"const" = true,
}),
.alignas = AlignAs.fromAbiAlignment(Type.ptrAbiAlignment(target.*)),
},
.{
- .name = try pool.string(allocator, "len"),
- .ctype = .{ .index = .uintptr_t },
+ .name = .{ .index = .len },
+ .ctype = CType.usize,
.alignas = AlignAs.fromAbiAlignment(
Type.intAbiAlignment(target.ptrBitWidth(), target.*),
),
@@ -1442,7 +1531,7 @@ pub const Pool = struct {
const target = &mod.resolved_target.result;
var fields = [_]Info.Field{
.{
- .name = try pool.string(allocator, "ptr"),
+ .name = .{ .index = .ptr },
.ctype = try pool.fromType(
allocator,
scratch,
@@ -1454,8 +1543,8 @@ pub const Pool = struct {
.alignas = AlignAs.fromAbiAlignment(Type.ptrAbiAlignment(target.*)),
},
.{
- .name = try pool.string(allocator, "len"),
- .ctype = .{ .index = .uintptr_t },
+ .name = .{ .index = .len },
+ .ctype = CType.usize,
.alignas = AlignAs.fromAbiAlignment(
Type.intAbiAlignment(target.ptrBitWidth(), target.*),
),
@@ -1465,8 +1554,8 @@ pub const Pool = struct {
},
},
.array_type => |array_info| {
- const len = array_info.len + @intFromBool(array_info.sentinel != .none);
- if (len == 0) return .{ .index = .void };
+ const len = array_info.lenIncludingSentinel();
+ if (len == 0) return CType.void;
const elem_type = Type.fromInterned(array_info.child);
const elem_ctype = try pool.fromType(
allocator,
@@ -1476,15 +1565,15 @@ pub const Pool = struct {
mod,
kind.noParameter(),
);
- if (elem_ctype.index == .void) return .{ .index = .void };
+ if (elem_ctype.index == .void) return CType.void;
const array_ctype = try pool.getArray(allocator, .{
.elem_ctype = elem_ctype,
- .len = array_info.len + @intFromBool(array_info.sentinel != .none),
+ .len = len,
});
if (!kind.isParameter()) return array_ctype;
var fields = [_]Info.Field{
.{
- .name = try pool.string(allocator, "array"),
+ .name = .{ .index = .array },
.ctype = array_ctype,
.alignas = AlignAs.fromAbiAlignment(elem_type.abiAlignment(zcu)),
},
@@ -1492,7 +1581,7 @@ pub const Pool = struct {
return pool.fromFields(allocator, .@"struct", &fields, kind);
},
.vector_type => |vector_info| {
- if (vector_info.len == 0) return .{ .index = .void };
+ if (vector_info.len == 0) return CType.void;
const elem_type = Type.fromInterned(vector_info.child);
const elem_ctype = try pool.fromType(
allocator,
@@ -1502,7 +1591,7 @@ pub const Pool = struct {
mod,
kind.noParameter(),
);
- if (elem_ctype.index == .void) return .{ .index = .void };
+ if (elem_ctype.index == .void) return CType.void;
const vector_ctype = try pool.getVector(allocator, .{
.elem_ctype = elem_ctype,
.len = vector_info.len,
@@ -1510,7 +1599,7 @@ pub const Pool = struct {
if (!kind.isParameter()) return vector_ctype;
var fields = [_]Info.Field{
.{
- .name = try pool.string(allocator, "array"),
+ .name = .{ .index = .array },
.ctype = vector_ctype,
.alignas = AlignAs.fromAbiAlignment(elem_type.abiAlignment(zcu)),
},
@@ -1518,7 +1607,7 @@ pub const Pool = struct {
return pool.fromFields(allocator, .@"struct", &fields, kind);
},
.opt_type => |payload_type| {
- if (ip.isNoReturn(payload_type)) return .{ .index = .void };
+ if (ip.isNoReturn(payload_type)) return CType.void;
const payload_ctype = try pool.fromType(
allocator,
scratch,
@@ -1527,7 +1616,7 @@ pub const Pool = struct {
mod,
kind.noParameter(),
);
- if (payload_ctype.index == .void) return .{ .index = .bool };
+ if (payload_ctype.index == .void) return CType.bool;
switch (payload_type) {
.anyerror_type => return payload_ctype,
else => switch (ip.indexToKey(payload_type)) {
@@ -1539,12 +1628,12 @@ pub const Pool = struct {
}
var fields = [_]Info.Field{
.{
- .name = try pool.string(allocator, "is_null"),
- .ctype = .{ .index = .bool },
+ .name = .{ .index = .is_null },
+ .ctype = CType.bool,
.alignas = AlignAs.fromAbiAlignment(.@"1"),
},
.{
- .name = try pool.string(allocator, "payload"),
+ .name = .{ .index = .payload },
.ctype = payload_ctype,
.alignas = AlignAs.fromAbiAlignment(
Type.fromInterned(payload_type).abiAlignment(zcu),
@@ -1574,14 +1663,14 @@ pub const Pool = struct {
const target = &mod.resolved_target.result;
var fields = [_]Info.Field{
.{
- .name = try pool.string(allocator, "error"),
+ .name = .{ .index = .@"error" },
.ctype = error_set_ctype,
.alignas = AlignAs.fromAbiAlignment(
Type.intAbiAlignment(error_set_bits, target.*),
),
},
.{
- .name = try pool.string(allocator, "payload"),
+ .name = .{ .index = .payload },
.ctype = payload_ctype,
.alignas = AlignAs.fromAbiAlignment(payload_type.abiAlignment(zcu)),
},
@@ -1600,7 +1689,7 @@ pub const Pool = struct {
if (kind.isForward()) return if (ty.hasRuntimeBitsIgnoreComptime(zcu))
fwd_decl
else
- .{ .index = .void };
+ CType.void;
const scratch_top = scratch.items.len;
defer scratch.shrinkRetainingCapacity(scratch_top);
try scratch.ensureUnusedCapacity(
@@ -1625,9 +1714,9 @@ pub const Pool = struct {
if (field_ctype.index == .void) continue;
const field_name = if (loaded_struct.fieldName(ip, field_index)
.unwrap()) |field_name|
- try pool.string(allocator, ip.stringToSlice(field_name))
+ try pool.string(allocator, field_name.toSlice(ip))
else
- try pool.fmt(allocator, "f{d}", .{field_index});
+ String.fromUnnamed(@intCast(field_index));
const field_alignas = AlignAs.fromAlignment(.{
.@"align" = loaded_struct.fieldAlign(ip, field_index),
.abi = field_type.abiAlignment(zcu),
@@ -1644,7 +1733,7 @@ pub const Pool = struct {
scratch.items.len - scratch_top,
@typeInfo(Field).Struct.fields.len,
));
- if (fields_len == 0) return .{ .index = .void };
+ if (fields_len == 0) return CType.void;
try pool.ensureUnusedCapacity(allocator, 1);
const extra_index = try pool.addHashedExtra(allocator, &hasher, Aggregate, .{
.fwd_decl = fwd_decl.index,
@@ -1685,7 +1774,7 @@ pub const Pool = struct {
if (field_ctype.index == .void) continue;
const field_name = if (anon_struct_info.fieldName(ip, @intCast(field_index))
.unwrap()) |field_name|
- try pool.string(allocator, ip.stringToSlice(field_name))
+ try pool.string(allocator, field_name.toSlice(ip))
else
try pool.fmt(allocator, "f{d}", .{field_index});
pool.addHashedExtraAssumeCapacityTo(scratch, &hasher, Field, .{
@@ -1700,7 +1789,7 @@ pub const Pool = struct {
scratch.items.len - scratch_top,
@typeInfo(Field).Struct.fields.len,
));
- if (fields_len == 0) return .{ .index = .void };
+ if (fields_len == 0) return CType.void;
if (kind.isForward()) {
try pool.ensureUnusedCapacity(allocator, 1);
const extra_index = try pool.addHashedExtra(
@@ -1739,7 +1828,7 @@ pub const Pool = struct {
if (kind.isForward()) return if (ty.hasRuntimeBitsIgnoreComptime(zcu))
fwd_decl
else
- .{ .index = .void };
+ CType.void;
const loaded_tag = loaded_union.loadTagType(ip);
const scratch_top = scratch.items.len;
defer scratch.shrinkRetainingCapacity(scratch_top);
@@ -1766,7 +1855,7 @@ pub const Pool = struct {
if (field_ctype.index == .void) continue;
const field_name = try pool.string(
allocator,
- ip.stringToSlice(loaded_tag.names.get(ip)[field_index]),
+ loaded_tag.names.get(ip)[field_index].toSlice(ip),
);
const field_alignas = AlignAs.fromAlignment(.{
.@"align" = loaded_union.fieldAlign(ip, @intCast(field_index)),
@@ -1786,7 +1875,7 @@ pub const Pool = struct {
@typeInfo(Field).Struct.fields.len,
));
if (!has_tag) {
- if (fields_len == 0) return .{ .index = .void };
+ if (fields_len == 0) return CType.void;
try pool.ensureUnusedCapacity(allocator, 1);
const extra_index = try pool.addHashedExtra(
allocator,
@@ -1813,7 +1902,7 @@ pub const Pool = struct {
);
if (tag_ctype.index != .void) {
struct_fields[struct_fields_len] = .{
- .name = try pool.string(allocator, "tag"),
+ .name = .{ .index = .tag },
.ctype = tag_ctype,
.alignas = AlignAs.fromAbiAlignment(tag_type.abiAlignment(zcu)),
};
@@ -1846,14 +1935,14 @@ pub const Pool = struct {
};
if (payload_ctype.index != .void) {
struct_fields[struct_fields_len] = .{
- .name = try pool.string(allocator, "payload"),
+ .name = .{ .index = .payload },
.ctype = payload_ctype,
.alignas = AlignAs.fromAbiAlignment(payload_align),
};
struct_fields_len += 1;
}
}
- if (struct_fields_len == 0) return .{ .index = .void };
+ if (struct_fields_len == 0) return CType.void;
sortFields(struct_fields[0..struct_fields_len]);
return pool.getAggregate(allocator, .{
.tag = .@"struct",
@@ -1867,7 +1956,7 @@ pub const Pool = struct {
}, mod, kind),
}
},
- .opaque_type => return .{ .index = .void },
+ .opaque_type => return CType.void,
.enum_type => return pool.fromType(
allocator,
scratch,
@@ -1876,7 +1965,7 @@ pub const Pool = struct {
mod,
kind,
),
- .func_type => |func_info| if (func_info.is_generic) return .{ .index = .void } else {
+ .func_type => |func_info| if (func_info.is_generic) return CType.void else {
const scratch_top = scratch.items.len;
defer scratch.shrinkRetainingCapacity(scratch_top);
try scratch.ensureUnusedCapacity(allocator, func_info.param_types.len);
@@ -1890,7 +1979,7 @@ pub const Pool = struct {
zcu,
mod,
kind.asParameter(),
- ) else .{ .index = .void };
+ ) else CType.void;
for (0..func_info.param_types.len) |param_index| {
const param_type = Type.fromInterned(
func_info.param_types.get(ip)[param_index],
@@ -2024,7 +2113,10 @@ pub const Pool = struct {
});
for (0..fields.len) |field_index| {
const field = fields.at(field_index, source_pool);
- const field_name = try pool.string(allocator, field.name.slice(source_pool));
+ const field_name = if (field.name.toPoolSlice(source_pool)) |slice|
+ try pool.string(allocator, slice)
+ else
+ field.name;
pool.addExtraAssumeCapacity(Field, .{
.name = field_name.index,
.ctype = pool_adapter.copy(field.ctype).index,
@@ -2054,7 +2146,10 @@ pub const Pool = struct {
});
for (0..aggregate_info.fields.len) |field_index| {
const field = aggregate_info.fields.at(field_index, source_pool);
- const field_name = try pool.string(allocator, field.name.slice(source_pool));
+ const field_name = if (field.name.toPoolSlice(source_pool)) |slice|
+ try pool.string(allocator, slice)
+ else
+ field.name;
pool.addExtraAssumeCapacity(Field, .{
.name = field_name.index,
.ctype = pool_adapter.copy(field.ctype).index,
@@ -2082,8 +2177,8 @@ pub const Pool = struct {
return .{ ctype, gop.found_existing };
}
- pub fn string(pool: *Pool, allocator: std.mem.Allocator, str: []const u8) !String {
- try pool.string_bytes.appendSlice(allocator, str);
+ pub fn string(pool: *Pool, allocator: std.mem.Allocator, slice: []const u8) !String {
+ try pool.string_bytes.appendSlice(allocator, slice);
return pool.trailingString(allocator);
}
@@ -2111,12 +2206,15 @@ pub const Pool = struct {
fn updateExtra(hasher: *Hasher, comptime Extra: type, extra: Extra, pool: *const Pool) void {
inline for (@typeInfo(Extra).Struct.fields) |field| {
const value = @field(extra, field.name);
- hasher.update(switch (field.type) {
+ switch (field.type) {
Pool.Tag, String, CType => unreachable,
- CType.Index => (CType{ .index = value }).hash(pool),
- String.Index => (String{ .index = value }).slice(pool),
- else => value,
- });
+ CType.Index => hasher.update((CType{ .index = value }).hash(pool)),
+ String.Index => if ((String{ .index = value }).toPoolSlice(pool)) |slice|
+ hasher.update(slice)
+ else
+ hasher.update(@intFromEnum(value)),
+ else => hasher.update(value),
+ }
}
}
fn update(hasher: *Hasher, data: anytype) void {
@@ -2231,30 +2329,30 @@ pub const Pool = struct {
}
fn trailingString(pool: *Pool, allocator: std.mem.Allocator) !String {
- const StringAdapter = struct {
- pool: *const Pool,
- pub fn hash(_: @This(), slice: []const u8) Map.Hash {
- return @truncate(Hasher.Impl.hash(1, slice));
- }
- pub fn eql(string_adapter: @This(), lhs_slice: []const u8, _: void, rhs_index: usize) bool {
- const rhs_string: String = .{ .index = @enumFromInt(rhs_index) };
- const rhs_slice = rhs_string.slice(string_adapter.pool);
- return std.mem.eql(u8, lhs_slice, rhs_slice);
- }
- };
+ const start = pool.string_indices.getLast();
+ const slice: []const u8 = pool.string_bytes.items[start..];
+ if (slice.len >= 2 and slice[0] == 'f' and switch (slice[1]) {
+ '0' => slice.len == 2,
+ '1'...'9' => true,
+ else => false,
+ }) if (std.fmt.parseInt(u31, slice[1..], 10)) |unnamed| {
+ pool.string_bytes.shrinkRetainingCapacity(start);
+ return String.fromUnnamed(unnamed);
+ } else |_| {};
+ if (std.meta.stringToEnum(String.Index, slice)) |index| {
+ pool.string_bytes.shrinkRetainingCapacity(start);
+ return .{ .index = index };
+ }
+
try pool.string_map.ensureUnusedCapacity(allocator, 1);
try pool.string_indices.ensureUnusedCapacity(allocator, 1);
- const start = pool.string_indices.getLast();
- const gop = pool.string_map.getOrPutAssumeCapacityAdapted(
- @as([]const u8, pool.string_bytes.items[start..]),
- StringAdapter{ .pool = pool },
- );
+ const gop = pool.string_map.getOrPutAssumeCapacityAdapted(slice, String.Adapter{ .pool = pool });
if (gop.found_existing)
pool.string_bytes.shrinkRetainingCapacity(start)
else
pool.string_indices.appendAssumeCapacity(@intCast(pool.string_bytes.items.len));
- return .{ .index = @enumFromInt(gop.index) };
+ return String.fromPoolIndex(gop.index);
}
const Item = struct {
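Aside: with those reserved indices, trailingString can canonicalize before interning: if the freshly appended bytes spell an unnamed field name ("fN") or one of the predefined member names, the bytes are discarded and the reserved index is returned instead, so such names never occupy pool storage. A simplified, standalone version of that classification step (NamedTag, Name, and canonicalize are hypothetical names):

const std = @import("std");

const NamedTag = enum { is_null, payload, ptr, len, tag };

const Name = union(enum) {
    unnamed: u31, // "fN"
    named: NamedTag, // one of the reserved member names
    pooled: []const u8, // anything else would be interned into string_bytes
};

fn canonicalize(slice: []const u8) Name {
    // "f" followed by a decimal number with no leading zero is an unnamed field.
    if (slice.len >= 2 and slice[0] == 'f' and switch (slice[1]) {
        '0' => slice.len == 2,
        '1'...'9' => true,
        else => false,
    }) {
        if (std.fmt.parseInt(u31, slice[1..], 10)) |n| {
            return .{ .unnamed = n };
        } else |_| {}
    }
    // Predefined member names map to reserved indices instead of pool bytes.
    if (std.meta.stringToEnum(NamedTag, slice)) |tag| return .{ .named = tag };
    return .{ .pooled = slice };
}

test canonicalize {
    try std.testing.expectEqual(@as(u31, 3), canonicalize("f3").unnamed);
    try std.testing.expectEqual(NamedTag.payload, canonicalize("payload").named);
    try std.testing.expectEqualStrings("x", canonicalize("x").pooled);
}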
diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig
index 7419e778a1..db0eaa3ce5 100644
--- a/src/codegen/llvm.zig
+++ b/src/codegen/llvm.zig
@@ -1011,7 +1011,7 @@ pub const Object = struct {
llvm_errors[0] = try o.builder.undefConst(llvm_slice_ty);
for (llvm_errors[1..], error_name_list[1..]) |*llvm_error, name| {
- const name_string = try o.builder.stringNull(mod.intern_pool.stringToSlice(name));
+ const name_string = try o.builder.stringNull(name.toSlice(&mod.intern_pool));
const name_init = try o.builder.stringConst(name_string);
const name_variable_index =
try o.builder.addVariable(.empty, name_init.typeOf(&o.builder), .default);
@@ -1086,7 +1086,7 @@ pub const Object = struct {
for (object.extern_collisions.keys()) |decl_index| {
const global = object.decl_map.get(decl_index) orelse continue;
// Same logic as below but for externs instead of exports.
- const decl_name = object.builder.strtabStringIfExists(mod.intern_pool.stringToSlice(mod.declPtr(decl_index).name)) orelse continue;
+ const decl_name = object.builder.strtabStringIfExists(mod.declPtr(decl_index).name.toSlice(&mod.intern_pool)) orelse continue;
const other_global = object.builder.getGlobal(decl_name) orelse continue;
if (other_global.toConst().getBase(&object.builder) ==
global.toConst().getBase(&object.builder)) continue;
@@ -1116,7 +1116,7 @@ pub const Object = struct {
for (export_list) |exp| {
// Detect if the LLVM global has already been created as an extern. In such
// case, we need to replace all uses of it with this exported global.
- const exp_name = object.builder.strtabStringIfExists(mod.intern_pool.stringToSlice(exp.opts.name)) orelse continue;
+ const exp_name = object.builder.strtabStringIfExists(exp.opts.name.toSlice(&mod.intern_pool)) orelse continue;
const other_global = object.builder.getGlobal(exp_name) orelse continue;
if (other_global.toConst().getBase(&object.builder) == global_base) continue;
@@ -1442,7 +1442,7 @@ pub const Object = struct {
} }, &o.builder);
}
- if (ip.stringToSliceUnwrap(decl.@"linksection")) |section|
+ if (decl.@"linksection".toSlice(ip)) |section|
function_index.setSection(try o.builder.string(section), &o.builder);
var deinit_wip = true;
@@ -1662,7 +1662,7 @@ pub const Object = struct {
const subprogram = try o.builder.debugSubprogram(
file,
- try o.builder.metadataString(ip.stringToSlice(decl.name)),
+ try o.builder.metadataString(decl.name.toSlice(ip)),
try o.builder.metadataStringFromStrtabString(function_index.name(&o.builder)),
line_number,
line_number + func.lbrace_line,
@@ -1752,6 +1752,7 @@ pub const Object = struct {
.value => |val| return updateExportedValue(self, mod, val, exports),
};
const gpa = mod.gpa;
+ const ip = &mod.intern_pool;
// If the module does not already have the function, we ignore this function call
// because we call `updateExports` at the end of `updateFunc` and `updateDecl`.
const global_index = self.decl_map.get(decl_index) orelse return;
@@ -1759,17 +1760,14 @@ pub const Object = struct {
const comp = mod.comp;
if (decl.isExtern(mod)) {
const decl_name = decl_name: {
- const decl_name = mod.intern_pool.stringToSlice(decl.name);
-
if (mod.getTarget().isWasm() and decl.val.typeOf(mod).zigTypeTag(mod) == .Fn) {
- if (mod.intern_pool.stringToSliceUnwrap(decl.getOwnedExternFunc(mod).?.lib_name)) |lib_name| {
+ if (decl.getOwnedExternFunc(mod).?.lib_name.toSlice(ip)) |lib_name| {
if (!std.mem.eql(u8, lib_name, "c")) {
- break :decl_name try self.builder.strtabStringFmt("{s}|{s}", .{ decl_name, lib_name });
+ break :decl_name try self.builder.strtabStringFmt("{}|{s}", .{ decl.name.fmt(ip), lib_name });
}
}
}
-
- break :decl_name try self.builder.strtabString(decl_name);
+ break :decl_name try self.builder.strtabString(decl.name.toSlice(ip));
};
if (self.builder.getGlobal(decl_name)) |other_global| {
@@ -1792,9 +1790,7 @@ pub const Object = struct {
if (decl_var.is_weak_linkage) global_index.setLinkage(.extern_weak, &self.builder);
}
} else if (exports.len != 0) {
- const main_exp_name = try self.builder.strtabString(
- mod.intern_pool.stringToSlice(exports[0].opts.name),
- );
+ const main_exp_name = try self.builder.strtabString(exports[0].opts.name.toSlice(ip));
try global_index.rename(main_exp_name, &self.builder);
if (decl.val.getVariable(mod)) |decl_var| if (decl_var.is_threadlocal)
@@ -1803,9 +1799,7 @@ pub const Object = struct {
return updateExportedGlobal(self, mod, global_index, exports);
} else {
- const fqn = try self.builder.strtabString(
- mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod)),
- );
+ const fqn = try self.builder.strtabString((try decl.fullyQualifiedName(mod)).toSlice(ip));
try global_index.rename(fqn, &self.builder);
global_index.setLinkage(.internal, &self.builder);
if (comp.config.dll_export_fns)
@@ -1832,9 +1826,8 @@ pub const Object = struct {
exports: []const *Module.Export,
) link.File.UpdateExportsError!void {
const gpa = mod.gpa;
- const main_exp_name = try o.builder.strtabString(
- mod.intern_pool.stringToSlice(exports[0].opts.name),
- );
+ const ip = &mod.intern_pool;
+ const main_exp_name = try o.builder.strtabString(exports[0].opts.name.toSlice(ip));
const global_index = i: {
const gop = try o.anon_decl_map.getOrPut(gpa, exported_value);
if (gop.found_existing) {
@@ -1845,7 +1838,7 @@ pub const Object = struct {
const llvm_addr_space = toLlvmAddressSpace(.generic, o.target);
const variable_index = try o.builder.addVariable(
main_exp_name,
- try o.lowerType(Type.fromInterned(mod.intern_pool.typeOf(exported_value))),
+ try o.lowerType(Type.fromInterned(ip.typeOf(exported_value))),
llvm_addr_space,
);
const global_index = variable_index.ptrConst(&o.builder).global;
@@ -1867,8 +1860,9 @@ pub const Object = struct {
global_index: Builder.Global.Index,
exports: []const *Module.Export,
) link.File.UpdateExportsError!void {
- global_index.setUnnamedAddr(.default, &o.builder);
const comp = mod.comp;
+ const ip = &mod.intern_pool;
+ global_index.setUnnamedAddr(.default, &o.builder);
if (comp.config.dll_export_fns)
global_index.setDllStorageClass(.dllexport, &o.builder);
global_index.setLinkage(switch (exports[0].opts.linkage) {
@@ -1882,7 +1876,7 @@ pub const Object = struct {
.hidden => .hidden,
.protected => .protected,
}, &o.builder);
- if (mod.intern_pool.stringToSliceUnwrap(exports[0].opts.section)) |section|
+ if (exports[0].opts.section.toSlice(ip)) |section|
switch (global_index.ptrConst(&o.builder).kind) {
.variable => |impl_index| impl_index.setSection(
try o.builder.string(section),
@@ -1900,7 +1894,7 @@ pub const Object = struct {
// Until then we iterate over existing aliases and make them point
// to the correct decl, or otherwise add a new alias. Old aliases are leaked.
for (exports[1..]) |exp| {
- const exp_name = try o.builder.strtabString(mod.intern_pool.stringToSlice(exp.opts.name));
+ const exp_name = try o.builder.strtabString(exp.opts.name.toSlice(ip));
if (o.builder.getGlobal(exp_name)) |global| {
switch (global.ptrConst(&o.builder).kind) {
.alias => |alias| {
@@ -2013,7 +2007,7 @@ pub const Object = struct {
std.math.big.int.Mutable.init(&bigint_space.limbs, i).toConst();
enumerators[i] = try o.builder.debugEnumerator(
- try o.builder.metadataString(ip.stringToSlice(field_name_ip)),
+ try o.builder.metadataString(field_name_ip.toSlice(ip)),
int_info.signedness == .unsigned,
int_info.bits,
bigint,
@@ -2473,7 +2467,7 @@ pub const Object = struct {
offset = field_offset + field_size;
const field_name = if (tuple.names.len != 0)
- ip.stringToSlice(tuple.names.get(ip)[i])
+ tuple.names.get(ip)[i].toSlice(ip)
else
try std.fmt.allocPrintZ(gpa, "{d}", .{i});
defer if (tuple.names.len == 0) gpa.free(field_name);
@@ -2557,10 +2551,10 @@ pub const Object = struct {
const field_offset = ty.structFieldOffset(field_index, mod);
const field_name = struct_type.fieldName(ip, field_index).unwrap() orelse
- try ip.getOrPutStringFmt(gpa, "{d}", .{field_index});
+ try ip.getOrPutStringFmt(gpa, "{d}", .{field_index}, .no_embedded_nulls);
fields.appendAssumeCapacity(try o.builder.debugMemberType(
- try o.builder.metadataString(ip.stringToSlice(field_name)),
+ try o.builder.metadataString(field_name.toSlice(ip)),
.none, // File
debug_fwd_ref,
0, // Line
@@ -2655,7 +2649,7 @@ pub const Object = struct {
const field_name = tag_type.names.get(ip)[field_index];
fields.appendAssumeCapacity(try o.builder.debugMemberType(
- try o.builder.metadataString(ip.stringToSlice(field_name)),
+ try o.builder.metadataString(field_name.toSlice(ip)),
.none, // File
debug_union_fwd_ref,
0, // Line
@@ -2827,7 +2821,7 @@ pub const Object = struct {
const mod = o.module;
const decl = mod.declPtr(decl_index);
return o.builder.debugStructType(
- try o.builder.metadataString(mod.intern_pool.stringToSlice(decl.name)), // TODO use fully qualified name
+ try o.builder.metadataString(decl.name.toSlice(&mod.intern_pool)), // TODO use fully qualified name
try o.getDebugFile(mod.namespacePtr(decl.src_namespace).file_scope),
try o.namespaceToDebugScope(decl.src_namespace),
decl.src_line + 1,
@@ -2844,11 +2838,11 @@ pub const Object = struct {
const std_mod = mod.std_mod;
const std_file = (mod.importPkg(std_mod) catch unreachable).file;
- const builtin_str = try mod.intern_pool.getOrPutString(mod.gpa, "builtin");
+ const builtin_str = try mod.intern_pool.getOrPutString(mod.gpa, "builtin", .no_embedded_nulls);
const std_namespace = mod.namespacePtr(mod.declPtr(std_file.root_decl.unwrap().?).src_namespace);
const builtin_decl = std_namespace.decls.getKeyAdapted(builtin_str, Module.DeclAdapter{ .zcu = mod }).?;
- const stack_trace_str = try mod.intern_pool.getOrPutString(mod.gpa, "StackTrace");
+ const stack_trace_str = try mod.intern_pool.getOrPutString(mod.gpa, "StackTrace", .no_embedded_nulls);
// buffer is only used for int_type, `builtin` is a struct.
const builtin_ty = mod.declPtr(builtin_decl).val.toType();
const builtin_namespace = mod.namespacePtrUnwrap(builtin_ty.getNamespaceIndex(mod)).?;
@@ -2892,10 +2886,10 @@ pub const Object = struct {
const is_extern = decl.isExtern(zcu);
const function_index = try o.builder.addFunction(
try o.lowerType(zig_fn_type),
- try o.builder.strtabString(ip.stringToSlice(if (is_extern)
+ try o.builder.strtabString((if (is_extern)
decl.name
else
- try decl.fullyQualifiedName(zcu))),
+ try decl.fullyQualifiedName(zcu)).toSlice(ip)),
toLlvmAddressSpace(decl.@"addrspace", target),
);
gop.value_ptr.* = function_index.ptrConst(&o.builder).global;
@@ -2910,9 +2904,9 @@ pub const Object = struct {
if (target.isWasm()) {
try attributes.addFnAttr(.{ .string = .{
.kind = try o.builder.string("wasm-import-name"),
- .value = try o.builder.string(ip.stringToSlice(decl.name)),
+ .value = try o.builder.string(decl.name.toSlice(ip)),
} }, &o.builder);
- if (ip.stringToSliceUnwrap(decl.getOwnedExternFunc(zcu).?.lib_name)) |lib_name| {
+ if (decl.getOwnedExternFunc(zcu).?.lib_name.toSlice(ip)) |lib_name| {
if (!std.mem.eql(u8, lib_name, "c")) try attributes.addFnAttr(.{ .string = .{
.kind = try o.builder.string("wasm-import-module"),
.value = try o.builder.string(lib_name),
@@ -3108,9 +3102,10 @@ pub const Object = struct {
const is_extern = decl.isExtern(mod);
const variable_index = try o.builder.addVariable(
- try o.builder.strtabString(mod.intern_pool.stringToSlice(
- if (is_extern) decl.name else try decl.fullyQualifiedName(mod),
- )),
+ try o.builder.strtabString((if (is_extern)
+ decl.name
+ else
+ try decl.fullyQualifiedName(mod)).toSlice(&mod.intern_pool)),
try o.lowerType(decl.typeOf(mod)),
toLlvmGlobalAddressSpace(decl.@"addrspace", mod.getTarget()),
);
@@ -3258,7 +3253,7 @@ pub const Object = struct {
};
},
.array_type => |array_type| o.builder.arrayType(
- array_type.len + @intFromBool(array_type.sentinel != .none),
+ array_type.lenIncludingSentinel(),
try o.lowerType(Type.fromInterned(array_type.child)),
),
.vector_type => |vector_type| o.builder.vectorType(
@@ -3335,9 +3330,7 @@ pub const Object = struct {
return int_ty;
}
- const name = try o.builder.string(ip.stringToSlice(
- try mod.declPtr(struct_type.decl.unwrap().?).fullyQualifiedName(mod),
- ));
+ const fqn = try mod.declPtr(struct_type.decl.unwrap().?).fullyQualifiedName(mod);
var llvm_field_types = std.ArrayListUnmanaged(Builder.Type){};
defer llvm_field_types.deinit(o.gpa);
@@ -3402,7 +3395,7 @@ pub const Object = struct {
);
}
- const ty = try o.builder.opaqueType(name);
+ const ty = try o.builder.opaqueType(try o.builder.string(fqn.toSlice(ip)));
try o.type_map.put(o.gpa, t.toIntern(), ty);
o.builder.namedTypeSetBody(
@@ -3491,9 +3484,7 @@ pub const Object = struct {
return enum_tag_ty;
}
- const name = try o.builder.string(ip.stringToSlice(
- try mod.declPtr(union_obj.decl).fullyQualifiedName(mod),
- ));
+ const fqn = try mod.declPtr(union_obj.decl).fullyQualifiedName(mod);
const aligned_field_ty = Type.fromInterned(union_obj.field_types.get(ip)[layout.most_aligned_field]);
const aligned_field_llvm_ty = try o.lowerType(aligned_field_ty);
@@ -3513,7 +3504,7 @@ pub const Object = struct {
};
if (layout.tag_size == 0) {
- const ty = try o.builder.opaqueType(name);
+ const ty = try o.builder.opaqueType(try o.builder.string(fqn.toSlice(ip)));
try o.type_map.put(o.gpa, t.toIntern(), ty);
o.builder.namedTypeSetBody(
@@ -3541,7 +3532,7 @@ pub const Object = struct {
llvm_fields_len += 1;
}
- const ty = try o.builder.opaqueType(name);
+ const ty = try o.builder.opaqueType(try o.builder.string(fqn.toSlice(ip)));
try o.type_map.put(o.gpa, t.toIntern(), ty);
o.builder.namedTypeSetBody(
@@ -3554,8 +3545,8 @@ pub const Object = struct {
const gop = try o.type_map.getOrPut(o.gpa, t.toIntern());
if (!gop.found_existing) {
const decl = mod.declPtr(ip.loadOpaqueType(t.toIntern()).decl);
- const name = try o.builder.string(ip.stringToSlice(try decl.fullyQualifiedName(mod)));
- gop.value_ptr.* = try o.builder.opaqueType(name);
+ const fqn = try decl.fullyQualifiedName(mod);
+ gop.value_ptr.* = try o.builder.opaqueType(try o.builder.string(fqn.toSlice(ip)));
}
return gop.value_ptr.*;
},
@@ -3859,7 +3850,9 @@ pub const Object = struct {
},
.aggregate => |aggregate| switch (ip.indexToKey(ty.toIntern())) {
.array_type => |array_type| switch (aggregate.storage) {
- .bytes => |bytes| try o.builder.stringConst(try o.builder.string(bytes)),
+ .bytes => |bytes| try o.builder.stringConst(try o.builder.string(
+ bytes.toSlice(array_type.lenIncludingSentinel(), ip),
+ )),
.elems => |elems| {
const array_ty = try o.lowerType(ty);
const elem_ty = array_ty.childType(&o.builder);
@@ -3892,8 +3885,7 @@ pub const Object = struct {
},
.repeated_elem => |elem| {
const len: usize = @intCast(array_type.len);
- const len_including_sentinel: usize =
- @intCast(len + @intFromBool(array_type.sentinel != .none));
+ const len_including_sentinel: usize = @intCast(array_type.lenIncludingSentinel());
const array_ty = try o.lowerType(ty);
const elem_ty = array_ty.childType(&o.builder);
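Aside: several hunks in this file and in c/Type.zig replace the repeated len + @intFromBool(sentinel != .none) computation with a lenIncludingSentinel() call on the array-type key. A tiny sketch of what such a helper computes, using a stand-in ArrayInfo struct rather than the InternPool key:

const std = @import("std");

const ArrayInfo = struct {
    len: u64,
    sentinel: ?u8 = null, // the real field is an InternPool value index

    fn lenIncludingSentinel(info: ArrayInfo) u64 {
        // A sentinel occupies one extra trailing element in memory.
        return info.len + @intFromBool(info.sentinel != null);
    }
};

test "sentinel adds one trailing element" {
    try std.testing.expectEqual(@as(u64, 4), (ArrayInfo{ .len = 3, .sentinel = 0 }).lenIncludingSentinel());
    try std.testing.expectEqual(@as(u64, 3), (ArrayInfo{ .len = 3 }).lenIncludingSentinel());
}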
@@ -3942,7 +3934,7 @@ pub const Object = struct {
defer allocator.free(vals);
switch (aggregate.storage) {
- .bytes => |bytes| for (vals, bytes) |*result_val, byte| {
+ .bytes => |bytes| for (vals, bytes.toSlice(vector_type.len, ip)) |*result_val, byte| {
result_val.* = try o.builder.intConst(.i8, byte);
},
.elems => |elems| for (vals, elems) |*result_val, elem| {
@@ -4633,7 +4625,7 @@ pub const Object = struct {
defer wip_switch.finish(&wip);
for (0..enum_type.names.len) |field_index| {
- const name = try o.builder.stringNull(ip.stringToSlice(enum_type.names.get(ip)[field_index]));
+ const name = try o.builder.stringNull(enum_type.names.get(ip)[field_index].toSlice(ip));
const name_init = try o.builder.stringConst(name);
const name_variable_index =
try o.builder.addVariable(.empty, name_init.typeOf(&o.builder), .default);
@@ -4693,6 +4685,7 @@ pub const DeclGen = struct {
fn genDecl(dg: *DeclGen) !void {
const o = dg.object;
const zcu = o.module;
+ const ip = &zcu.intern_pool;
const decl = dg.decl;
const decl_index = dg.decl_index;
assert(decl.has_tv);
@@ -4705,7 +4698,7 @@ pub const DeclGen = struct {
decl.getAlignment(zcu).toLlvm(),
&o.builder,
);
- if (zcu.intern_pool.stringToSliceUnwrap(decl.@"linksection")) |section|
+ if (decl.@"linksection".toSlice(ip)) |section|
variable_index.setSection(try o.builder.string(section), &o.builder);
assert(decl.has_tv);
const init_val = if (decl.val.getVariable(zcu)) |decl_var| decl_var.init else init_val: {
@@ -4728,7 +4721,7 @@ pub const DeclGen = struct {
const debug_file = try o.getDebugFile(namespace.file_scope);
const debug_global_var = try o.builder.debugGlobalVar(
- try o.builder.metadataString(zcu.intern_pool.stringToSlice(decl.name)), // Name
+ try o.builder.metadataString(decl.name.toSlice(ip)), // Name
try o.builder.metadataStringFromStrtabString(variable_index.name(&o.builder)), // Linkage name
debug_file, // File
debug_file, // Scope
@@ -5156,8 +5149,8 @@ pub const FuncGen = struct {
self.scope = try o.builder.debugSubprogram(
self.file,
- try o.builder.metadataString(zcu.intern_pool.stringToSlice(decl.name)),
- try o.builder.metadataString(zcu.intern_pool.stringToSlice(fqn)),
+ try o.builder.metadataString(decl.name.toSlice(&zcu.intern_pool)),
+ try o.builder.metadataString(fqn.toSlice(&zcu.intern_pool)),
line_number,
line_number + func.lbrace_line,
try o.lowerDebugType(fn_ty),
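Aside: most of the llvm.zig changes are a mechanical migration from ip.stringToSlice(name) to name.toSlice(ip) (or name.fmt(ip) for formatting), making the interned handle the receiver and letting the optional-aware variant return null instead of going through stringToSliceUnwrap. A minimal sketch of that shape with hypothetical Pool and OptionalString types:

const std = @import("std");

const Pool = struct {
    bytes: []const u8,
    ends: []const u32, // ends[i] is one past the last byte of string i
};

const OptionalString = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn toSlice(name: OptionalString, pool: *const Pool) ?[]const u8 {
        if (name == .none) return null;
        const i = @intFromEnum(name);
        const start = if (i == 0) 0 else pool.ends[i - 1];
        return pool.bytes[start..pool.ends[i]];
    }
};

test OptionalString {
    const pool: Pool = .{ .bytes = "ptrlen", .ends = &.{ 3, 6 } };
    try std.testing.expectEqualStrings("len", (@as(OptionalString, @enumFromInt(1))).toSlice(&pool).?);
    try std.testing.expectEqual(@as(?[]const u8, null), OptionalString.none.toSlice(&pool));
}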
diff --git a/src/codegen/spirv.zig b/src/codegen/spirv.zig
index 2570343763..53ec59d531 100644
--- a/src/codegen/spirv.zig
+++ b/src/codegen/spirv.zig
@@ -1028,39 +1028,30 @@ const DeclGen = struct {
inline .array_type, .vector_type => |array_type, tag| {
const elem_ty = Type.fromInterned(array_type.child);
- const constituents = try self.gpa.alloc(IdRef, @as(u32, @intCast(ty.arrayLenIncludingSentinel(mod))));
+ const constituents = try self.gpa.alloc(IdRef, @intCast(ty.arrayLenIncludingSentinel(mod)));
defer self.gpa.free(constituents);
switch (aggregate.storage) {
.bytes => |bytes| {
// TODO: This is really space inefficient, perhaps there is a better
// way to do it?
- for (bytes, 0..) |byte, i| {
- constituents[i] = try self.constInt(elem_ty, byte, .indirect);
+ for (constituents, bytes.toSlice(constituents.len, ip)) |*constituent, byte| {
+ constituent.* = try self.constInt(elem_ty, byte, .indirect);
}
},
.elems => |elems| {
- for (0..@as(usize, @intCast(array_type.len))) |i| {
- constituents[i] = try self.constant(elem_ty, Value.fromInterned(elems[i]), .indirect);
+ for (constituents, elems) |*constituent, elem| {
+ constituent.* = try self.constant(elem_ty, Value.fromInterned(elem), .indirect);
}
},
.repeated_elem => |elem| {
- const val_id = try self.constant(elem_ty, Value.fromInterned(elem), .indirect);
- for (0..@as(usize, @intCast(array_type.len))) |i| {
- constituents[i] = val_id;
- }
+ @memset(constituents, try self.constant(elem_ty, Value.fromInterned(elem), .indirect));
},
}
switch (tag) {
- inline .array_type => {
- if (array_type.sentinel != .none) {
- const sentinel = Value.fromInterned(array_type.sentinel);
- constituents[constituents.len - 1] = try self.constant(elem_ty, sentinel, .indirect);
- }
- return self.constructArray(ty, constituents);
- },
- inline .vector_type => return self.constructVector(ty, constituents),
+ .array_type => return self.constructArray(ty, constituents),
+ .vector_type => return self.constructVector(ty, constituents),
else => unreachable,
}
},
@@ -1683,9 +1674,9 @@ const DeclGen = struct {
}
const field_name = struct_type.fieldName(ip, field_index).unwrap() orelse
- try ip.getOrPutStringFmt(mod.gpa, "{d}", .{field_index});
+ try ip.getOrPutStringFmt(mod.gpa, "{d}", .{field_index}, .no_embedded_nulls);
try member_types.append(try self.resolveType(field_ty, .indirect));
- try member_names.append(ip.stringToSlice(field_name));
+ try member_names.append(field_name.toSlice(ip));
}
const result_id = try self.spv.structType(member_types.items, member_names.items);
@@ -2123,12 +2114,12 @@ const DeclGen = struct {
// Append the actual code into the functions section.
try self.spv.addFunction(spv_decl_index, self.func);
- const fqn = ip.stringToSlice(try decl.fullyQualifiedName(self.module));
- try self.spv.debugName(result_id, fqn);
+ const fqn = try decl.fullyQualifiedName(self.module);
+ try self.spv.debugName(result_id, fqn.toSlice(ip));
// Temporarily generate a test kernel declaration if this is a test function.
if (self.module.test_functions.contains(self.decl_index)) {
- try self.generateTestEntryPoint(fqn, spv_decl_index);
+ try self.generateTestEntryPoint(fqn.toSlice(ip), spv_decl_index);
}
},
.global => {
@@ -2152,8 +2143,8 @@ const DeclGen = struct {
.storage_class = final_storage_class,
});
- const fqn = ip.stringToSlice(try decl.fullyQualifiedName(self.module));
- try self.spv.debugName(result_id, fqn);
+ const fqn = try decl.fullyQualifiedName(self.module);
+ try self.spv.debugName(result_id, fqn.toSlice(ip));
try self.spv.declareDeclDeps(spv_decl_index, &.{});
},
.invocation_global => {
@@ -2197,8 +2188,8 @@ const DeclGen = struct {
try self.func.body.emit(self.spv.gpa, .OpFunctionEnd, {});
try self.spv.addFunction(spv_decl_index, self.func);
- const fqn = ip.stringToSlice(try decl.fullyQualifiedName(self.module));
- try self.spv.debugNameFmt(initializer_id, "initializer of {s}", .{fqn});
+ const fqn = try decl.fullyQualifiedName(self.module);
+ try self.spv.debugNameFmt(initializer_id, "initializer of {}", .{fqn.fmt(ip)});
try self.spv.sections.types_globals_constants.emit(self.spv.gpa, .OpExtInst, .{
.id_result_type = ptr_ty_id,
diff --git a/src/link/Coff.zig b/src/link/Coff.zig
index aaf840e02c..413294489a 100644
--- a/src/link/Coff.zig
+++ b/src/link/Coff.zig
@@ -1176,9 +1176,9 @@ pub fn lowerUnnamedConst(self: *Coff, val: Value, decl_index: InternPool.DeclInd
gop.value_ptr.* = .{};
}
const unnamed_consts = gop.value_ptr;
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
const index = unnamed_consts.items.len;
- const sym_name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
+ const sym_name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl_name.fmt(&mod.intern_pool), index });
defer gpa.free(sym_name);
const ty = val.typeOf(mod);
const atom_index = switch (try self.lowerConst(sym_name, val, ty.abiAlignment(mod), self.rdata_section_index.?, decl.srcLoc(mod))) {
@@ -1257,8 +1257,8 @@ pub fn updateDecl(
if (decl.isExtern(mod)) {
// TODO make this part of getGlobalSymbol
const variable = decl.getOwnedVariable(mod).?;
- const name = mod.intern_pool.stringToSlice(decl.name);
- const lib_name = mod.intern_pool.stringToSliceUnwrap(variable.lib_name);
+ const name = decl.name.toSlice(&mod.intern_pool);
+ const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
const global_index = try self.getGlobalSymbol(name, lib_name);
try self.need_got_table.put(gpa, global_index, {});
return;
@@ -1425,9 +1425,9 @@ fn updateDeclCode(self: *Coff, decl_index: InternPool.DeclIndex, code: []u8, com
const mod = self.base.comp.module.?;
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
- log.debug("updateDeclCode {s}{*}", .{ decl_name, decl });
+ log.debug("updateDeclCode {}{*}", .{ decl_name.fmt(&mod.intern_pool), decl });
const required_alignment: u32 = @intCast(decl.getAlignment(mod).toByteUnits() orelse 0);
const decl_metadata = self.decls.get(decl_index).?;
@@ -1439,7 +1439,7 @@ fn updateDeclCode(self: *Coff, decl_index: InternPool.DeclIndex, code: []u8, com
if (atom.size != 0) {
const sym = atom.getSymbolPtr(self);
- try self.setSymbolName(sym, decl_name);
+ try self.setSymbolName(sym, decl_name.toSlice(&mod.intern_pool));
sym.section_number = @as(coff.SectionNumber, @enumFromInt(sect_index + 1));
sym.type = .{ .complex_type = complex_type, .base_type = .NULL };
@@ -1447,7 +1447,7 @@ fn updateDeclCode(self: *Coff, decl_index: InternPool.DeclIndex, code: []u8, com
const need_realloc = code.len > capacity or !mem.isAlignedGeneric(u64, sym.value, required_alignment);
if (need_realloc) {
const vaddr = try self.growAtom(atom_index, code_len, required_alignment);
- log.debug("growing {s} from 0x{x} to 0x{x}", .{ decl_name, sym.value, vaddr });
+ log.debug("growing {} from 0x{x} to 0x{x}", .{ decl_name.fmt(&mod.intern_pool), sym.value, vaddr });
log.debug(" (required alignment 0x{x}", .{required_alignment});
if (vaddr != sym.value) {
@@ -1463,13 +1463,13 @@ fn updateDeclCode(self: *Coff, decl_index: InternPool.DeclIndex, code: []u8, com
self.getAtomPtr(atom_index).size = code_len;
} else {
const sym = atom.getSymbolPtr(self);
- try self.setSymbolName(sym, decl_name);
+ try self.setSymbolName(sym, decl_name.toSlice(&mod.intern_pool));
sym.section_number = @as(coff.SectionNumber, @enumFromInt(sect_index + 1));
sym.type = .{ .complex_type = complex_type, .base_type = .NULL };
const vaddr = try self.allocateAtom(atom_index, code_len, required_alignment);
errdefer self.freeAtom(atom_index);
- log.debug("allocated atom for {s} at 0x{x}", .{ decl_name, vaddr });
+ log.debug("allocated atom for {} at 0x{x}", .{ decl_name.fmt(&mod.intern_pool), vaddr });
self.getAtomPtr(atom_index).size = code_len;
sym.value = vaddr;
@@ -1534,20 +1534,18 @@ pub fn updateExports(
else => std.builtin.CallingConvention.C,
};
const decl_cc = exported_decl.typeOf(mod).fnCallingConvention(mod);
- if (decl_cc == .C and ip.stringEqlSlice(exp.opts.name, "main") and
- comp.config.link_libc)
- {
+ if (decl_cc == .C and exp.opts.name.eqlSlice("main", ip) and comp.config.link_libc) {
mod.stage1_flags.have_c_main = true;
} else if (decl_cc == winapi_cc and target.os.tag == .windows) {
- if (ip.stringEqlSlice(exp.opts.name, "WinMain")) {
+ if (exp.opts.name.eqlSlice("WinMain", ip)) {
mod.stage1_flags.have_winmain = true;
- } else if (ip.stringEqlSlice(exp.opts.name, "wWinMain")) {
+ } else if (exp.opts.name.eqlSlice("wWinMain", ip)) {
mod.stage1_flags.have_wwinmain = true;
- } else if (ip.stringEqlSlice(exp.opts.name, "WinMainCRTStartup")) {
+ } else if (exp.opts.name.eqlSlice("WinMainCRTStartup", ip)) {
mod.stage1_flags.have_winmain_crt_startup = true;
- } else if (ip.stringEqlSlice(exp.opts.name, "wWinMainCRTStartup")) {
+ } else if (exp.opts.name.eqlSlice("wWinMainCRTStartup", ip)) {
mod.stage1_flags.have_wwinmain_crt_startup = true;
- } else if (ip.stringEqlSlice(exp.opts.name, "DllMainCRTStartup")) {
+ } else if (exp.opts.name.eqlSlice("DllMainCRTStartup", ip)) {
mod.stage1_flags.have_dllmain_crt_startup = true;
}
}
@@ -1585,7 +1583,7 @@ pub fn updateExports(
for (exports) |exp| {
log.debug("adding new export '{}'", .{exp.opts.name.fmt(&mod.intern_pool)});
- if (mod.intern_pool.stringToSliceUnwrap(exp.opts.section)) |section_name| {
+ if (exp.opts.section.toSlice(&mod.intern_pool)) |section_name| {
if (!mem.eql(u8, section_name, ".text")) {
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
gpa,
@@ -1607,7 +1605,7 @@ pub fn updateExports(
continue;
}
- const exp_name = mod.intern_pool.stringToSlice(exp.opts.name);
+ const exp_name = exp.opts.name.toSlice(&mod.intern_pool);
const sym_index = metadata.getExport(self, exp_name) orelse blk: {
const sym_index = if (self.getGlobalIndex(exp_name)) |global_index| ind: {
const global = self.globals.items[global_index];
@@ -1646,18 +1644,18 @@ pub fn updateExports(
pub fn deleteDeclExport(
self: *Coff,
decl_index: InternPool.DeclIndex,
- name_ip: InternPool.NullTerminatedString,
+ name: InternPool.NullTerminatedString,
) void {
if (self.llvm_object) |_| return;
const metadata = self.decls.getPtr(decl_index) orelse return;
const mod = self.base.comp.module.?;
- const name = mod.intern_pool.stringToSlice(name_ip);
- const sym_index = metadata.getExportPtr(self, name) orelse return;
+ const name_slice = name.toSlice(&mod.intern_pool);
+ const sym_index = metadata.getExportPtr(self, name_slice) orelse return;
const gpa = self.base.comp.gpa;
const sym_loc = SymbolWithLoc{ .sym_index = sym_index.*, .file = null };
const sym = self.getSymbolPtr(sym_loc);
- log.debug("deleting export '{s}'", .{name});
+ log.debug("deleting export '{}'", .{name.fmt(&mod.intern_pool)});
assert(sym.storage_class == .EXTERNAL and sym.section_number != .UNDEFINED);
sym.* = .{
.name = [_]u8{0} ** 8,
@@ -1669,7 +1667,7 @@ pub fn deleteDeclExport(
};
self.locals_free_list.append(gpa, sym_index.*) catch {};
- if (self.resolver.fetchRemove(name)) |entry| {
+ if (self.resolver.fetchRemove(name_slice)) |entry| {
defer gpa.free(entry.key);
self.globals_free_list.append(gpa, entry.value) catch {};
self.globals.items[entry.value] = .{
diff --git a/src/link/Dwarf.zig b/src/link/Dwarf.zig
index 6a41bb3793..5f82c924c7 100644
--- a/src/link/Dwarf.zig
+++ b/src/link/Dwarf.zig
@@ -339,15 +339,14 @@ pub const DeclState = struct {
struct_type.field_names.get(ip),
struct_type.field_types.get(ip),
struct_type.offsets.get(ip),
- ) |field_name_ip, field_ty, field_off| {
+ ) |field_name, field_ty, field_off| {
if (!Type.fromInterned(field_ty).hasRuntimeBits(mod)) continue;
- const field_name = ip.stringToSlice(field_name_ip);
+ const field_name_slice = field_name.toSlice(ip);
// DW.AT.member
- try dbg_info_buffer.ensureUnusedCapacity(field_name.len + 2);
+ try dbg_info_buffer.ensureUnusedCapacity(field_name_slice.len + 2);
dbg_info_buffer.appendAssumeCapacity(@intFromEnum(AbbrevCode.struct_member));
// DW.AT.name, DW.FORM.string
- dbg_info_buffer.appendSliceAssumeCapacity(field_name);
- dbg_info_buffer.appendAssumeCapacity(0);
+ dbg_info_buffer.appendSliceAssumeCapacity(field_name_slice[0 .. field_name_slice.len + 1]);
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.appendNTimes(0, 4);
@@ -374,14 +373,13 @@ pub const DeclState = struct {
try dbg_info_buffer.append(0);
const enum_type = ip.loadEnumType(ty.ip_index);
- for (enum_type.names.get(ip), 0..) |field_name_index, field_i| {
- const field_name = ip.stringToSlice(field_name_index);
+ for (enum_type.names.get(ip), 0..) |field_name, field_i| {
+ const field_name_slice = field_name.toSlice(ip);
// DW.AT.enumerator
- try dbg_info_buffer.ensureUnusedCapacity(field_name.len + 2 + @sizeOf(u64));
+ try dbg_info_buffer.ensureUnusedCapacity(field_name_slice.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(@intFromEnum(AbbrevCode.enum_variant));
// DW.AT.name, DW.FORM.string
- dbg_info_buffer.appendSliceAssumeCapacity(field_name);
- dbg_info_buffer.appendAssumeCapacity(0);
+ dbg_info_buffer.appendSliceAssumeCapacity(field_name_slice[0 .. field_name_slice.len + 1]);
// DW.AT.const_value, DW.FORM.data8
const value: u64 = value: {
if (enum_type.values.len == 0) break :value field_i; // auto-numbered
@@ -443,11 +441,11 @@ pub const DeclState = struct {
for (union_obj.field_types.get(ip), union_obj.loadTagType(ip).names.get(ip)) |field_ty, field_name| {
if (!Type.fromInterned(field_ty).hasRuntimeBits(mod)) continue;
+ const field_name_slice = field_name.toSlice(ip);
// DW.AT.member
try dbg_info_buffer.append(@intFromEnum(AbbrevCode.struct_member));
// DW.AT.name, DW.FORM.string
- try dbg_info_buffer.appendSlice(ip.stringToSlice(field_name));
- try dbg_info_buffer.append(0);
+ try dbg_info_buffer.appendSlice(field_name_slice[0 .. field_name_slice.len + 1]);
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.appendNTimes(0, 4);
@@ -1155,8 +1153,8 @@ pub fn initDeclState(self: *Dwarf, mod: *Module, decl_index: InternPool.DeclInde
dbg_line_buffer.appendAssumeCapacity(DW.LNS.copy);
// .debug_info subprogram
- const decl_name_slice = mod.intern_pool.stringToSlice(decl.name);
- const decl_linkage_name_slice = mod.intern_pool.stringToSlice(decl_linkage_name);
+ const decl_name_slice = decl.name.toSlice(&mod.intern_pool);
+ const decl_linkage_name_slice = decl_linkage_name.toSlice(&mod.intern_pool);
try dbg_info_buffer.ensureUnusedCapacity(1 + ptr_width_bytes + 4 + 4 +
(decl_name_slice.len + 1) + (decl_linkage_name_slice.len + 1));
@@ -2866,15 +2864,14 @@ fn addDbgInfoErrorSetNames(
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), 0, target_endian);
- for (error_names) |error_name_ip| {
- const int = try mod.getErrorValue(error_name_ip);
- const error_name = mod.intern_pool.stringToSlice(error_name_ip);
+ for (error_names) |error_name| {
+ const int = try mod.getErrorValue(error_name);
+ const error_name_slice = error_name.toSlice(&mod.intern_pool);
// DW.AT.enumerator
- try dbg_info_buffer.ensureUnusedCapacity(error_name.len + 2 + @sizeOf(u64));
+ try dbg_info_buffer.ensureUnusedCapacity(error_name_slice.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(@intFromEnum(AbbrevCode.enum_variant));
// DW.AT.name, DW.FORM.string
- dbg_info_buffer.appendSliceAssumeCapacity(error_name);
- dbg_info_buffer.appendAssumeCapacity(0);
+ dbg_info_buffer.appendSliceAssumeCapacity(error_name_slice[0 .. error_name_slice.len + 1]);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), int, target_endian);
}
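
The appendSliceAssumeCapacity(slice[0 .. slice.len + 1]) pattern introduced above folds the old two appends (name bytes, then a 0) into one write. It works on the assumption that the interned name comes back 0-terminated from toSlice(), so slicing one past the logical length also captures the terminator. A small self-contained sketch of the idea under that assumption:

    const std = @import("std");

    test "append a name and its 0 terminator in one slice" {
        // Assumes a 0-terminated slice, which is what makes len + 1 a valid end index.
        const field_name: [:0]const u8 = "field_a";
        var buf = std.ArrayList(u8).init(std.testing.allocator);
        defer buf.deinit();
        try buf.appendSlice(field_name[0 .. field_name.len + 1]);
        try std.testing.expectEqualSlices(u8, "field_a\x00", buf.items);
    }
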
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index c20a4b6afa..03451d140e 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -1517,7 +1517,7 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
if (self.base.isStatic()) {
try argv.append("-static");
- } else if (self.base.isDynLib()) {
+ } else if (self.isEffectivelyDynLib()) {
try argv.append("-shared");
}
@@ -1997,7 +1997,7 @@ fn markImportsExports(self: *Elf) void {
}
if (file_ptr.index() == file_index) {
global.flags.@"export" = true;
- if (elf_file.base.isDynLib() and vis != .PROTECTED) {
+ if (elf_file.isEffectivelyDynLib() and vis != .PROTECTED) {
global.flags.import = true;
}
}
@@ -2005,7 +2005,7 @@ fn markImportsExports(self: *Elf) void {
}
}.mark;
- if (!self.base.isDynLib()) {
+ if (!self.isEffectivelyDynLib()) {
for (self.shared_objects.items) |index| {
for (self.file(index).?.globals()) |global_index| {
const global = self.symbol(global_index);
@@ -2469,7 +2469,10 @@ fn linkWithLLD(self: *Elf, arena: Allocator, prog_node: *std.Progress.Node) !voi
} else {
try argv.append("-static");
}
- } else if (is_dyn_lib) {
+ } else if (switch (target.os.tag) {
+ .haiku => is_exe_or_dyn_lib,
+ else => is_dyn_lib,
+ }) {
try argv.append("-shared");
}
@@ -2925,7 +2928,7 @@ pub fn writeElfHeader(self: *Elf) !void {
const output_mode = comp.config.output_mode;
const link_mode = comp.config.link_mode;
const elf_type: elf.ET = switch (output_mode) {
- .Exe => if (comp.config.pie) .DYN else .EXEC,
+ .Exe => if (comp.config.pie or target.os.tag == .haiku) .DYN else .EXEC,
.Obj => .REL,
.Lib => switch (link_mode) {
.static => @as(elf.ET, .REL),
@@ -3114,7 +3117,7 @@ fn addLinkerDefinedSymbols(self: *Elf) !void {
}
}
- if (self.getTarget().cpu.arch == .riscv64 and self.base.isDynLib()) {
+ if (self.getTarget().cpu.arch == .riscv64 and self.isEffectivelyDynLib()) {
self.global_pointer_index = try linker_defined.addGlobal("__global_pointer$", self);
}
@@ -3420,7 +3423,7 @@ fn initSyntheticSections(self: *Elf) !void {
});
}
- if (self.base.isDynLib() or self.shared_objects.items.len > 0 or comp.config.pie) {
+ if (self.isEffectivelyDynLib() or self.shared_objects.items.len > 0 or comp.config.pie) {
self.dynstrtab_section_index = try self.addSection(.{
.name = ".dynstr",
.flags = elf.SHF_ALLOC,
@@ -3657,7 +3660,7 @@ fn setDynamicSection(self: *Elf, rpaths: []const []const u8) !void {
try self.dynamic.addNeeded(shared_object, self);
}
- if (self.base.isDynLib()) {
+ if (self.isEffectivelyDynLib()) {
if (self.soname) |soname| {
try self.dynamic.setSoname(soname, self);
}
@@ -5246,6 +5249,16 @@ const CsuObjects = struct {
}
};
+/// Returns true for dynamic libraries, and also for other output modes that the
+/// target compiles as dynamic libraries (e.g. executables on Haiku).
+pub fn isEffectivelyDynLib(self: Elf) bool {
+ if (self.base.isDynLib()) return true;
+ return switch (self.getTarget().os.tag) {
+ .haiku => self.base.isExe(),
+ else => false,
+ };
+}
+
pub fn isZigSection(self: Elf, shndx: u32) bool {
inline for (&[_]?u32{
self.zig_text_section_index,
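
The Haiku-related edits in this file all funnel through the new isEffectivelyDynLib query added above. The following is an illustrative, self-contained sketch of the same decision, with the Elf and Compilation plumbing replaced by plain parameters; it is not part of the patch itself.

    const std = @import("std");

    // A dynamic library is always "effectively" one; on Haiku an executable is
    // linked as one as well (writeElfHeader above likewise emits ET_DYN for
    // Haiku executables).
    fn isEffectivelyDynLib(is_dyn_lib: bool, is_exe: bool, os_tag: std.Target.Os.Tag) bool {
        if (is_dyn_lib) return true;
        return switch (os_tag) {
            .haiku => is_exe,
            else => false,
        };
    }

    test isEffectivelyDynLib {
        try std.testing.expect(isEffectivelyDynLib(true, false, .linux));
        try std.testing.expect(isEffectivelyDynLib(false, true, .haiku));
        try std.testing.expect(!isEffectivelyDynLib(false, true, .linux));
    }
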
diff --git a/src/link/Elf/Atom.zig b/src/link/Elf/Atom.zig
index 3db1182696..b5ceeb24b8 100644
--- a/src/link/Elf/Atom.zig
+++ b/src/link/Elf/Atom.zig
@@ -1054,7 +1054,7 @@ const x86_64 = struct {
it: *RelocsIterator,
) !void {
const is_static = elf_file.base.isStatic();
- const is_dyn_lib = elf_file.base.isDynLib();
+ const is_dyn_lib = elf_file.isEffectivelyDynLib();
const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type());
const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow;
@@ -1599,7 +1599,7 @@ const aarch64 = struct {
_ = it;
const r_type: elf.R_AARCH64 = @enumFromInt(rel.r_type());
- const is_dyn_lib = elf_file.base.isDynLib();
+ const is_dyn_lib = elf_file.isEffectivelyDynLib();
switch (r_type) {
.ABS64 => {
diff --git a/src/link/Elf/Object.zig b/src/link/Elf/Object.zig
index cc135f2f97..d483540aa6 100644
--- a/src/link/Elf/Object.zig
+++ b/src/link/Elf/Object.zig
@@ -568,7 +568,7 @@ pub fn claimUnresolved(self: *Object, elf_file: *Elf) void {
}
const is_import = blk: {
- if (!elf_file.base.isDynLib()) break :blk false;
+ if (!elf_file.isEffectivelyDynLib()) break :blk false;
const vis = @as(elf.STV, @enumFromInt(esym.st_other));
if (vis == .HIDDEN) break :blk false;
break :blk true;
diff --git a/src/link/Elf/Symbol.zig b/src/link/Elf/Symbol.zig
index 0ddf19cd05..9db17f2f4f 100644
--- a/src/link/Elf/Symbol.zig
+++ b/src/link/Elf/Symbol.zig
@@ -105,6 +105,29 @@ pub fn address(symbol: Symbol, opts: struct { plt: bool = true }, elf_file: *Elf
return symbol.pltAddress(elf_file);
}
if (symbol.atom(elf_file)) |atom_ptr| {
+ if (!atom_ptr.flags.alive) {
+ if (mem.eql(u8, atom_ptr.name(elf_file), ".eh_frame")) {
+ const sym_name = symbol.name(elf_file);
+ if (mem.startsWith(u8, sym_name, "__EH_FRAME_BEGIN__") or
+ mem.startsWith(u8, sym_name, "__EH_FRAME_LIST__") or
+ mem.startsWith(u8, sym_name, ".eh_frame_seg") or
+ symbol.elfSym(elf_file).st_type() == elf.STT_SECTION)
+ {
+ return elf_file.shdrs.items[elf_file.eh_frame_section_index.?].sh_addr;
+ }
+
+ if (mem.startsWith(u8, sym_name, "__FRAME_END__") or
+ mem.startsWith(u8, sym_name, "__EH_FRAME_LIST_END__"))
+ {
+ const shdr = elf_file.shdrs.items[elf_file.eh_frame_section_index.?];
+ return shdr.sh_addr + shdr.sh_size;
+ }
+
+ // TODO I think we potentially should error here
+ }
+
+ return 0;
+ }
return atom_ptr.address(elf_file) + symbol.value;
}
return symbol.value;
@@ -432,6 +455,7 @@ pub const Index = u32;
const assert = std.debug.assert;
const elf = std.elf;
+const mem = std.mem;
const std = @import("std");
const synthetic_sections = @import("synthetic_sections.zig");
diff --git a/src/link/Elf/ZigObject.zig b/src/link/Elf/ZigObject.zig
index 6aede441c8..f65ef43eac 100644
--- a/src/link/Elf/ZigObject.zig
+++ b/src/link/Elf/ZigObject.zig
@@ -367,7 +367,7 @@ pub fn claimUnresolved(self: ZigObject, elf_file: *Elf) void {
}
const is_import = blk: {
- if (!elf_file.base.isDynLib()) break :blk false;
+ if (!elf_file.isEffectivelyDynLib()) break :blk false;
const vis = @as(elf.STV, @enumFromInt(esym.st_other));
if (vis == .HIDDEN) break :blk false;
break :blk true;
@@ -902,9 +902,9 @@ fn updateDeclCode(
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
- log.debug("updateDeclCode {s}{*}", .{ decl_name, decl });
+ log.debug("updateDeclCode {}{*}", .{ decl_name.fmt(&mod.intern_pool), decl });
const required_alignment = decl.getAlignment(mod);
@@ -915,7 +915,7 @@ fn updateDeclCode(
sym.output_section_index = shdr_index;
atom_ptr.output_section_index = shdr_index;
- sym.name_offset = try self.strtab.insert(gpa, decl_name);
+ sym.name_offset = try self.strtab.insert(gpa, decl_name.toSlice(&mod.intern_pool));
atom_ptr.flags.alive = true;
atom_ptr.name_offset = sym.name_offset;
esym.st_name = sym.name_offset;
@@ -932,7 +932,7 @@ fn updateDeclCode(
const need_realloc = code.len > capacity or !required_alignment.check(atom_ptr.value);
if (need_realloc) {
try atom_ptr.grow(elf_file);
- log.debug("growing {s} from 0x{x} to 0x{x}", .{ decl_name, old_vaddr, atom_ptr.value });
+ log.debug("growing {} from 0x{x} to 0x{x}", .{ decl_name.fmt(&mod.intern_pool), old_vaddr, atom_ptr.value });
if (old_vaddr != atom_ptr.value) {
sym.value = 0;
esym.st_value = 0;
@@ -1000,9 +1000,9 @@ fn updateTlv(
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
- log.debug("updateTlv {s} ({*})", .{ decl_name, decl });
+ log.debug("updateTlv {} ({*})", .{ decl_name.fmt(&mod.intern_pool), decl });
const required_alignment = decl.getAlignment(mod);
@@ -1014,7 +1014,7 @@ fn updateTlv(
sym.output_section_index = shndx;
atom_ptr.output_section_index = shndx;
- sym.name_offset = try self.strtab.insert(gpa, decl_name);
+ sym.name_offset = try self.strtab.insert(gpa, decl_name.toSlice(&mod.intern_pool));
atom_ptr.flags.alive = true;
atom_ptr.name_offset = sym.name_offset;
esym.st_value = 0;
@@ -1136,8 +1136,8 @@ pub fn updateDecl(
if (decl.isExtern(mod)) {
// Extern variable gets a .got entry only.
const variable = decl.getOwnedVariable(mod).?;
- const name = mod.intern_pool.stringToSlice(decl.name);
- const lib_name = mod.intern_pool.stringToSliceUnwrap(variable.lib_name);
+ const name = decl.name.toSlice(&mod.intern_pool);
+ const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
const esym_index = try self.getGlobalSymbol(elf_file, name, lib_name);
elf_file.symbol(self.symbol(esym_index)).flags.needs_got = true;
return;
@@ -1293,9 +1293,9 @@ pub fn lowerUnnamedConst(
}
const unnamed_consts = gop.value_ptr;
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
const index = unnamed_consts.items.len;
- const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
+ const name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl_name.fmt(&mod.intern_pool), index });
defer gpa.free(name);
const ty = val.typeOf(mod);
const sym_index = switch (try self.lowerConst(
@@ -1418,7 +1418,7 @@ pub fn updateExports(
for (exports) |exp| {
if (exp.opts.section.unwrap()) |section_name| {
- if (!mod.intern_pool.stringEqlSlice(section_name, ".text")) {
+ if (!section_name.eqlSlice(".text", &mod.intern_pool)) {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(exp, try Module.ErrorMsg.create(
gpa,
@@ -1445,7 +1445,7 @@ pub fn updateExports(
},
};
const stt_bits: u8 = @as(u4, @truncate(esym.st_info));
- const exp_name = mod.intern_pool.stringToSlice(exp.opts.name);
+ const exp_name = exp.opts.name.toSlice(&mod.intern_pool);
const name_off = try self.strtab.insert(gpa, exp_name);
const global_esym_index = if (metadata.@"export"(self, exp_name)) |exp_index|
exp_index.*
@@ -1476,9 +1476,9 @@ pub fn updateDeclLineNumber(
defer tracy.end();
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
- log.debug("updateDeclLineNumber {s}{*}", .{ decl_name, decl });
+ log.debug("updateDeclLineNumber {}{*}", .{ decl_name.fmt(&mod.intern_pool), decl });
if (self.dwarf) |*dw| {
try dw.updateDeclLineNumber(mod, decl_index);
@@ -1493,7 +1493,7 @@ pub fn deleteDeclExport(
) void {
const metadata = self.decls.getPtr(decl_index) orelse return;
const mod = elf_file.base.comp.module.?;
- const exp_name = mod.intern_pool.stringToSlice(name);
+ const exp_name = name.toSlice(&mod.intern_pool);
const esym_index = metadata.@"export"(self, exp_name) orelse return;
log.debug("deleting export '{s}'", .{exp_name});
const esym = &self.global_esyms.items(.elf_sym)[esym_index.*];
diff --git a/src/link/Elf/synthetic_sections.zig b/src/link/Elf/synthetic_sections.zig
index 0e7cb90545..2ef7d49540 100644
--- a/src/link/Elf/synthetic_sections.zig
+++ b/src/link/Elf/synthetic_sections.zig
@@ -89,7 +89,7 @@ pub const DynamicSection = struct {
if (elf_file.verneed_section_index != null) nentries += 2; // VERNEED
if (dt.getFlags(elf_file) != null) nentries += 1; // FLAGS
if (dt.getFlags1(elf_file) != null) nentries += 1; // FLAGS_1
- if (!elf_file.base.isDynLib()) nentries += 1; // DEBUG
+ if (!elf_file.isEffectivelyDynLib()) nentries += 1; // DEBUG
nentries += 1; // NULL
return nentries * @sizeOf(elf.Elf64_Dyn);
}
@@ -216,7 +216,7 @@ pub const DynamicSection = struct {
}
// DEBUG
- if (!elf_file.base.isDynLib()) try writer.writeStruct(elf.Elf64_Dyn{ .d_tag = elf.DT_DEBUG, .d_val = 0 });
+ if (!elf_file.isEffectivelyDynLib()) try writer.writeStruct(elf.Elf64_Dyn{ .d_tag = elf.DT_DEBUG, .d_val = 0 });
// NULL
try writer.writeStruct(elf.Elf64_Dyn{ .d_tag = elf.DT_NULL, .d_val = 0 });
@@ -256,7 +256,7 @@ pub const ZigGotSection = struct {
entry.* = sym_index;
const symbol = elf_file.symbol(sym_index);
symbol.flags.has_zig_got = true;
- if (elf_file.base.isDynLib() or (elf_file.base.isExe() and comp.config.pie)) {
+ if (elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) {
zig_got.flags.needs_rela = true;
}
if (symbol.extra(elf_file)) |extra| {
@@ -495,7 +495,7 @@ pub const GotSection = struct {
const symbol = elf_file.symbol(sym_index);
symbol.flags.has_got = true;
if (symbol.flags.import or symbol.isIFunc(elf_file) or
- ((elf_file.base.isDynLib() or (elf_file.base.isExe() and comp.config.pie)) and !symbol.isAbs(elf_file)))
+ ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and !symbol.isAbs(elf_file)))
{
got.flags.needs_rela = true;
}
@@ -528,7 +528,7 @@ pub const GotSection = struct {
entry.symbol_index = sym_index;
const symbol = elf_file.symbol(sym_index);
symbol.flags.has_tlsgd = true;
- if (symbol.flags.import or elf_file.base.isDynLib()) got.flags.needs_rela = true;
+ if (symbol.flags.import or elf_file.isEffectivelyDynLib()) got.flags.needs_rela = true;
if (symbol.extra(elf_file)) |extra| {
var new_extra = extra;
new_extra.tlsgd = index;
@@ -545,7 +545,7 @@ pub const GotSection = struct {
entry.symbol_index = sym_index;
const symbol = elf_file.symbol(sym_index);
symbol.flags.has_gottp = true;
- if (symbol.flags.import or elf_file.base.isDynLib()) got.flags.needs_rela = true;
+ if (symbol.flags.import or elf_file.isEffectivelyDynLib()) got.flags.needs_rela = true;
if (symbol.extra(elf_file)) |extra| {
var new_extra = extra;
new_extra.gottp = index;
@@ -580,7 +580,7 @@ pub const GotSection = struct {
pub fn write(got: GotSection, elf_file: *Elf, writer: anytype) !void {
const comp = elf_file.base.comp;
- const is_dyn_lib = elf_file.base.isDynLib();
+ const is_dyn_lib = elf_file.isEffectivelyDynLib();
const apply_relocs = true; // TODO add user option for this
for (got.entries.items) |entry| {
@@ -595,7 +595,7 @@ pub const GotSection = struct {
if (symbol.?.flags.import) break :blk 0;
if (symbol.?.isIFunc(elf_file))
break :blk if (apply_relocs) value else 0;
- if ((elf_file.base.isDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
+ if ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
!symbol.?.isAbs(elf_file))
{
break :blk if (apply_relocs) value else 0;
@@ -653,7 +653,7 @@ pub const GotSection = struct {
pub fn addRela(got: GotSection, elf_file: *Elf) !void {
const comp = elf_file.base.comp;
const gpa = comp.gpa;
- const is_dyn_lib = elf_file.base.isDynLib();
+ const is_dyn_lib = elf_file.isEffectivelyDynLib();
const cpu_arch = elf_file.getTarget().cpu.arch;
try elf_file.rela_dyn.ensureUnusedCapacity(gpa, got.numRela(elf_file));
@@ -683,7 +683,7 @@ pub const GotSection = struct {
});
continue;
}
- if ((elf_file.base.isDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
+ if ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
!symbol.?.isAbs(elf_file))
{
elf_file.addRelaDynAssumeCapacity(.{
@@ -758,7 +758,7 @@ pub const GotSection = struct {
pub fn numRela(got: GotSection, elf_file: *Elf) usize {
const comp = elf_file.base.comp;
- const is_dyn_lib = elf_file.base.isDynLib();
+ const is_dyn_lib = elf_file.isEffectivelyDynLib();
var num: usize = 0;
for (got.entries.items) |entry| {
const symbol = switch (entry.tag) {
@@ -767,7 +767,7 @@ pub const GotSection = struct {
};
switch (entry.tag) {
.got => if (symbol.?.flags.import or symbol.?.isIFunc(elf_file) or
- ((elf_file.base.isDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
+ ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
!symbol.?.isAbs(elf_file)))
{
num += 1;
diff --git a/src/link/MachO/ZigObject.zig b/src/link/MachO/ZigObject.zig
index fb27c96525..bb788fcacc 100644
--- a/src/link/MachO/ZigObject.zig
+++ b/src/link/MachO/ZigObject.zig
@@ -716,8 +716,8 @@ pub fn updateDecl(
if (decl.isExtern(mod)) {
// Extern variable gets a __got entry only
const variable = decl.getOwnedVariable(mod).?;
- const name = mod.intern_pool.stringToSlice(decl.name);
- const lib_name = mod.intern_pool.stringToSliceUnwrap(variable.lib_name);
+ const name = decl.name.toSlice(&mod.intern_pool);
+ const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
const index = try self.getGlobalSymbol(macho_file, name, lib_name);
const actual_index = self.symbols.items[index];
macho_file.getSymbol(actual_index).flags.needs_got = true;
@@ -786,9 +786,9 @@ fn updateDeclCode(
const gpa = macho_file.base.comp.gpa;
const mod = macho_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
- log.debug("updateDeclCode {s}{*}", .{ decl_name, decl });
+ log.debug("updateDeclCode {}{*}", .{ decl_name.fmt(&mod.intern_pool), decl });
const required_alignment = decl.getAlignment(mod);
@@ -800,7 +800,7 @@ fn updateDeclCode(
sym.out_n_sect = sect_index;
atom.out_n_sect = sect_index;
- sym.name = try self.strtab.insert(gpa, decl_name);
+ sym.name = try self.strtab.insert(gpa, decl_name.toSlice(&mod.intern_pool));
atom.flags.alive = true;
atom.name = sym.name;
nlist.n_strx = sym.name;
@@ -819,7 +819,7 @@ fn updateDeclCode(
if (need_realloc) {
try atom.grow(macho_file);
- log.debug("growing {s} from 0x{x} to 0x{x}", .{ decl_name, old_vaddr, atom.value });
+ log.debug("growing {} from 0x{x} to 0x{x}", .{ decl_name.fmt(&mod.intern_pool), old_vaddr, atom.value });
if (old_vaddr != atom.value) {
sym.value = 0;
nlist.n_value = 0;
@@ -870,23 +870,24 @@ fn updateTlv(
) !void {
const mod = macho_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
- log.debug("updateTlv {s} ({*})", .{ decl_name, decl });
+ log.debug("updateTlv {} ({*})", .{ decl_name.fmt(&mod.intern_pool), decl });
+ const decl_name_slice = decl_name.toSlice(&mod.intern_pool);
const required_alignment = decl.getAlignment(mod);
// 1. Lower TLV initializer
const init_sym_index = try self.createTlvInitializer(
macho_file,
- decl_name,
+ decl_name_slice,
required_alignment,
sect_index,
code,
);
// 2. Create TLV descriptor
- try self.createTlvDescriptor(macho_file, sym_index, init_sym_index, decl_name);
+ try self.createTlvDescriptor(macho_file, sym_index, init_sym_index, decl_name_slice);
}
fn createTlvInitializer(
@@ -1073,9 +1074,9 @@ pub fn lowerUnnamedConst(
}
const unnamed_consts = gop.value_ptr;
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
const index = unnamed_consts.items.len;
- const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
+ const name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl_name.fmt(&mod.intern_pool), index });
defer gpa.free(name);
const sym_index = switch (try self.lowerConst(
macho_file,
@@ -1199,7 +1200,7 @@ pub fn updateExports(
for (exports) |exp| {
if (exp.opts.section.unwrap()) |section_name| {
- if (!mod.intern_pool.stringEqlSlice(section_name, "__text")) {
+ if (!section_name.eqlSlice("__text", &mod.intern_pool)) {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(exp, try Module.ErrorMsg.create(
gpa,
@@ -1220,7 +1221,7 @@ pub fn updateExports(
continue;
}
- const exp_name = mod.intern_pool.stringToSlice(exp.opts.name);
+ const exp_name = exp.opts.name.toSlice(&mod.intern_pool);
const global_nlist_index = if (metadata.@"export"(self, exp_name)) |exp_index|
exp_index.*
else blk: {
@@ -1349,13 +1350,12 @@ pub fn deleteDeclExport(
decl_index: InternPool.DeclIndex,
name: InternPool.NullTerminatedString,
) void {
- const metadata = self.decls.getPtr(decl_index) orelse return;
-
const mod = macho_file.base.comp.module.?;
- const exp_name = mod.intern_pool.stringToSlice(name);
- const nlist_index = metadata.@"export"(self, exp_name) orelse return;
- log.debug("deleting export '{s}'", .{exp_name});
+ const metadata = self.decls.getPtr(decl_index) orelse return;
+ const nlist_index = metadata.@"export"(self, name.toSlice(&mod.intern_pool)) orelse return;
+
+ log.debug("deleting export '{}'", .{name.fmt(&mod.intern_pool)});
const nlist = &self.symtab.items(.nlist)[nlist_index.*];
self.symtab.items(.size)[nlist_index.*] = 0;
diff --git a/src/link/Plan9.zig b/src/link/Plan9.zig
index d5900d2b16..323cc8c4a9 100644
--- a/src/link/Plan9.zig
+++ b/src/link/Plan9.zig
@@ -477,11 +477,11 @@ pub fn lowerUnnamedConst(self: *Plan9, val: Value, decl_index: InternPool.DeclIn
}
const unnamed_consts = gop.value_ptr;
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
const index = unnamed_consts.items.len;
// name is freed when the unnamed const is freed
- const name = try std.fmt.allocPrint(gpa, "__unnamed_{s}_{d}", .{ decl_name, index });
+ const name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl_name.fmt(&mod.intern_pool), index });
const sym_index = try self.allocateSymbolIndex();
const new_atom_idx = try self.createAtom();
@@ -529,7 +529,7 @@ pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: InternPool.DeclIndex)
const decl = mod.declPtr(decl_index);
if (decl.isExtern(mod)) {
- log.debug("found extern decl: {s}", .{mod.intern_pool.stringToSlice(decl.name)});
+ log.debug("found extern decl: {}", .{decl.name.fmt(&mod.intern_pool)});
return;
}
const atom_idx = try self.seeDecl(decl_index);
@@ -573,7 +573,7 @@ fn updateFinish(self: *Plan9, decl_index: InternPool.DeclIndex) !void {
const sym: aout.Sym = .{
.value = undefined, // the value of stuff gets filled in in flushModule
.type = atom.type,
- .name = try gpa.dupe(u8, mod.intern_pool.stringToSlice(decl.name)),
+ .name = try gpa.dupe(u8, decl.name.toSlice(&mod.intern_pool)),
};
if (atom.sym_index) |s| {
@@ -1013,10 +1013,12 @@ fn addDeclExports(
const atom = self.getAtom(metadata.index);
for (exports) |exp| {
- const exp_name = mod.intern_pool.stringToSlice(exp.opts.name);
+ const exp_name = exp.opts.name.toSlice(&mod.intern_pool);
// plan9 does not support custom sections
if (exp.opts.section.unwrap()) |section_name| {
- if (!mod.intern_pool.stringEqlSlice(section_name, ".text") and !mod.intern_pool.stringEqlSlice(section_name, ".data")) {
+ if (!section_name.eqlSlice(".text", &mod.intern_pool) and
+ !section_name.eqlSlice(".data", &mod.intern_pool))
+ {
try mod.failed_exports.put(mod.gpa, exp, try Module.ErrorMsg.create(
gpa,
mod.declPtr(decl_index).srcLoc(mod),
@@ -1129,19 +1131,21 @@ pub fn seeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) !Atom.Index {
// handle externs here because they might not get updateDecl called on them
const mod = self.base.comp.module.?;
const decl = mod.declPtr(decl_index);
- const name = mod.intern_pool.stringToSlice(decl.name);
if (decl.isExtern(mod)) {
// this is a "phantom atom" - it is never actually written to disk, just convenient for us to store stuff about externs
- if (std.mem.eql(u8, name, "etext")) {
+ if (decl.name.eqlSlice("etext", &mod.intern_pool)) {
self.etext_edata_end_atom_indices[0] = atom_idx;
- } else if (std.mem.eql(u8, name, "edata")) {
+ } else if (decl.name.eqlSlice("edata", &mod.intern_pool)) {
self.etext_edata_end_atom_indices[1] = atom_idx;
- } else if (std.mem.eql(u8, name, "end")) {
+ } else if (decl.name.eqlSlice("end", &mod.intern_pool)) {
self.etext_edata_end_atom_indices[2] = atom_idx;
}
try self.updateFinish(decl_index);
- log.debug("seeDecl(extern) for {s} (got_addr=0x{x})", .{ name, self.getAtom(atom_idx).getOffsetTableAddress(self) });
- } else log.debug("seeDecl for {s}", .{name});
+ log.debug("seeDecl(extern) for {} (got_addr=0x{x})", .{
+ decl.name.fmt(&mod.intern_pool),
+ self.getAtom(atom_idx).getOffsetTableAddress(self),
+ });
+ } else log.debug("seeDecl for {}", .{decl.name.fmt(&mod.intern_pool)});
return atom_idx;
}
@@ -1393,7 +1397,7 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
const sym = self.syms.items[atom.sym_index.?];
try self.writeSym(writer, sym);
if (self.base.comp.module.?.decl_exports.get(decl_index)) |exports| {
- for (exports.items) |e| if (decl_metadata.getExport(self, ip.stringToSlice(e.opts.name))) |exp_i| {
+ for (exports.items) |e| if (decl_metadata.getExport(self, e.opts.name.toSlice(ip))) |exp_i| {
try self.writeSym(writer, self.syms.items[exp_i]);
};
}
@@ -1440,7 +1444,7 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
const sym = self.syms.items[atom.sym_index.?];
try self.writeSym(writer, sym);
if (self.base.comp.module.?.decl_exports.get(decl_index)) |exports| {
- for (exports.items) |e| if (decl_metadata.getExport(self, ip.stringToSlice(e.opts.name))) |exp_i| {
+ for (exports.items) |e| if (decl_metadata.getExport(self, e.opts.name.toSlice(ip))) |exp_i| {
const s = self.syms.items[exp_i];
if (mem.eql(u8, s.name, "_start"))
self.entry_val = s.value;
@@ -1483,25 +1487,25 @@ pub fn getDeclVAddr(
reloc_info: link.File.RelocInfo,
) !u64 {
const mod = self.base.comp.module.?;
+ const ip = &mod.intern_pool;
const decl = mod.declPtr(decl_index);
- log.debug("getDeclVAddr for {s}", .{mod.intern_pool.stringToSlice(decl.name)});
+ log.debug("getDeclVAddr for {}", .{decl.name.fmt(ip)});
if (decl.isExtern(mod)) {
- const extern_name = mod.intern_pool.stringToSlice(decl.name);
- if (std.mem.eql(u8, extern_name, "etext")) {
+ if (decl.name.eqlSlice("etext", ip)) {
try self.addReloc(reloc_info.parent_atom_index, .{
.target = undefined,
.offset = reloc_info.offset,
.addend = reloc_info.addend,
.type = .special_etext,
});
- } else if (std.mem.eql(u8, extern_name, "edata")) {
+ } else if (decl.name.eqlSlice("edata", ip)) {
try self.addReloc(reloc_info.parent_atom_index, .{
.target = undefined,
.offset = reloc_info.offset,
.addend = reloc_info.addend,
.type = .special_edata,
});
- } else if (std.mem.eql(u8, extern_name, "end")) {
+ } else if (decl.name.eqlSlice("end", ip)) {
try self.addReloc(reloc_info.parent_atom_index, .{
.target = undefined,
.offset = reloc_info.offset,
diff --git a/src/link/SpirV.zig b/src/link/SpirV.zig
index 950e0375f0..27c905cc61 100644
--- a/src/link/SpirV.zig
+++ b/src/link/SpirV.zig
@@ -130,7 +130,7 @@ pub fn updateFunc(self: *SpirV, module: *Module, func_index: InternPool.Index, a
const func = module.funcInfo(func_index);
const decl = module.declPtr(func.owner_decl);
- log.debug("lowering function {s}", .{module.intern_pool.stringToSlice(decl.name)});
+ log.debug("lowering function {}", .{decl.name.fmt(&module.intern_pool)});
try self.object.updateFunc(module, func_index, air, liveness);
}
@@ -141,7 +141,7 @@ pub fn updateDecl(self: *SpirV, module: *Module, decl_index: InternPool.DeclInde
}
const decl = module.declPtr(decl_index);
- log.debug("lowering declaration {s}", .{module.intern_pool.stringToSlice(decl.name)});
+ log.debug("lowering declaration {}", .{decl.name.fmt(&module.intern_pool)});
try self.object.updateDecl(module, decl_index);
}
@@ -178,7 +178,7 @@ pub fn updateExports(
for (exports) |exp| {
try self.object.spv.declareEntryPoint(
spv_decl_index,
- mod.intern_pool.stringToSlice(exp.opts.name),
+ exp.opts.name.toSlice(&mod.intern_pool),
execution_model,
);
}
@@ -227,16 +227,24 @@ pub fn flushModule(self: *SpirV, arena: Allocator, prog_node: *std.Progress.Node
try error_info.appendSlice("zig_errors");
const mod = self.base.comp.module.?;
- for (mod.global_error_set.keys()) |name_nts| {
- const name = mod.intern_pool.stringToSlice(name_nts);
+ for (mod.global_error_set.keys()) |name| {
// Errors can contain pretty much any character - to encode them in a string we must escape
// them somehow. Easiest here is to use some established scheme, one which also preserves the
// name if it contains no strange characters is nice for debugging. URI encoding fits the bill.
// We're using : as separator, which is a reserved character.
- const escaped_name = try std.Uri.escapeString(gpa, name);
- defer gpa.free(escaped_name);
- try error_info.writer().print(":{s}", .{escaped_name});
+ try std.Uri.Component.percentEncode(
+ error_info.writer(),
+ name.toSlice(&mod.intern_pool),
+ struct {
+ fn isValidChar(c: u8) bool {
+ return switch (c) {
+ 0, '%', ':' => false,
+ else => true,
+ };
+ }
+ }.isValidChar,
+ );
}
try spv.sections.debug_strings.emit(gpa, .OpSourceExtension, .{
.extension = error_info.items,
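
The std.Uri.escapeString call is replaced above by streaming each error name through std.Uri.Component.percentEncode with a predicate that only rejects NUL, '%', and ':' (the chosen separator). A small sketch of the resulting encoding, reusing the same predicate:

    const std = @import("std");

    fn isValidChar(c: u8) bool {
        return switch (c) {
            0, '%', ':' => false,
            else => true,
        };
    }

    test "error names joined with ':' never contain a raw separator" {
        var out = std.ArrayList(u8).init(std.testing.allocator);
        defer out.deinit();
        try out.appendSlice("zig_errors");
        for ([_][]const u8{ "OutOfMemory", "weird:name" }) |name| {
            try out.append(':');
            try std.Uri.Component.percentEncode(out.writer(), name, isValidChar);
        }
        // Ordinary names pass through untouched; the ':' inside a name is escaped.
        try std.testing.expect(std.mem.startsWith(u8, out.items, "zig_errors:OutOfMemory:weird%"));
        try std.testing.expectEqual(@as(usize, 2), std.mem.count(u8, out.items, ":"));
    }
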
diff --git a/src/link/Wasm/ZigObject.zig b/src/link/Wasm/ZigObject.zig
index 9bf7718d2b..bcd98c3d3c 100644
--- a/src/link/Wasm/ZigObject.zig
+++ b/src/link/Wasm/ZigObject.zig
@@ -258,8 +258,8 @@ pub fn updateDecl(
if (decl.isExtern(mod)) {
const variable = decl.getOwnedVariable(mod).?;
- const name = mod.intern_pool.stringToSlice(decl.name);
- const lib_name = mod.intern_pool.stringToSliceUnwrap(variable.lib_name);
+ const name = decl.name.toSlice(&mod.intern_pool);
+ const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
return zig_object.addOrUpdateImport(wasm_file, name, atom.sym_index, lib_name, null);
}
const val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
@@ -341,8 +341,8 @@ fn finishUpdateDecl(
const atom_index = decl_info.atom;
const atom = wasm_file.getAtomPtr(atom_index);
const sym = zig_object.symbol(atom.sym_index);
- const full_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
- sym.name = try zig_object.string_table.insert(gpa, full_name);
+ const full_name = try decl.fullyQualifiedName(mod);
+ sym.name = try zig_object.string_table.insert(gpa, full_name.toSlice(&mod.intern_pool));
try atom.code.appendSlice(gpa, code);
atom.size = @intCast(code.len);
@@ -382,7 +382,7 @@ fn finishUpdateDecl(
// Will be freed upon freeing of decl or after cleanup of Wasm binary.
const full_segment_name = try std.mem.concat(gpa, u8, &.{
segment_name,
- full_name,
+ full_name.toSlice(&mod.intern_pool),
});
errdefer gpa.free(full_segment_name);
sym.tag = .data;
@@ -427,9 +427,9 @@ pub fn getOrCreateAtomForDecl(zig_object: *ZigObject, wasm_file: *Wasm, decl_ind
gop.value_ptr.* = .{ .atom = try wasm_file.createAtom(sym_index, zig_object.index) };
const mod = wasm_file.base.comp.module.?;
const decl = mod.declPtr(decl_index);
- const full_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const full_name = try decl.fullyQualifiedName(mod);
const sym = zig_object.symbol(sym_index);
- sym.name = try zig_object.string_table.insert(gpa, full_name);
+ sym.name = try zig_object.string_table.insert(gpa, full_name.toSlice(&mod.intern_pool));
}
return gop.value_ptr.atom;
}
@@ -478,9 +478,9 @@ pub fn lowerUnnamedConst(zig_object: *ZigObject, wasm_file: *Wasm, val: Value, d
const parent_atom_index = try zig_object.getOrCreateAtomForDecl(wasm_file, decl_index);
const parent_atom = wasm_file.getAtom(parent_atom_index);
const local_index = parent_atom.locals.items.len;
- const fqn = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
- const name = try std.fmt.allocPrintZ(gpa, "__unnamed_{s}_{d}", .{
- fqn, local_index,
+ const fqn = try decl.fullyQualifiedName(mod);
+ const name = try std.fmt.allocPrintZ(gpa, "__unnamed_{}_{d}", .{
+ fqn.fmt(&mod.intern_pool), local_index,
});
defer gpa.free(name);
@@ -623,11 +623,11 @@ fn populateErrorNameTable(zig_object: *ZigObject, wasm_file: *Wasm) !void {
// Addend for each relocation to the table
var addend: u32 = 0;
const mod = wasm_file.base.comp.module.?;
- for (mod.global_error_set.keys()) |error_name_nts| {
+ for (mod.global_error_set.keys()) |error_name| {
const atom = wasm_file.getAtomPtr(atom_index);
- const error_name = mod.intern_pool.stringToSlice(error_name_nts);
- const len: u32 = @intCast(error_name.len + 1); // names are 0-terminated
+ const error_name_slice = error_name.toSlice(&mod.intern_pool);
+ const len: u32 = @intCast(error_name_slice.len + 1); // names are 0-terminated
const slice_ty = Type.slice_const_u8_sentinel_0;
const offset = @as(u32, @intCast(atom.code.items.len));
@@ -646,10 +646,9 @@ fn populateErrorNameTable(zig_object: *ZigObject, wasm_file: *Wasm) !void {
// as we updated the error name table, we now store the actual name within the names atom
try names_atom.code.ensureUnusedCapacity(gpa, len);
- names_atom.code.appendSliceAssumeCapacity(error_name);
- names_atom.code.appendAssumeCapacity(0);
+ names_atom.code.appendSliceAssumeCapacity(error_name_slice[0..len]);
- log.debug("Populated error name: '{s}'", .{error_name});
+ log.debug("Populated error name: '{}'", .{error_name.fmt(&mod.intern_pool)});
}
names_atom.size = addend;
zig_object.error_names_atom = names_atom_index;
@@ -833,8 +832,7 @@ pub fn deleteDeclExport(
) void {
const mod = wasm_file.base.comp.module.?;
const decl_info = zig_object.decls_map.getPtr(decl_index) orelse return;
- const export_name = mod.intern_pool.stringToSlice(name);
- if (decl_info.@"export"(zig_object, export_name)) |sym_index| {
+ if (decl_info.@"export"(zig_object, name.toSlice(&mod.intern_pool))) |sym_index| {
const sym = zig_object.symbol(sym_index);
decl_info.deleteExport(sym_index);
std.debug.assert(zig_object.global_syms.remove(sym.name));
@@ -864,10 +862,10 @@ pub fn updateExports(
const atom = wasm_file.getAtom(atom_index);
const atom_sym = atom.symbolLoc().getSymbol(wasm_file).*;
const gpa = mod.gpa;
- log.debug("Updating exports for decl '{s}'", .{mod.intern_pool.stringToSlice(decl.name)});
+ log.debug("Updating exports for decl '{}'", .{decl.name.fmt(&mod.intern_pool)});
for (exports) |exp| {
- if (mod.intern_pool.stringToSliceUnwrap(exp.opts.section)) |section| {
+ if (exp.opts.section.toSlice(&mod.intern_pool)) |section| {
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
gpa,
decl.srcLoc(mod),
@@ -877,10 +875,8 @@ pub fn updateExports(
continue;
}
- const export_string = mod.intern_pool.stringToSlice(exp.opts.name);
- const sym_index = if (decl_info.@"export"(zig_object, export_string)) |idx|
- idx
- else index: {
+ const export_string = exp.opts.name.toSlice(&mod.intern_pool);
+ const sym_index = if (decl_info.@"export"(zig_object, export_string)) |idx| idx else index: {
const sym_index = try zig_object.allocateSymbol(gpa);
try decl_info.appendExport(gpa, sym_index);
break :index sym_index;
@@ -1089,9 +1085,9 @@ pub fn createDebugSectionForIndex(zig_object: *ZigObject, wasm_file: *Wasm, inde
pub fn updateDeclLineNumber(zig_object: *ZigObject, mod: *Module, decl_index: InternPool.DeclIndex) !void {
if (zig_object.dwarf) |*dw| {
const decl = mod.declPtr(decl_index);
- const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
+ const decl_name = try decl.fullyQualifiedName(mod);
- log.debug("updateDeclLineNumber {s}{*}", .{ decl_name, decl });
+ log.debug("updateDeclLineNumber {}{*}", .{ decl_name.fmt(&mod.intern_pool), decl });
try dw.updateDeclLineNumber(mod, decl_index);
}
}
diff --git a/src/mutable_value.zig b/src/mutable_value.zig
index 327c354108..f16a8fd3f9 100644
--- a/src/mutable_value.zig
+++ b/src/mutable_value.zig
@@ -73,7 +73,7 @@ pub const MutableValue = union(enum) {
} }),
.bytes => |b| try ip.get(gpa, .{ .aggregate = .{
.ty = b.ty,
- .storage = .{ .bytes = b.data },
+ .storage = .{ .bytes = try ip.getOrPutString(gpa, b.data, .maybe_embedded_nulls) },
} }),
.aggregate => |a| {
const elems = try arena.alloc(InternPool.Index, a.elems.len);
@@ -158,18 +158,18 @@ pub const MutableValue = union(enum) {
},
.aggregate => |agg| switch (agg.storage) {
.bytes => |bytes| {
- assert(bytes.len == ip.aggregateTypeLenIncludingSentinel(agg.ty));
+ const len: usize = @intCast(ip.aggregateTypeLenIncludingSentinel(agg.ty));
assert(ip.childType(agg.ty) == .u8_type);
if (allow_bytes) {
- const arena_bytes = try arena.alloc(u8, bytes.len);
- @memcpy(arena_bytes, bytes);
+ const arena_bytes = try arena.alloc(u8, len);
+ @memcpy(arena_bytes, bytes.toSlice(len, ip));
mv.* = .{ .bytes = .{
.ty = agg.ty,
.data = arena_bytes,
} };
} else {
- const mut_elems = try arena.alloc(MutableValue, bytes.len);
- for (bytes, mut_elems) |b, *mut_elem| {
+ const mut_elems = try arena.alloc(MutableValue, len);
+ for (bytes.toSlice(len, ip), mut_elems) |b, *mut_elem| {
mut_elem.* = .{ .interned = try ip.get(gpa, .{ .int = .{
.ty = .u8_type,
.storage = .{ .u64 = b },
diff --git a/src/print_value.zig b/src/print_value.zig
index 21a322cd63..c021b011c1 100644
--- a/src/print_value.zig
+++ b/src/print_value.zig
@@ -204,26 +204,35 @@ fn printAggregate(
try writer.writeAll(" }");
return;
},
- .Array => if (aggregate.storage == .bytes and aggregate.storage.bytes.len > 0) {
- const skip_terminator = aggregate.storage.bytes[aggregate.storage.bytes.len - 1] == 0;
- const bytes = if (skip_terminator) b: {
- break :b aggregate.storage.bytes[0 .. aggregate.storage.bytes.len - 1];
- } else aggregate.storage.bytes;
- try writer.print("\"{}\"", .{std.zig.fmtEscapes(bytes)});
- if (!is_ref) try writer.writeAll(".*");
- return;
- } else if (ty.arrayLen(zcu) == 0) {
- if (is_ref) try writer.writeByte('&');
- return writer.writeAll(".{}");
- } else if (ty.arrayLen(zcu) == 1) one_byte_str: {
- // The repr isn't `bytes`, but we might still be able to print this as a string
- if (ty.childType(zcu).toIntern() != .u8_type) break :one_byte_str;
- const elem_val = Value.fromInterned(aggregate.storage.values()[0]);
- if (elem_val.isUndef(zcu)) break :one_byte_str;
- const byte = elem_val.toUnsignedInt(zcu);
- try writer.print("\"{}\"", .{std.zig.fmtEscapes(&.{@intCast(byte)})});
- if (!is_ref) try writer.writeAll(".*");
- return;
+ .Array => {
+ switch (aggregate.storage) {
+ .bytes => |bytes| string: {
+ const len = ty.arrayLenIncludingSentinel(zcu);
+ if (len == 0) break :string;
+ const slice = bytes.toSlice(if (bytes.at(len - 1, ip) == 0) len - 1 else len, ip);
+ try writer.print("\"{}\"", .{std.zig.fmtEscapes(slice)});
+ if (!is_ref) try writer.writeAll(".*");
+ return;
+ },
+ .elems, .repeated_elem => {},
+ }
+ switch (ty.arrayLen(zcu)) {
+ 0 => {
+ if (is_ref) try writer.writeByte('&');
+ return writer.writeAll(".{}");
+ },
+ 1 => one_byte_str: {
+ // The repr isn't `bytes`, but we might still be able to print this as a string
+ if (ty.childType(zcu).toIntern() != .u8_type) break :one_byte_str;
+ const elem_val = Value.fromInterned(aggregate.storage.values()[0]);
+ if (elem_val.isUndef(zcu)) break :one_byte_str;
+ const byte = elem_val.toUnsignedInt(zcu);
+ try writer.print("\"{}\"", .{std.zig.fmtEscapes(&.{@intCast(byte)})});
+ if (!is_ref) try writer.writeAll(".*");
+ return;
+ },
+ else => {},
+ }
},
.Vector => if (ty.arrayLen(zcu) == 0) {
if (is_ref) try writer.writeByte('&');
diff --git a/src/target.zig b/src/target.zig
index fa782075c7..3ad36deab2 100644
--- a/src/target.zig
+++ b/src/target.zig
@@ -59,10 +59,15 @@ pub fn alwaysSingleThreaded(target: std.Target) bool {
}
pub fn defaultSingleThreaded(target: std.Target) bool {
- return switch (target.cpu.arch) {
- .wasm32, .wasm64 => true,
- else => false,
- };
+ switch (target.cpu.arch) {
+ .wasm32, .wasm64 => return true,
+ else => {},
+ }
+ switch (target.os.tag) {
+ .haiku => return true,
+ else => {},
+ }
+ return false;
}
/// Valgrind supports more, but Zig does not support them yet.
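
The reworked defaultSingleThreaded above now also returns true for Haiku, in addition to the wasm architectures. Here is an illustrative, self-contained mirror of that decision with std.Target reduced to its arch and OS tags:

    const std = @import("std");

    fn defaultSingleThreaded(arch: std.Target.Cpu.Arch, os_tag: std.Target.Os.Tag) bool {
        switch (arch) {
            .wasm32, .wasm64 => return true,
            else => {},
        }
        switch (os_tag) {
            .haiku => return true,
            else => {},
        }
        return false;
    }

    test defaultSingleThreaded {
        try std.testing.expect(defaultSingleThreaded(.wasm32, .freestanding));
        try std.testing.expect(defaultSingleThreaded(.x86_64, .haiku));
        try std.testing.expect(!defaultSingleThreaded(.x86_64, .linux));
    }
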
diff --git a/src/translate_c.zig b/src/translate_c.zig
index 16c2060163..bf6b8ec1d8 100644
--- a/src/translate_c.zig
+++ b/src/translate_c.zig
@@ -2086,6 +2086,11 @@ fn finishBoolExpr(
}
},
.Pointer => {
+ if (node.tag() == .string_literal) {
+ // @intFromPtr(node) != 0
+ const int_from_ptr = try Tag.int_from_ptr.create(c.arena, node);
+ return Tag.not_equal.create(c.arena, .{ .lhs = int_from_ptr, .rhs = Tag.zero_literal.init() });
+ }
// node != null
return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.null_literal.init() });
},
@@ -5793,7 +5798,12 @@ fn macroIntToBool(c: *Context, node: Node) !Node {
if (isBoolRes(node)) {
return node;
}
-
+ if (node.tag() == .string_literal) {
+ // @intFromPtr(node) != 0
+ const int_from_ptr = try Tag.int_from_ptr.create(c.arena, node);
+ return Tag.not_equal.create(c.arena, .{ .lhs = int_from_ptr, .rhs = Tag.zero_literal.init() });
+ }
+ // node != 0
return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.zero_literal.init() });
}
diff --git a/src/type.zig b/src/type.zig
index 8352552463..264125c6d0 100644
--- a/src/type.zig
+++ b/src/type.zig
@@ -490,18 +490,10 @@ pub const Type = struct {
};
},
.anyframe_type => true,
- .array_type => |array_type| {
- if (array_type.sentinel != .none) {
- return Type.fromInterned(array_type.child).hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat);
- } else {
- return array_type.len > 0 and
- try Type.fromInterned(array_type.child).hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat);
- }
- },
- .vector_type => |vector_type| {
- return vector_type.len > 0 and
- try Type.fromInterned(vector_type.child).hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat);
- },
+ .array_type => |array_type| return array_type.lenIncludingSentinel() > 0 and
+ try Type.fromInterned(array_type.child).hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat),
+ .vector_type => |vector_type| return vector_type.len > 0 and
+ try Type.fromInterned(vector_type.child).hasRuntimeBitsAdvanced(mod, ignore_comptime_only, strat),
.opt_type => |child| {
const child_ty = Type.fromInterned(child);
if (child_ty.isNoReturn(mod)) {
@@ -1240,7 +1232,7 @@ pub const Type = struct {
.anyframe_type => return AbiSizeAdvanced{ .scalar = @divExact(target.ptrBitWidth(), 8) },
.array_type => |array_type| {
- const len = array_type.len + @intFromBool(array_type.sentinel != .none);
+ const len = array_type.lenIncludingSentinel();
if (len == 0) return .{ .scalar = 0 };
switch (try Type.fromInterned(array_type.child).abiSizeAdvanced(mod, strat)) {
.scalar => |elem_size| return .{ .scalar = len * elem_size },
@@ -1577,7 +1569,7 @@ pub const Type = struct {
.anyframe_type => return target.ptrBitWidth(),
.array_type => |array_type| {
- const len = array_type.len + @intFromBool(array_type.sentinel != .none);
+ const len = array_type.lenIncludingSentinel();
if (len == 0) return 0;
const elem_ty = Type.fromInterned(array_type.child);
const elem_size = @max(
@@ -1731,7 +1723,7 @@ pub const Type = struct {
.struct_type => ip.loadStructType(ty.toIntern()).haveLayout(ip),
.union_type => ip.loadUnionType(ty.toIntern()).haveLayout(ip),
.array_type => |array_type| {
- if ((array_type.len + @intFromBool(array_type.sentinel != .none)) == 0) return true;
+ if (array_type.lenIncludingSentinel() == 0) return true;
return Type.fromInterned(array_type.child).layoutIsResolved(mod);
},
.opt_type => |child| Type.fromInterned(child).layoutIsResolved(mod),