Diffstat (limited to 'src/link')
-rw-r--r--  src/link/C.zig                242
-rw-r--r--  src/link/Coff.zig             400
-rw-r--r--  src/link/Dwarf.zig           1039
-rw-r--r--  src/link/Elf.zig               40
-rw-r--r--  src/link/Elf/ZigObject.zig    545
-rw-r--r--  src/link/MachO.zig             38
-rw-r--r--  src/link/MachO/Atom.zig        20
-rw-r--r--  src/link/MachO/ZigObject.zig  554
-rw-r--r--  src/link/NvPtx.zig              4
-rw-r--r--  src/link/Plan9.zig            527
-rw-r--r--  src/link/SpirV.zig             40
-rw-r--r--  src/link/Wasm.zig              51
-rw-r--r--  src/link/Wasm/ZigObject.zig   397
13 files changed, 1674 insertions, 2223 deletions
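
This patch replaces the old Decl-keyed linker APIs with InternPool.Nav.Index keys for named values ("navs") and plain InternPool.Index keys for unnamed anonymous values ("uavs"). A minimal sketch of the accessors the renamed code paths lean on, using only calls that appear in the hunks below (getNav, navValue, navFileScope); the wrapper function itself is illustrative and not part of the patch:

// Sketch only: the Nav-based lookups that replace the old
// `zcu.declPtr(decl_index)` pattern throughout this diff.
fn navInfo(zcu: *Zcu, nav_index: InternPool.Nav.Index) void {
    const ip = &zcu.intern_pool;
    const nav = ip.getNav(nav_index); // name, fqn, status.resolved.val
    const val = zcu.navValue(nav_index); // the Value backing this Nav
    const mod = zcu.navFileScope(nav_index).mod; // owning module (formerly via src_namespace -> fileScope)
    _ = .{ nav, val, mod };
}
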
diff --git a/src/link/C.zig b/src/link/C.zig
index 1a6cee068e..e7c8f6a7b0 100644
--- a/src/link/C.zig
+++ b/src/link/C.zig
@@ -19,28 +19,27 @@ const Value = @import("../Value.zig");
const Air = @import("../Air.zig");
const Liveness = @import("../Liveness.zig");
-pub const base_tag: link.File.Tag = .c;
pub const zig_h = "#include \"zig.h\"\n";
base: link.File,
/// This linker backend does not try to incrementally link output C source code.
/// Instead, it tracks all declarations in this table, and iterates over it
/// in the flush function, stitching pre-rendered pieces of C code together.
-decl_table: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, DeclBlock) = .{},
+navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, AvBlock) = .{},
/// All the string bytes of rendered C code, all squished into one array.
/// While in progress, a separate buffer is used, and then when finished, the
/// buffer is copied into this one.
string_bytes: std.ArrayListUnmanaged(u8) = .{},
/// Tracks all the anonymous decls that are used by all the decls so they can
/// be rendered during flush().
-anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, DeclBlock) = .{},
-/// Sparse set of anon decls that are overaligned. Underaligned anon decls are
+uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, AvBlock) = .{},
+/// Sparse set of uavs that are overaligned. Underaligned anon decls are
/// lowered the same as ABI-aligned anon decls. The keys here are a subset of
-/// the keys of `anon_decls`.
-aligned_anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment) = .{},
+/// the keys of `uavs`.
+aligned_uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment) = .{},
-exported_decls: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, ExportedBlock) = .{},
-exported_values: std.AutoArrayHashMapUnmanaged(InternPool.Index, ExportedBlock) = .{},
+exported_navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, ExportedBlock) = .{},
+exported_uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, ExportedBlock) = .{},
/// Optimization, `updateDecl` reuses this buffer rather than creating a new
/// one with every call.
@@ -67,7 +66,7 @@ const String = extern struct {
};
/// Per-declaration data.
-pub const DeclBlock = struct {
+pub const AvBlock = struct {
code: String = String.empty,
fwd_decl: String = String.empty,
/// Each `Decl` stores a set of used `CType`s. In `flush()`, we iterate
@@ -76,10 +75,10 @@ pub const DeclBlock = struct {
/// May contain string references to ctype_pool
lazy_fns: codegen.LazyFnMap = .{},
- fn deinit(db: *DeclBlock, gpa: Allocator) void {
- db.lazy_fns.deinit(gpa);
- db.ctype_pool.deinit(gpa);
- db.* = undefined;
+ fn deinit(ab: *AvBlock, gpa: Allocator) void {
+ ab.lazy_fns.deinit(gpa);
+ ab.ctype_pool.deinit(gpa);
+ ab.* = undefined;
}
};
@@ -158,16 +157,16 @@ pub fn createEmpty(
pub fn deinit(self: *C) void {
const gpa = self.base.comp.gpa;
- for (self.decl_table.values()) |*db| {
+ for (self.navs.values()) |*db| {
db.deinit(gpa);
}
- self.decl_table.deinit(gpa);
+ self.navs.deinit(gpa);
- for (self.anon_decls.values()) |*db| {
+ for (self.uavs.values()) |*db| {
db.deinit(gpa);
}
- self.anon_decls.deinit(gpa);
- self.aligned_anon_decls.deinit(gpa);
+ self.uavs.deinit(gpa);
+ self.aligned_uavs.deinit(gpa);
self.string_bytes.deinit(gpa);
self.fwd_decl_buf.deinit(gpa);
@@ -194,9 +193,7 @@ pub fn updateFunc(
const zcu = pt.zcu;
const gpa = zcu.gpa;
const func = zcu.funcInfo(func_index);
- const decl_index = func.owner_decl;
- const decl = zcu.declPtr(decl_index);
- const gop = try self.decl_table.getOrPut(gpa, decl_index);
+ const gop = try self.navs.getOrPut(gpa, func.owner_nav);
if (!gop.found_existing) gop.value_ptr.* = .{};
const ctype_pool = &gop.value_ptr.ctype_pool;
const lazy_fns = &gop.value_ptr.lazy_fns;
@@ -208,8 +205,6 @@ pub fn updateFunc(
fwd_decl.clearRetainingCapacity();
code.clearRetainingCapacity();
- const file_scope = zcu.namespacePtr(decl.src_namespace).fileScope(zcu);
-
var function: codegen.Function = .{
.value_map = codegen.CValueMap.init(gpa),
.air = air,
@@ -219,15 +214,15 @@ pub fn updateFunc(
.dg = .{
.gpa = gpa,
.pt = pt,
- .mod = file_scope.mod,
+ .mod = zcu.navFileScope(func.owner_nav).mod,
.error_msg = null,
- .pass = .{ .decl = decl_index },
- .is_naked_fn = decl.typeOf(zcu).fnCallingConvention(zcu) == .Naked,
+ .pass = .{ .nav = func.owner_nav },
+ .is_naked_fn = zcu.navValue(func.owner_nav).typeOf(zcu).fnCallingConvention(zcu) == .Naked,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctype_pool = ctype_pool.*,
.scratch = .{},
- .anon_decl_deps = self.anon_decls,
- .aligned_anon_decls = self.aligned_anon_decls,
+ .uav_deps = self.uavs,
+ .aligned_uavs = self.aligned_uavs,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@@ -236,8 +231,8 @@ pub fn updateFunc(
};
function.object.indent_writer = .{ .underlying_writer = function.object.code.writer() };
defer {
- self.anon_decls = function.object.dg.anon_decl_deps;
- self.aligned_anon_decls = function.object.dg.aligned_anon_decls;
+ self.uavs = function.object.dg.uav_deps;
+ self.aligned_uavs = function.object.dg.aligned_uavs;
fwd_decl.* = function.object.dg.fwd_decl.moveToUnmanaged();
ctype_pool.* = function.object.dg.ctype_pool.move();
ctype_pool.freeUnusedCapacity(gpa);
@@ -248,13 +243,10 @@ pub fn updateFunc(
function.deinit();
}
- try zcu.failed_analysis.ensureUnusedCapacity(gpa, 1);
+ try zcu.failed_codegen.ensureUnusedCapacity(gpa, 1);
codegen.genFunc(&function) catch |err| switch (err) {
error.AnalysisFail => {
- zcu.failed_analysis.putAssumeCapacityNoClobber(
- InternPool.AnalUnit.wrap(.{ .decl = decl_index }),
- function.object.dg.error_msg.?,
- );
+ zcu.failed_codegen.putAssumeCapacityNoClobber(func.owner_nav, function.object.dg.error_msg.?);
return;
},
else => |e| return e,
@@ -263,9 +255,9 @@ pub fn updateFunc(
gop.value_ptr.code = try self.addString(function.object.code.items);
}
-fn updateAnonDecl(self: *C, pt: Zcu.PerThread, i: usize) !void {
+fn updateUav(self: *C, pt: Zcu.PerThread, i: usize) !void {
const gpa = self.base.comp.gpa;
- const anon_decl = self.anon_decls.keys()[i];
+ const uav = self.uavs.keys()[i];
const fwd_decl = &self.fwd_decl_buf;
const code = &self.code_buf;
@@ -278,21 +270,21 @@ fn updateAnonDecl(self: *C, pt: Zcu.PerThread, i: usize) !void {
.pt = pt,
.mod = pt.zcu.root_mod,
.error_msg = null,
- .pass = .{ .anon = anon_decl },
+ .pass = .{ .uav = uav },
.is_naked_fn = false,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctype_pool = codegen.CType.Pool.empty,
.scratch = .{},
- .anon_decl_deps = self.anon_decls,
- .aligned_anon_decls = self.aligned_anon_decls,
+ .uav_deps = self.uavs,
+ .aligned_uavs = self.aligned_uavs,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
- self.anon_decls = object.dg.anon_decl_deps;
- self.aligned_anon_decls = object.dg.aligned_anon_decls;
+ self.uavs = object.dg.uav_deps;
+ self.aligned_uavs = object.dg.aligned_uavs;
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
object.dg.ctype_pool.deinit(object.dg.gpa);
object.dg.scratch.deinit(gpa);
@@ -300,8 +292,8 @@ fn updateAnonDecl(self: *C, pt: Zcu.PerThread, i: usize) !void {
}
try object.dg.ctype_pool.init(gpa);
- const c_value: codegen.CValue = .{ .constant = Value.fromInterned(anon_decl) };
- const alignment: Alignment = self.aligned_anon_decls.get(anon_decl) orelse .none;
+ const c_value: codegen.CValue = .{ .constant = Value.fromInterned(uav) };
+ const alignment: Alignment = self.aligned_uavs.get(uav) orelse .none;
codegen.genDeclValue(&object, c_value.constant, c_value, alignment, .none) catch |err| switch (err) {
error.AnalysisFail => {
@panic("TODO: C backend AnalysisFail on anonymous decl");
@@ -312,23 +304,22 @@ fn updateAnonDecl(self: *C, pt: Zcu.PerThread, i: usize) !void {
};
object.dg.ctype_pool.freeUnusedCapacity(gpa);
- object.dg.anon_decl_deps.values()[i] = .{
+ object.dg.uav_deps.values()[i] = .{
.code = try self.addString(object.code.items),
.fwd_decl = try self.addString(object.dg.fwd_decl.items),
.ctype_pool = object.dg.ctype_pool.move(),
};
}
-pub fn updateDecl(self: *C, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNav(self: *C, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !void {
const tracy = trace(@src());
defer tracy.end();
const gpa = self.base.comp.gpa;
const zcu = pt.zcu;
- const decl = zcu.declPtr(decl_index);
- const gop = try self.decl_table.getOrPut(gpa, decl_index);
- errdefer _ = self.decl_table.pop();
+ const gop = try self.navs.getOrPut(gpa, nav_index);
+ errdefer _ = self.navs.pop();
if (!gop.found_existing) gop.value_ptr.* = .{};
const ctype_pool = &gop.value_ptr.ctype_pool;
const fwd_decl = &self.fwd_decl_buf;
@@ -338,29 +329,27 @@ pub fn updateDecl(self: *C, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex)
fwd_decl.clearRetainingCapacity();
code.clearRetainingCapacity();
- const file_scope = zcu.namespacePtr(decl.src_namespace).fileScope(zcu);
-
var object: codegen.Object = .{
.dg = .{
.gpa = gpa,
.pt = pt,
- .mod = file_scope.mod,
+ .mod = zcu.navFileScope(nav_index).mod,
.error_msg = null,
- .pass = .{ .decl = decl_index },
+ .pass = .{ .nav = nav_index },
.is_naked_fn = false,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctype_pool = ctype_pool.*,
.scratch = .{},
- .anon_decl_deps = self.anon_decls,
- .aligned_anon_decls = self.aligned_anon_decls,
+ .uav_deps = self.uavs,
+ .aligned_uavs = self.aligned_uavs,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
- self.anon_decls = object.dg.anon_decl_deps;
- self.aligned_anon_decls = object.dg.aligned_anon_decls;
+ self.uavs = object.dg.uav_deps;
+ self.aligned_uavs = object.dg.aligned_uavs;
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
ctype_pool.* = object.dg.ctype_pool.move();
ctype_pool.freeUnusedCapacity(gpa);
@@ -368,13 +357,10 @@ pub fn updateDecl(self: *C, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex)
code.* = object.code.moveToUnmanaged();
}
- try zcu.failed_analysis.ensureUnusedCapacity(gpa, 1);
+ try zcu.failed_codegen.ensureUnusedCapacity(gpa, 1);
codegen.genDecl(&object) catch |err| switch (err) {
error.AnalysisFail => {
- zcu.failed_analysis.putAssumeCapacityNoClobber(
- InternPool.AnalUnit.wrap(.{ .decl = decl_index }),
- object.dg.error_msg.?,
- );
+ zcu.failed_codegen.putAssumeCapacityNoClobber(nav_index, object.dg.error_msg.?);
return;
},
else => |e| return e,
@@ -383,12 +369,12 @@ pub fn updateDecl(self: *C, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex)
gop.value_ptr.fwd_decl = try self.addString(object.dg.fwd_decl.items);
}
-pub fn updateDeclLineNumber(self: *C, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNavLineNumber(self: *C, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !void {
// The C backend does not have the ability to fix line numbers without re-generating
// the entire Decl.
_ = self;
_ = pt;
- _ = decl_index;
+ _ = nav_index;
}
pub fn flush(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: std.Progress.Node) !void {
@@ -422,12 +408,13 @@ pub fn flushModule(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
const comp = self.base.comp;
const gpa = comp.gpa;
const zcu = self.base.comp.module.?;
+ const ip = &zcu.intern_pool;
const pt: Zcu.PerThread = .{ .zcu = zcu, .tid = tid };
{
var i: usize = 0;
- while (i < self.anon_decls.count()) : (i += 1) {
- try updateAnonDecl(self, pt, i);
+ while (i < self.uavs.count()) : (i += 1) {
+ try self.updateUav(pt, i);
}
}
@@ -484,30 +471,28 @@ pub fn flushModule(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
}
}
- for (self.anon_decls.keys(), self.anon_decls.values()) |value, *decl_block| try self.flushDeclBlock(
+ for (self.uavs.keys(), self.uavs.values()) |uav, *av_block| try self.flushAvBlock(
pt,
zcu.root_mod,
&f,
- decl_block,
- self.exported_values.getPtr(value),
+ av_block,
+ self.exported_uavs.getPtr(uav),
export_names,
.none,
);
- for (self.decl_table.keys(), self.decl_table.values()) |decl_index, *decl_block| {
- const decl = zcu.declPtr(decl_index);
- const extern_name = if (decl.isExtern(zcu)) decl.name.toOptional() else .none;
- const mod = zcu.namespacePtr(decl.src_namespace).fileScope(zcu).mod;
- try self.flushDeclBlock(
- pt,
- mod,
- &f,
- decl_block,
- self.exported_decls.getPtr(decl_index),
- export_names,
- extern_name,
- );
- }
+ for (self.navs.keys(), self.navs.values()) |nav, *av_block| try self.flushAvBlock(
+ pt,
+ zcu.navFileScope(nav).mod,
+ &f,
+ av_block,
+ self.exported_navs.getPtr(nav),
+ export_names,
+ if (ip.indexToKey(zcu.navValue(nav).toIntern()) == .@"extern")
+ ip.getNav(nav).name.toOptional()
+ else
+ .none,
+ );
}
{
@@ -516,12 +501,12 @@ pub fn flushModule(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
try f.ctype_pool.init(gpa);
try self.flushCTypes(zcu, &f, .flush, &f.lazy_ctype_pool);
- for (self.anon_decls.keys(), self.anon_decls.values()) |anon_decl, decl_block| {
- try self.flushCTypes(zcu, &f, .{ .anon = anon_decl }, &decl_block.ctype_pool);
+ for (self.uavs.keys(), self.uavs.values()) |uav, av_block| {
+ try self.flushCTypes(zcu, &f, .{ .uav = uav }, &av_block.ctype_pool);
}
- for (self.decl_table.keys(), self.decl_table.values()) |decl_index, decl_block| {
- try self.flushCTypes(zcu, &f, .{ .decl = decl_index }, &decl_block.ctype_pool);
+ for (self.navs.keys(), self.navs.values()) |nav, av_block| {
+ try self.flushCTypes(zcu, &f, .{ .nav = nav }, &av_block.ctype_pool);
}
}
@@ -539,26 +524,21 @@ pub fn flushModule(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
f.file_size += lazy_fwd_decl_len;
// Now the code.
- try f.all_buffers.ensureUnusedCapacity(gpa, 1 + (self.anon_decls.count() + self.decl_table.count()) * 2);
+ try f.all_buffers.ensureUnusedCapacity(gpa, 1 + (self.uavs.count() + self.navs.count()) * 2);
f.appendBufAssumeCapacity(self.lazy_code_buf.items);
- for (self.anon_decls.keys(), self.anon_decls.values()) |anon_decl, decl_block| f.appendCodeAssumeCapacity(
- if (self.exported_values.contains(anon_decl))
- .default
- else switch (zcu.intern_pool.indexToKey(anon_decl)) {
- .extern_func => .zig_extern,
- .variable => |variable| if (variable.is_extern) .zig_extern else .static,
+ for (self.uavs.keys(), self.uavs.values()) |uav, av_block| f.appendCodeAssumeCapacity(
+ if (self.exported_uavs.contains(uav)) .default else switch (ip.indexToKey(uav)) {
+ .@"extern" => .zig_extern,
else => .static,
},
- self.getString(decl_block.code),
+ self.getString(av_block.code),
);
- for (self.decl_table.keys(), self.decl_table.values()) |decl_index, decl_block| f.appendCodeAssumeCapacity(
- if (self.exported_decls.contains(decl_index))
- .default
- else if (zcu.declPtr(decl_index).isExtern(zcu))
- .zig_extern
- else
- .static,
- self.getString(decl_block.code),
+ for (self.navs.keys(), self.navs.values()) |nav, av_block| f.appendCodeAssumeCapacity(
+ if (self.exported_navs.contains(nav)) .default else switch (ip.indexToKey(zcu.navValue(nav).toIntern())) {
+ .@"extern" => .zig_extern,
+ else => .static,
+ },
+ self.getString(av_block.code),
);
const file = self.base.file.?;
@@ -689,16 +669,16 @@ fn flushErrDecls(self: *C, pt: Zcu.PerThread, ctype_pool: *codegen.CType.Pool) F
.fwd_decl = fwd_decl.toManaged(gpa),
.ctype_pool = ctype_pool.*,
.scratch = .{},
- .anon_decl_deps = self.anon_decls,
- .aligned_anon_decls = self.aligned_anon_decls,
+ .uav_deps = self.uavs,
+ .aligned_uavs = self.aligned_uavs,
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer {
- self.anon_decls = object.dg.anon_decl_deps;
- self.aligned_anon_decls = object.dg.aligned_anon_decls;
+ self.uavs = object.dg.uav_deps;
+ self.aligned_uavs = object.dg.aligned_uavs;
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
ctype_pool.* = object.dg.ctype_pool.move();
ctype_pool.freeUnusedCapacity(gpa);
@@ -736,8 +716,8 @@ fn flushLazyFn(
.fwd_decl = fwd_decl.toManaged(gpa),
.ctype_pool = ctype_pool.*,
.scratch = .{},
- .anon_decl_deps = .{},
- .aligned_anon_decls = .{},
+ .uav_deps = .{},
+ .aligned_uavs = .{},
},
.code = code.toManaged(gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@@ -746,8 +726,8 @@ fn flushLazyFn(
defer {
// If this assert trips just handle the anon_decl_deps the same as
// `updateFunc()` does.
- assert(object.dg.anon_decl_deps.count() == 0);
- assert(object.dg.aligned_anon_decls.count() == 0);
+ assert(object.dg.uav_deps.count() == 0);
+ assert(object.dg.aligned_uavs.count() == 0);
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
ctype_pool.* = object.dg.ctype_pool.move();
ctype_pool.freeUnusedCapacity(gpa);
@@ -781,31 +761,33 @@ fn flushLazyFns(
}
}
-fn flushDeclBlock(
+fn flushAvBlock(
self: *C,
pt: Zcu.PerThread,
mod: *Module,
f: *Flush,
- decl_block: *const DeclBlock,
+ av_block: *const AvBlock,
exported_block: ?*const ExportedBlock,
export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void),
extern_name: InternPool.OptionalNullTerminatedString,
) FlushDeclError!void {
const gpa = self.base.comp.gpa;
- try self.flushLazyFns(pt, mod, f, &decl_block.ctype_pool, decl_block.lazy_fns);
+ try self.flushLazyFns(pt, mod, f, &av_block.ctype_pool, av_block.lazy_fns);
try f.all_buffers.ensureUnusedCapacity(gpa, 1);
// avoid emitting extern decls that are already exported
if (extern_name.unwrap()) |name| if (export_names.contains(name)) return;
f.appendBufAssumeCapacity(self.getString(if (exported_block) |exported|
exported.fwd_decl
else
- decl_block.fwd_decl));
+ av_block.fwd_decl));
}
pub fn flushEmitH(zcu: *Zcu) !void {
const tracy = trace(@src());
defer tracy.end();
+ if (true) return; // emit-h is regressed
+
const emit_h = zcu.emit_h orelse return;
// We collect a list of buffers to write, and write them all at once with pwritev 😎
@@ -854,17 +836,17 @@ pub fn updateExports(
const zcu = pt.zcu;
const gpa = zcu.gpa;
const mod, const pass: codegen.DeclGen.Pass, const decl_block, const exported_block = switch (exported) {
- .decl_index => |decl_index| .{
- zcu.namespacePtr(zcu.declPtr(decl_index).src_namespace).fileScope(zcu).mod,
- .{ .decl = decl_index },
- self.decl_table.getPtr(decl_index).?,
- (try self.exported_decls.getOrPut(gpa, decl_index)).value_ptr,
+ .nav => |nav| .{
+ zcu.navFileScope(nav).mod,
+ .{ .nav = nav },
+ self.navs.getPtr(nav).?,
+ (try self.exported_navs.getOrPut(gpa, nav)).value_ptr,
},
- .value => |value| .{
+ .uav => |uav| .{
zcu.root_mod,
- .{ .anon = value },
- self.anon_decls.getPtr(value).?,
- (try self.exported_values.getOrPut(gpa, value)).value_ptr,
+ .{ .uav = uav },
+ self.uavs.getPtr(uav).?,
+ (try self.exported_uavs.getOrPut(gpa, uav)).value_ptr,
},
};
const ctype_pool = &decl_block.ctype_pool;
@@ -880,12 +862,12 @@ pub fn updateExports(
.fwd_decl = fwd_decl.toManaged(gpa),
.ctype_pool = decl_block.ctype_pool,
.scratch = .{},
- .anon_decl_deps = .{},
- .aligned_anon_decls = .{},
+ .uav_deps = .{},
+ .aligned_uavs = .{},
};
defer {
- assert(dg.anon_decl_deps.count() == 0);
- assert(dg.aligned_anon_decls.count() == 0);
+ assert(dg.uav_deps.count() == 0);
+ assert(dg.aligned_uavs.count() == 0);
fwd_decl.* = dg.fwd_decl.moveToUnmanaged();
ctype_pool.* = dg.ctype_pool.move();
ctype_pool.freeUnusedCapacity(gpa);
@@ -901,7 +883,7 @@ pub fn deleteExport(
_: InternPool.NullTerminatedString,
) void {
switch (exported) {
- .decl_index => |decl_index| _ = self.exported_decls.swapRemove(decl_index),
- .value => |value| _ = self.exported_values.swapRemove(value),
+ .nav => |nav| _ = self.exported_navs.swapRemove(nav),
+ .uav => |uav| _ = self.exported_uavs.swapRemove(uav),
}
}
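
In link/C.zig, per-Nav and per-uav AvBlocks replace the old decl_table/anon_decls pair, and flushModule now derives each entry's linkage from the InternPool key of the Nav's value rather than from Decl.isExtern, with exported entries taking precedence. A hedged sketch of that decision follows; the Linkage enum is a stand-in name for illustration, while the member names and the .@"extern" check come from the hunks above:

// Illustrative stand-in enum; the patch passes .default/.zig_extern/.static
// directly to appendCodeAssumeCapacity rather than naming a type.
const Linkage = enum { default, zig_extern, static };

fn navLinkage(self: *C, zcu: *Zcu, nav: InternPool.Nav.Index) Linkage {
    const ip = &zcu.intern_pool;
    // Exported Navs always keep default linkage.
    if (self.exported_navs.contains(nav)) return .default;
    return switch (ip.indexToKey(zcu.navValue(nav).toIntern())) {
        .@"extern" => .zig_extern, // extern declarations use the zig_extern prefix
        else => .static, // everything else stays file-local
    };
}
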
diff --git a/src/link/Coff.zig b/src/link/Coff.zig
index 54e8504d00..73822dfec8 100644
--- a/src/link/Coff.zig
+++ b/src/link/Coff.zig
@@ -65,8 +65,8 @@ imports_count_dirty: bool = true,
/// Table of tracked LazySymbols.
lazy_syms: LazySymbolTable = .{},
-/// Table of tracked Decls.
-decls: DeclTable = .{},
+/// Table of tracked `Nav`s.
+navs: NavTable = .{},
/// List of atoms that are either synthetic or map directly to the Zig source program.
atoms: std.ArrayListUnmanaged(Atom) = .{},
@@ -74,27 +74,7 @@ atoms: std.ArrayListUnmanaged(Atom) = .{},
/// Table of atoms indexed by the symbol index.
atom_by_index_table: std.AutoHashMapUnmanaged(u32, Atom.Index) = .{},
-/// Table of unnamed constants associated with a parent `Decl`.
-/// We store them here so that we can free the constants whenever the `Decl`
-/// needs updating or is freed.
-///
-/// For example,
-///
-/// ```zig
-/// const Foo = struct{
-/// a: u8,
-/// };
-///
-/// pub fn main() void {
-/// var foo = Foo{ .a = 1 };
-/// _ = foo;
-/// }
-/// ```
-///
-/// value assigned to label `foo` is an unnamed constant belonging/associated
-/// with `Decl` `main`, and lives as long as that `Decl`.
-unnamed_const_atoms: UnnamedConstTable = .{},
-anon_decls: AnonDeclTable = .{},
+uavs: UavTable = .{},
/// A table of relocations indexed by the owning them `Atom`.
/// Note that once we refactor `Atom`'s lifetime and ownership rules,
@@ -120,11 +100,10 @@ const HotUpdateState = struct {
loaded_base_address: ?std.os.windows.HMODULE = null,
};
-const DeclTable = std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, DeclMetadata);
-const AnonDeclTable = std.AutoHashMapUnmanaged(InternPool.Index, DeclMetadata);
+const NavTable = std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, AvMetadata);
+const UavTable = std.AutoHashMapUnmanaged(InternPool.Index, AvMetadata);
const RelocTable = std.AutoArrayHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(Relocation));
const BaseRelocationTable = std.AutoArrayHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(u32));
-const UnnamedConstTable = std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, std.ArrayListUnmanaged(Atom.Index));
const default_file_alignment: u16 = 0x200;
const default_size_of_stack_reserve: u32 = 0x1000000;
@@ -155,7 +134,7 @@ const Section = struct {
free_list: std.ArrayListUnmanaged(Atom.Index) = .{},
};
-const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.OptionalDeclIndex, LazySymbolMetadata);
+const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.Index, LazySymbolMetadata);
const LazySymbolMetadata = struct {
const State = enum { unused, pending_flush, flushed };
@@ -165,17 +144,17 @@ const LazySymbolMetadata = struct {
rdata_state: State = .unused,
};
-const DeclMetadata = struct {
+const AvMetadata = struct {
atom: Atom.Index,
section: u16,
/// A list of all exports aliases of this Decl.
exports: std.ArrayListUnmanaged(u32) = .{},
- fn deinit(m: *DeclMetadata, allocator: Allocator) void {
+ fn deinit(m: *AvMetadata, allocator: Allocator) void {
m.exports.deinit(allocator);
}
- fn getExport(m: DeclMetadata, coff_file: *const Coff, name: []const u8) ?u32 {
+ fn getExport(m: AvMetadata, coff_file: *const Coff, name: []const u8) ?u32 {
for (m.exports.items) |exp| {
if (mem.eql(u8, name, coff_file.getSymbolName(.{
.sym_index = exp,
@@ -185,7 +164,7 @@ const DeclMetadata = struct {
return null;
}
- fn getExportPtr(m: *DeclMetadata, coff_file: *Coff, name: []const u8) ?*u32 {
+ fn getExportPtr(m: *AvMetadata, coff_file: *Coff, name: []const u8) ?*u32 {
for (m.exports.items) |*exp| {
if (mem.eql(u8, name, coff_file.getSymbolName(.{
.sym_index = exp.*,
@@ -486,24 +465,19 @@ pub fn deinit(self: *Coff) void {
self.lazy_syms.deinit(gpa);
- for (self.decls.values()) |*metadata| {
+ for (self.navs.values()) |*metadata| {
metadata.deinit(gpa);
}
- self.decls.deinit(gpa);
+ self.navs.deinit(gpa);
self.atom_by_index_table.deinit(gpa);
- for (self.unnamed_const_atoms.values()) |*atoms| {
- atoms.deinit(gpa);
- }
- self.unnamed_const_atoms.deinit(gpa);
-
{
- var it = self.anon_decls.iterator();
+ var it = self.uavs.iterator();
while (it.next()) |entry| {
entry.value_ptr.exports.deinit(gpa);
}
- self.anon_decls.deinit(gpa);
+ self.uavs.deinit(gpa);
}
for (self.relocs.values()) |*relocs| {
@@ -1132,23 +1106,20 @@ pub fn updateFunc(self: *Coff, pt: Zcu.PerThread, func_index: InternPool.Index,
const tracy = trace(@src());
defer tracy.end();
- const mod = pt.zcu;
- const func = mod.funcInfo(func_index);
- const decl_index = func.owner_decl;
- const decl = mod.declPtr(decl_index);
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const func = zcu.funcInfo(func_index);
- const atom_index = try self.getOrCreateAtomForDecl(decl_index);
- self.freeUnnamedConsts(decl_index);
+ const atom_index = try self.getOrCreateAtomForNav(func.owner_nav);
Atom.freeRelocations(self, atom_index);
- const gpa = self.base.comp.gpa;
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
const res = try codegen.generateFunction(
&self.base,
pt,
- decl.navSrcLoc(mod),
+ zcu.navSrcLoc(func.owner_nav),
func_index,
air,
liveness,
@@ -1158,45 +1129,16 @@ pub fn updateFunc(self: *Coff, pt: Zcu.PerThread, func_index: InternPool.Index,
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
- func.setAnalysisState(&mod.intern_pool, .codegen_failure);
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(zcu.gpa, func.owner_nav, em);
return;
},
};
- try self.updateDeclCode(pt, decl_index, code, .FUNCTION);
+ try self.updateNavCode(pt, func.owner_nav, code, .FUNCTION);
// Exports will be updated by `Zcu.processExports` after the update.
}
-pub fn lowerUnnamedConst(self: *Coff, pt: Zcu.PerThread, val: Value, decl_index: InternPool.DeclIndex) !u32 {
- const mod = pt.zcu;
- const gpa = mod.gpa;
- const decl = mod.declPtr(decl_index);
- const gop = try self.unnamed_const_atoms.getOrPut(gpa, decl_index);
- if (!gop.found_existing) {
- gop.value_ptr.* = .{};
- }
- const unnamed_consts = gop.value_ptr;
- const index = unnamed_consts.items.len;
- const sym_name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{
- decl.fqn.fmt(&mod.intern_pool), index,
- });
- defer gpa.free(sym_name);
- const ty = val.typeOf(mod);
- const atom_index = switch (try self.lowerConst(pt, sym_name, val, ty.abiAlignment(pt), self.rdata_section_index.?, decl.navSrcLoc(mod))) {
- .ok => |atom_index| atom_index,
- .fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
- log.err("{s}", .{em.msg});
- return error.CodegenFail;
- },
- };
- try unnamed_consts.append(gpa, atom_index);
- return self.getAtom(atom_index).getSymbolIndex().?;
-}
-
const LowerConstResult = union(enum) {
ok: Atom.Index,
fail: *Module.ErrorMsg,
@@ -1246,57 +1188,62 @@ fn lowerConst(
return .{ .ok = atom_index };
}
-pub fn updateDecl(
+pub fn updateNav(
self: *Coff,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
-) link.File.UpdateDeclError!void {
- const mod = pt.zcu;
+ nav_index: InternPool.Nav.Index,
+) link.File.UpdateNavError!void {
if (build_options.skip_non_native and builtin.object_format != .coff) {
@panic("Attempted to compile for object format that was disabled by build configuration");
}
- if (self.llvm_object) |llvm_object| return llvm_object.updateDecl(pt, decl_index);
+ if (self.llvm_object) |llvm_object| return llvm_object.updateNav(pt, nav_index);
const tracy = trace(@src());
defer tracy.end();
- const decl = mod.declPtr(decl_index);
-
- if (decl.val.getExternFunc(mod)) |_| {
- return;
- }
-
- const gpa = self.base.comp.gpa;
- if (decl.isExtern(mod)) {
- // TODO make this part of getGlobalSymbol
- const variable = decl.getOwnedVariable(mod).?;
- const name = decl.name.toSlice(&mod.intern_pool);
- const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
- const global_index = try self.getGlobalSymbol(name, lib_name);
- try self.need_got_table.put(gpa, global_index, {});
- return;
- }
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
+
+ const init_val = switch (ip.indexToKey(nav.status.resolved.val)) {
+ .variable => |variable| variable.init,
+ .@"extern" => |@"extern"| {
+ if (ip.isFunctionType(nav.typeOf(ip))) return;
+ // TODO make this part of getGlobalSymbol
+ const name = nav.name.toSlice(ip);
+ const lib_name = @"extern".lib_name.toSlice(ip);
+ const global_index = try self.getGlobalSymbol(name, lib_name);
+ try self.need_got_table.put(gpa, global_index, {});
+ return;
+ },
+ else => nav.status.resolved.val,
+ };
- const atom_index = try self.getOrCreateAtomForDecl(decl_index);
+ const atom_index = try self.getOrCreateAtomForNav(nav_index);
Atom.freeRelocations(self, atom_index);
const atom = self.getAtom(atom_index);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
- const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
- const res = try codegen.generateSymbol(&self.base, pt, decl.navSrcLoc(mod), decl_val, &code_buffer, .none, .{
- .parent_atom_index = atom.getSymbolIndex().?,
- });
+ const res = try codegen.generateSymbol(
+ &self.base,
+ pt,
+ zcu.navSrcLoc(nav_index),
+ Value.fromInterned(init_val),
+ &code_buffer,
+ .none,
+ .{ .parent_atom_index = atom.getSymbolIndex().? },
+ );
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(gpa, nav_index, em);
return;
},
};
- try self.updateDeclCode(pt, decl_index, code, .NULL);
+ try self.updateNavCode(pt, nav_index, code, .NULL);
// Exports will be updated by `Zcu.processExports` after the update.
}
@@ -1317,14 +1264,14 @@ fn updateLazySymbolAtom(
const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{
@tagName(sym.kind),
- sym.ty.fmt(pt),
+ Type.fromInterned(sym.ty).fmt(pt),
});
defer gpa.free(name);
const atom = self.getAtomPtr(atom_index);
const local_sym_index = atom.getSymbolIndex().?;
- const src = sym.ty.srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
+ const src = Type.fromInterned(sym.ty).srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
const res = try codegen.generateLazySymbol(
&self.base,
pt,
@@ -1362,52 +1309,55 @@ fn updateLazySymbolAtom(
try self.writeAtom(atom_index, code);
}
-pub fn getOrCreateAtomForLazySymbol(self: *Coff, pt: Zcu.PerThread, sym: link.File.LazySymbol) !Atom.Index {
- const gpa = self.base.comp.gpa;
- const mod = self.base.comp.module.?;
- const gop = try self.lazy_syms.getOrPut(gpa, sym.getDecl(mod));
+pub fn getOrCreateAtomForLazySymbol(
+ self: *Coff,
+ pt: Zcu.PerThread,
+ lazy_sym: link.File.LazySymbol,
+) !Atom.Index {
+ const gop = try self.lazy_syms.getOrPut(pt.zcu.gpa, lazy_sym.ty);
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
if (!gop.found_existing) gop.value_ptr.* = .{};
- const metadata: struct { atom: *Atom.Index, state: *LazySymbolMetadata.State } = switch (sym.kind) {
- .code => .{ .atom = &gop.value_ptr.text_atom, .state = &gop.value_ptr.text_state },
- .const_data => .{ .atom = &gop.value_ptr.rdata_atom, .state = &gop.value_ptr.rdata_state },
+ const atom_ptr, const state_ptr = switch (lazy_sym.kind) {
+ .code => .{ &gop.value_ptr.text_atom, &gop.value_ptr.text_state },
+ .const_data => .{ &gop.value_ptr.rdata_atom, &gop.value_ptr.rdata_state },
};
- switch (metadata.state.*) {
- .unused => metadata.atom.* = try self.createAtom(),
- .pending_flush => return metadata.atom.*,
+ switch (state_ptr.*) {
+ .unused => atom_ptr.* = try self.createAtom(),
+ .pending_flush => return atom_ptr.*,
.flushed => {},
}
- metadata.state.* = .pending_flush;
- const atom = metadata.atom.*;
+ state_ptr.* = .pending_flush;
+ const atom = atom_ptr.*;
// anyerror needs to be deferred until flushModule
- if (sym.getDecl(mod) != .none) try self.updateLazySymbolAtom(pt, sym, atom, switch (sym.kind) {
+ if (lazy_sym.ty != .anyerror_type) try self.updateLazySymbolAtom(pt, lazy_sym, atom, switch (lazy_sym.kind) {
.code => self.text_section_index.?,
.const_data => self.rdata_section_index.?,
});
return atom;
}
-pub fn getOrCreateAtomForDecl(self: *Coff, decl_index: InternPool.DeclIndex) !Atom.Index {
+pub fn getOrCreateAtomForNav(self: *Coff, nav_index: InternPool.Nav.Index) !Atom.Index {
const gpa = self.base.comp.gpa;
- const gop = try self.decls.getOrPut(gpa, decl_index);
+ const gop = try self.navs.getOrPut(gpa, nav_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{
.atom = try self.createAtom(),
- .section = self.getDeclOutputSection(decl_index),
+ .section = self.getNavOutputSection(nav_index),
.exports = .{},
};
}
return gop.value_ptr.atom;
}
-fn getDeclOutputSection(self: *Coff, decl_index: InternPool.DeclIndex) u16 {
- const decl = self.base.comp.module.?.declPtr(decl_index);
- const mod = self.base.comp.module.?;
- const ty = decl.typeOf(mod);
- const zig_ty = ty.zigTypeTag(mod);
- const val = decl.val;
+fn getNavOutputSection(self: *Coff, nav_index: InternPool.Nav.Index) u16 {
+ const zcu = self.base.comp.module.?;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
+ const ty = Type.fromInterned(nav.typeOf(ip));
+ const zig_ty = ty.zigTypeTag(zcu);
+ const val = Value.fromInterned(nav.status.resolved.val);
const index: u16 = blk: {
- if (val.isUndefDeep(mod)) {
+ if (val.isUndefDeep(zcu)) {
// TODO in release-fast and release-small, we should put undef in .bss
break :blk self.data_section_index.?;
}
@@ -1416,7 +1366,7 @@ fn getDeclOutputSection(self: *Coff, decl_index: InternPool.DeclIndex) u16 {
// TODO: what if this is a function pointer?
.Fn => break :blk self.text_section_index.?,
else => {
- if (val.getVariable(mod)) |_| {
+ if (val.getVariable(zcu)) |_| {
break :blk self.data_section_index.?;
}
break :blk self.rdata_section_index.?;
@@ -1426,31 +1376,41 @@ fn getDeclOutputSection(self: *Coff, decl_index: InternPool.DeclIndex) u16 {
return index;
}
-fn updateDeclCode(self: *Coff, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex, code: []u8, complex_type: coff.ComplexType) !void {
- const mod = pt.zcu;
- const decl = mod.declPtr(decl_index);
+fn updateNavCode(
+ self: *Coff,
+ pt: Zcu.PerThread,
+ nav_index: InternPool.Nav.Index,
+ code: []u8,
+ complex_type: coff.ComplexType,
+) !void {
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
- log.debug("updateDeclCode {}{*}", .{ decl.fqn.fmt(&mod.intern_pool), decl });
- const required_alignment: u32 = @intCast(decl.getAlignment(pt).toByteUnits() orelse 0);
+ log.debug("updateNavCode {} 0x{x}", .{ nav.fqn.fmt(ip), nav_index });
- const decl_metadata = self.decls.get(decl_index).?;
- const atom_index = decl_metadata.atom;
+ const required_alignment = pt.navAlignment(nav_index).max(
+ target_util.minFunctionAlignment(zcu.navFileScope(nav_index).mod.resolved_target.result),
+ );
+
+ const nav_metadata = self.navs.get(nav_index).?;
+ const atom_index = nav_metadata.atom;
const atom = self.getAtom(atom_index);
const sym_index = atom.getSymbolIndex().?;
- const sect_index = decl_metadata.section;
+ const sect_index = nav_metadata.section;
const code_len = @as(u32, @intCast(code.len));
if (atom.size != 0) {
const sym = atom.getSymbolPtr(self);
- try self.setSymbolName(sym, decl.fqn.toSlice(&mod.intern_pool));
+ try self.setSymbolName(sym, nav.fqn.toSlice(ip));
sym.section_number = @as(coff.SectionNumber, @enumFromInt(sect_index + 1));
sym.type = .{ .complex_type = complex_type, .base_type = .NULL };
const capacity = atom.capacity(self);
- const need_realloc = code.len > capacity or !mem.isAlignedGeneric(u64, sym.value, required_alignment);
+ const need_realloc = code.len > capacity or !required_alignment.check(sym.value);
if (need_realloc) {
- const vaddr = try self.growAtom(atom_index, code_len, required_alignment);
- log.debug("growing {} from 0x{x} to 0x{x}", .{ decl.fqn.fmt(&mod.intern_pool), sym.value, vaddr });
+ const vaddr = try self.growAtom(atom_index, code_len, @intCast(required_alignment.toByteUnits() orelse 0));
+ log.debug("growing {} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), sym.value, vaddr });
log.debug(" (required alignment 0x{x}", .{required_alignment});
if (vaddr != sym.value) {
@@ -1466,13 +1426,13 @@ fn updateDeclCode(self: *Coff, pt: Zcu.PerThread, decl_index: InternPool.DeclInd
self.getAtomPtr(atom_index).size = code_len;
} else {
const sym = atom.getSymbolPtr(self);
- try self.setSymbolName(sym, decl.fqn.toSlice(&mod.intern_pool));
+ try self.setSymbolName(sym, nav.fqn.toSlice(ip));
sym.section_number = @as(coff.SectionNumber, @enumFromInt(sect_index + 1));
sym.type = .{ .complex_type = complex_type, .base_type = .NULL };
- const vaddr = try self.allocateAtom(atom_index, code_len, required_alignment);
+ const vaddr = try self.allocateAtom(atom_index, code_len, @intCast(required_alignment.toByteUnits() orelse 0));
errdefer self.freeAtom(atom_index);
- log.debug("allocated atom for {} at 0x{x}", .{ decl.fqn.fmt(&mod.intern_pool), vaddr });
+ log.debug("allocated atom for {} at 0x{x}", .{ nav.fqn.fmt(ip), vaddr });
self.getAtomPtr(atom_index).size = code_len;
sym.value = vaddr;
@@ -1482,28 +1442,15 @@ fn updateDeclCode(self: *Coff, pt: Zcu.PerThread, decl_index: InternPool.DeclInd
try self.writeAtom(atom_index, code);
}
-fn freeUnnamedConsts(self: *Coff, decl_index: InternPool.DeclIndex) void {
- const gpa = self.base.comp.gpa;
- const unnamed_consts = self.unnamed_const_atoms.getPtr(decl_index) orelse return;
- for (unnamed_consts.items) |atom_index| {
- self.freeAtom(atom_index);
- }
- unnamed_consts.clearAndFree(gpa);
-}
-
-pub fn freeDecl(self: *Coff, decl_index: InternPool.DeclIndex) void {
- if (self.llvm_object) |llvm_object| return llvm_object.freeDecl(decl_index);
+pub fn freeNav(self: *Coff, nav_index: InternPool.Nav.Index) void {
+ if (self.llvm_object) |llvm_object| return llvm_object.freeNav(nav_index);
const gpa = self.base.comp.gpa;
- const mod = self.base.comp.module.?;
- const decl = mod.declPtr(decl_index);
+ log.debug("freeDecl 0x{x}", .{nav_index});
- log.debug("freeDecl {*}", .{decl});
-
- if (self.decls.fetchOrderedRemove(decl_index)) |const_kv| {
+ if (self.navs.fetchOrderedRemove(nav_index)) |const_kv| {
var kv = const_kv;
self.freeAtom(kv.value.atom);
- self.freeUnnamedConsts(decl_index);
kv.value.exports.deinit(gpa);
}
}
@@ -1528,20 +1475,21 @@ pub fn updateExports(
// detect the default subsystem.
for (export_indices) |export_idx| {
const exp = mod.all_exports.items[export_idx];
- const exported_decl_index = switch (exp.exported) {
- .decl_index => |i| i,
- .value => continue,
+ const exported_nav_index = switch (exp.exported) {
+ .nav => |nav| nav,
+ .uav => continue,
};
- const exported_decl = mod.declPtr(exported_decl_index);
- if (exported_decl.getOwnedFunction(mod) == null) continue;
- const winapi_cc = switch (target.cpu.arch) {
- .x86 => std.builtin.CallingConvention.Stdcall,
- else => std.builtin.CallingConvention.C,
+ const exported_nav = ip.getNav(exported_nav_index);
+ const exported_ty = exported_nav.typeOf(ip);
+ if (!ip.isFunctionType(exported_ty)) continue;
+ const winapi_cc: std.builtin.CallingConvention = switch (target.cpu.arch) {
+ .x86 => .Stdcall,
+ else => .C,
};
- const decl_cc = exported_decl.typeOf(mod).fnCallingConvention(mod);
- if (decl_cc == .C and exp.opts.name.eqlSlice("main", ip) and comp.config.link_libc) {
+ const exported_cc = Type.fromInterned(exported_ty).fnCallingConvention(mod);
+ if (exported_cc == .C and exp.opts.name.eqlSlice("main", ip) and comp.config.link_libc) {
mod.stage1_flags.have_c_main = true;
- } else if (decl_cc == winapi_cc and target.os.tag == .windows) {
+ } else if (exported_cc == winapi_cc and target.os.tag == .windows) {
if (exp.opts.name.eqlSlice("WinMain", ip)) {
mod.stage1_flags.have_winmain = true;
} else if (exp.opts.name.eqlSlice("wWinMain", ip)) {
@@ -1562,15 +1510,15 @@ pub fn updateExports(
const gpa = comp.gpa;
const metadata = switch (exported) {
- .decl_index => |decl_index| blk: {
- _ = try self.getOrCreateAtomForDecl(decl_index);
- break :blk self.decls.getPtr(decl_index).?;
+ .nav => |nav| blk: {
+ _ = try self.getOrCreateAtomForNav(nav);
+ break :blk self.navs.getPtr(nav).?;
},
- .value => |value| self.anon_decls.getPtr(value) orelse blk: {
+ .uav => |uav| self.uavs.getPtr(uav) orelse blk: {
const first_exp = mod.all_exports.items[export_indices[0]];
- const res = try self.lowerAnonDecl(pt, value, .none, first_exp.src);
+ const res = try self.lowerUav(pt, uav, .none, first_exp.src);
switch (res) {
- .ok => {},
+ .mcv => {},
.fail => |em| {
// TODO maybe it's enough to return an error here and let Module.processExportsInner
// handle the error?
@@ -1579,7 +1527,7 @@ pub fn updateExports(
return;
},
}
- break :blk self.anon_decls.getPtr(value).?;
+ break :blk self.uavs.getPtr(uav).?;
},
};
const atom_index = metadata.atom;
@@ -1654,9 +1602,9 @@ pub fn deleteExport(
) void {
if (self.llvm_object) |_| return;
const metadata = switch (exported) {
- .decl_index => |decl_index| self.decls.getPtr(decl_index) orelse return,
- .value => |value| self.anon_decls.getPtr(value) orelse return,
- };
+ .nav => |nav| self.navs.getPtr(nav),
+ .uav => |uav| self.uavs.getPtr(uav),
+ } orelse return;
const mod = self.base.comp.module.?;
const name_slice = name.toSlice(&mod.intern_pool);
const sym_index = metadata.getExportPtr(self, name_slice) orelse return;
@@ -1748,7 +1696,7 @@ pub fn flushModule(self: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
// anyerror needs to wait for everything to be flushed.
if (metadata.text_state != .unused) self.updateLazySymbolAtom(
pt,
- link.File.LazySymbol.initDecl(.code, null, pt.zcu),
+ .{ .kind = .code, .ty = .anyerror_type },
metadata.text_atom,
self.text_section_index.?,
) catch |err| return switch (err) {
@@ -1757,7 +1705,7 @@ pub fn flushModule(self: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
};
if (metadata.rdata_state != .unused) self.updateLazySymbolAtom(
pt,
- link.File.LazySymbol.initDecl(.const_data, null, pt.zcu),
+ .{ .kind = .const_data, .ty = .anyerror_type },
metadata.rdata_atom,
self.rdata_section_index.?,
) catch |err| return switch (err) {
@@ -1856,22 +1804,20 @@ pub fn flushModule(self: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
assert(!self.imports_count_dirty);
}
-pub fn getDeclVAddr(self: *Coff, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex, reloc_info: link.File.RelocInfo) !u64 {
+pub fn getNavVAddr(
+ self: *Coff,
+ pt: Zcu.PerThread,
+ nav_index: InternPool.Nav.Index,
+ reloc_info: link.File.RelocInfo,
+) !u64 {
assert(self.llvm_object == null);
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
- const decl = zcu.declPtr(decl_index);
- log.debug("getDeclVAddr {}({d})", .{ decl.fqn.fmt(ip), decl_index });
- const sym_index = if (decl.isExtern(zcu)) blk: {
- const name = decl.name.toSlice(ip);
- const lib_name = if (decl.getOwnedExternFunc(zcu)) |ext_fn|
- ext_fn.lib_name.toSlice(ip)
- else
- decl.getOwnedVariable(zcu).?.lib_name.toSlice(ip);
- break :blk try self.getGlobalSymbol(name, lib_name);
- } else blk: {
- const this_atom_index = try self.getOrCreateAtomForDecl(decl_index);
- break :blk self.getAtom(this_atom_index).getSymbolIndex().?;
+ const nav = ip.getNav(nav_index);
+ log.debug("getNavVAddr {}({d})", .{ nav.fqn.fmt(ip), nav_index });
+ const sym_index = switch (ip.indexToKey(nav.status.resolved.val)) {
+ .@"extern" => |@"extern"| try self.getGlobalSymbol(nav.name.toSlice(ip), @"extern".lib_name.toSlice(ip)),
+ else => self.getAtom(try self.getOrCreateAtomForNav(nav_index)).getSymbolIndex().?,
};
const atom_index = self.getAtomIndexForSymbol(.{ .sym_index = reloc_info.parent_atom_index, .file = null }).?;
const target = SymbolWithLoc{ .sym_index = sym_index, .file = null };
@@ -1888,36 +1834,36 @@ pub fn getDeclVAddr(self: *Coff, pt: Zcu.PerThread, decl_index: InternPool.DeclI
return 0;
}
-pub fn lowerAnonDecl(
+pub fn lowerUav(
self: *Coff,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Module.LazySrcLoc,
-) !codegen.Result {
- const gpa = self.base.comp.gpa;
- const mod = self.base.comp.module.?;
- const ty = Type.fromInterned(mod.intern_pool.typeOf(decl_val));
- const decl_alignment = switch (explicit_alignment) {
- .none => ty.abiAlignment(pt),
+) !codegen.GenResult {
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const val = Value.fromInterned(uav);
+ const uav_alignment = switch (explicit_alignment) {
+ .none => val.typeOf(zcu).abiAlignment(pt),
else => explicit_alignment,
};
- if (self.anon_decls.get(decl_val)) |metadata| {
- const existing_addr = self.getAtom(metadata.atom).getSymbol(self).value;
- if (decl_alignment.check(existing_addr))
- return .ok;
+ if (self.uavs.get(uav)) |metadata| {
+ const atom = self.getAtom(metadata.atom);
+ const existing_addr = atom.getSymbol(self).value;
+ if (uav_alignment.check(existing_addr))
+ return .{ .mcv = .{ .load_direct = atom.getSymbolIndex().? } };
}
- const val = Value.fromInterned(decl_val);
var name_buf: [32]u8 = undefined;
const name = std.fmt.bufPrint(&name_buf, "__anon_{d}", .{
- @intFromEnum(decl_val),
+ @intFromEnum(uav),
}) catch unreachable;
const res = self.lowerConst(
pt,
name,
val,
- decl_alignment,
+ uav_alignment,
self.rdata_section_index.?,
src_loc,
) catch |err| switch (err) {
@@ -1933,14 +1879,23 @@ pub fn lowerAnonDecl(
.ok => |atom_index| atom_index,
.fail => |em| return .{ .fail = em },
};
- try self.anon_decls.put(gpa, decl_val, .{ .atom = atom_index, .section = self.rdata_section_index.? });
- return .ok;
+ try self.uavs.put(gpa, uav, .{
+ .atom = atom_index,
+ .section = self.rdata_section_index.?,
+ });
+ return .{ .mcv = .{
+ .load_direct = self.getAtom(atom_index).getSymbolIndex().?,
+ } };
}
-pub fn getAnonDeclVAddr(self: *Coff, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
+pub fn getUavVAddr(
+ self: *Coff,
+ uav: InternPool.Index,
+ reloc_info: link.File.RelocInfo,
+) !u64 {
assert(self.llvm_object == null);
- const this_atom_index = self.anon_decls.get(decl_val).?.atom;
+ const this_atom_index = self.uavs.get(uav).?.atom;
const sym_index = self.getAtom(this_atom_index).getSymbolIndex().?;
const atom_index = self.getAtomIndexForSymbol(.{ .sym_index = reloc_info.parent_atom_index, .file = null }).?;
const target = SymbolWithLoc{ .sym_index = sym_index, .file = null };
@@ -2760,6 +2715,7 @@ const Allocator = std.mem.Allocator;
const codegen = @import("../codegen.zig");
const link = @import("../link.zig");
const lld = @import("Coff/lld.zig");
+const target_util = @import("../target.zig");
const trace = @import("../tracy.zig").trace;
const Air = @import("../Air.zig");
@@ -2781,6 +2737,4 @@ const Value = @import("../Value.zig");
const AnalUnit = InternPool.AnalUnit;
const dev = @import("../dev.zig");
-pub const base_tag: link.File.Tag = .coff;
-
const msdos_stub = @embedFile("msdos-stub.bin");
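
link/Coff.zig follows the same Nav/uav split, and lowerUav now returns codegen.GenResult, reporting success as a machine value (.mcv wrapping a .load_direct symbol index) instead of the old bare .ok. A sketch of how a caller might unwrap that result; the helper name is hypothetical, while the call shape, payload names, and error.CodegenFail are taken from the hunks above:

// Sketch only: unwrap the new lowerUav result shape into a symbol index.
fn uavSymIndex(
    coff: *Coff,
    pt: Zcu.PerThread,
    uav: InternPool.Index,
    src_loc: Module.LazySrcLoc,
) !u32 {
    return switch (try coff.lowerUav(pt, uav, .none, src_loc)) {
        // Assumes the .load_direct variant, as produced by lowerUav above.
        .mcv => |mcv| mcv.load_direct,
        .fail => |em| {
            std.log.err("{s}", .{em.msg});
            return error.CodegenFail;
        },
    };
}
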
diff --git a/src/link/Dwarf.zig b/src/link/Dwarf.zig
index 9f2781549c..30b286cac4 100644
--- a/src/link/Dwarf.zig
+++ b/src/link/Dwarf.zig
@@ -9,7 +9,7 @@ src_fn_free_list: std.AutoHashMapUnmanaged(Atom.Index, void) = .{},
src_fn_first_index: ?Atom.Index = null,
src_fn_last_index: ?Atom.Index = null,
src_fns: std.ArrayListUnmanaged(Atom) = .{},
-src_fn_decls: AtomTable = .{},
+src_fn_navs: AtomTable = .{},
/// A list of `Atom`s whose corresponding .debug_info tags have surplus capacity.
/// This is the same concept as `text_block_free_list`; see those doc comments.
@@ -17,7 +17,7 @@ di_atom_free_list: std.AutoHashMapUnmanaged(Atom.Index, void) = .{},
di_atom_first_index: ?Atom.Index = null,
di_atom_last_index: ?Atom.Index = null,
di_atoms: std.ArrayListUnmanaged(Atom) = .{},
-di_atom_decls: AtomTable = .{},
+di_atom_navs: AtomTable = .{},
dbg_line_header: DbgLineHeader,
@@ -27,7 +27,7 @@ abbrev_table_offset: ?u64 = null,
/// Table of debug symbol names.
strtab: StringTable = .{},
-/// Quick lookup array of all defined source files referenced by at least one Decl.
+/// Quick lookup array of all defined source files referenced by at least one Nav.
/// They will end up in the DWARF debug_line header as two lists:
/// * []include_directory
/// * []file_names
@@ -35,13 +35,13 @@ di_files: std.AutoArrayHashMapUnmanaged(*const Zcu.File, void) = .{},
global_abbrev_relocs: std.ArrayListUnmanaged(AbbrevRelocation) = .{},
-const AtomTable = std.AutoHashMapUnmanaged(InternPool.DeclIndex, Atom.Index);
+const AtomTable = std.AutoHashMapUnmanaged(InternPool.Nav.Index, Atom.Index);
const Atom = struct {
- /// Offset into .debug_info pointing to the tag for this Decl, or
+ /// Offset into .debug_info pointing to the tag for this Nav, or
/// offset from the beginning of the Debug Line Program header that contains this function.
off: u32,
- /// Size of the .debug_info tag for this Decl, not including padding, or
+ /// Size of the .debug_info tag for this Nav, not including padding, or
/// size of the line number program component belonging to this function, not
/// including padding.
len: u32,
@@ -61,14 +61,14 @@ const DbgLineHeader = struct {
opcode_base: u8,
};
-/// Represents state of the analysed Decl.
-/// Includes Decl's abbrev table of type Types, matching arena
+/// Represents state of the analysed Nav.
+/// Includes Nav's abbrev table of type Types, matching arena
/// and a set of relocations that will be resolved once this
-/// Decl's inner Atom is assigned an offset within the DWARF section.
-pub const DeclState = struct {
+/// Nav's inner Atom is assigned an offset within the DWARF section.
+pub const NavState = struct {
dwarf: *Dwarf,
pt: Zcu.PerThread,
- di_atom_decls: *const AtomTable,
+ di_atom_navs: *const AtomTable,
dbg_line_func: InternPool.Index,
dbg_line: std.ArrayList(u8),
dbg_info: std.ArrayList(u8),
@@ -78,20 +78,20 @@ pub const DeclState = struct {
abbrev_relocs: std.ArrayListUnmanaged(AbbrevRelocation),
exprloc_relocs: std.ArrayListUnmanaged(ExprlocRelocation),
- pub fn deinit(self: *DeclState) void {
- const gpa = self.dwarf.allocator;
- self.dbg_line.deinit();
- self.dbg_info.deinit();
- self.abbrev_type_arena.deinit();
- self.abbrev_table.deinit(gpa);
- self.abbrev_resolver.deinit(gpa);
- self.abbrev_relocs.deinit(gpa);
- self.exprloc_relocs.deinit(gpa);
+ pub fn deinit(ns: *NavState) void {
+ const gpa = ns.dwarf.allocator;
+ ns.dbg_line.deinit();
+ ns.dbg_info.deinit();
+ ns.abbrev_type_arena.deinit();
+ ns.abbrev_table.deinit(gpa);
+ ns.abbrev_resolver.deinit(gpa);
+ ns.abbrev_relocs.deinit(gpa);
+ ns.exprloc_relocs.deinit(gpa);
}
/// Adds local type relocation of the form: @offset => @this + addend
/// @this signifies the offset within the .debug_abbrev section of the containing atom.
- fn addTypeRelocLocal(self: *DeclState, atom_index: Atom.Index, offset: u32, addend: u32) !void {
+ fn addTypeRelocLocal(self: *NavState, atom_index: Atom.Index, offset: u32, addend: u32) !void {
log.debug("{x}: @this + {x}", .{ offset, addend });
try self.abbrev_relocs.append(self.dwarf.allocator, .{
.target = null,
@@ -104,7 +104,7 @@ pub const DeclState = struct {
/// Adds global type relocation of the form: @offset => @symbol + 0
/// @symbol signifies a type abbreviation posititioned somewhere in the .debug_abbrev section
/// which we use as our target of the relocation.
- fn addTypeRelocGlobal(self: *DeclState, atom_index: Atom.Index, ty: Type, offset: u32) !void {
+ fn addTypeRelocGlobal(self: *NavState, atom_index: Atom.Index, ty: Type, offset: u32) !void {
const gpa = self.dwarf.allocator;
const resolv = self.abbrev_resolver.get(ty.toIntern()) orelse blk: {
const sym_index: u32 = @intCast(self.abbrev_table.items.len);
@@ -127,7 +127,7 @@ pub const DeclState = struct {
}
fn addDbgInfoType(
- self: *DeclState,
+ self: *NavState,
pt: Zcu.PerThread,
atom_index: Atom.Index,
ty: Type,
@@ -550,15 +550,15 @@ pub const DeclState = struct {
};
pub fn genArgDbgInfo(
- self: *DeclState,
+ self: *NavState,
name: [:0]const u8,
ty: Type,
- owner_decl: InternPool.DeclIndex,
+ owner_nav: InternPool.Nav.Index,
loc: DbgInfoLoc,
) error{OutOfMemory}!void {
const pt = self.pt;
const dbg_info = &self.dbg_info;
- const atom_index = self.di_atom_decls.get(owner_decl).?;
+ const atom_index = self.di_atom_navs.get(owner_nav).?;
const name_with_null = name.ptr[0 .. name.len + 1];
switch (loc) {
@@ -639,6 +639,7 @@ pub const DeclState = struct {
leb128.writeIleb128(dbg_info.writer(), info.offset) catch unreachable;
},
.wasm_local => |value| {
+ @import("../dev.zig").check(.wasm_linker);
const leb_size = link.File.Wasm.getUleb128Size(value);
try dbg_info.ensureUnusedCapacity(3 + leb_size);
// wasm locations are encoded as follow:
@@ -665,15 +666,15 @@ pub const DeclState = struct {
}
pub fn genVarDbgInfo(
- self: *DeclState,
+ self: *NavState,
name: [:0]const u8,
ty: Type,
- owner_decl: InternPool.DeclIndex,
+ owner_nav: InternPool.Nav.Index,
is_ptr: bool,
loc: DbgInfoLoc,
) error{OutOfMemory}!void {
const dbg_info = &self.dbg_info;
- const atom_index = self.di_atom_decls.get(owner_decl).?;
+ const atom_index = self.di_atom_navs.get(owner_nav).?;
const name_with_null = name.ptr[0 .. name.len + 1];
try dbg_info.append(@intFromEnum(AbbrevCode.variable));
const gpa = self.dwarf.allocator;
@@ -881,7 +882,7 @@ pub const DeclState = struct {
}
pub fn advancePCAndLine(
- self: *DeclState,
+ self: *NavState,
delta_line: i33,
delta_pc: u64,
) error{OutOfMemory}!void {
@@ -921,21 +922,21 @@ pub const DeclState = struct {
}
}
- pub fn setColumn(self: *DeclState, column: u32) error{OutOfMemory}!void {
+ pub fn setColumn(self: *NavState, column: u32) error{OutOfMemory}!void {
try self.dbg_line.ensureUnusedCapacity(1 + 5);
self.dbg_line.appendAssumeCapacity(DW.LNS.set_column);
leb128.writeUleb128(self.dbg_line.writer(), column + 1) catch unreachable;
}
- pub fn setPrologueEnd(self: *DeclState) error{OutOfMemory}!void {
+ pub fn setPrologueEnd(self: *NavState) error{OutOfMemory}!void {
try self.dbg_line.append(DW.LNS.set_prologue_end);
}
- pub fn setEpilogueBegin(self: *DeclState) error{OutOfMemory}!void {
+ pub fn setEpilogueBegin(self: *NavState) error{OutOfMemory}!void {
try self.dbg_line.append(DW.LNS.set_epilogue_begin);
}
- pub fn setInlineFunc(self: *DeclState, func: InternPool.Index) error{OutOfMemory}!void {
+ pub fn setInlineFunc(self: *NavState, func: InternPool.Index) error{OutOfMemory}!void {
const zcu = self.pt.zcu;
if (self.dbg_line_func == func) return;
@@ -944,15 +945,15 @@ pub const DeclState = struct {
const old_func_info = zcu.funcInfo(self.dbg_line_func);
const new_func_info = zcu.funcInfo(func);
- const old_file = try self.dwarf.addDIFile(zcu, old_func_info.owner_decl);
- const new_file = try self.dwarf.addDIFile(zcu, new_func_info.owner_decl);
+ const old_file = try self.dwarf.addDIFile(zcu, old_func_info.owner_nav);
+ const new_file = try self.dwarf.addDIFile(zcu, new_func_info.owner_nav);
if (old_file != new_file) {
self.dbg_line.appendAssumeCapacity(DW.LNS.set_file);
leb128.writeUnsignedFixed(4, self.dbg_line.addManyAsArrayAssumeCapacity(4), new_file);
}
- const old_src_line: i33 = zcu.declPtr(old_func_info.owner_decl).navSrcLine(zcu);
- const new_src_line: i33 = zcu.declPtr(new_func_info.owner_decl).navSrcLine(zcu);
+ const old_src_line: i33 = zcu.navSrcLine(old_func_info.owner_nav);
+ const new_src_line: i33 = zcu.navSrcLine(new_func_info.owner_nav);
if (new_src_line != old_src_line) {
self.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeSignedFixed(5, self.dbg_line.addManyAsArrayAssumeCapacity(5), new_src_line - old_src_line);
@@ -1064,31 +1065,31 @@ pub fn deinit(self: *Dwarf) void {
self.src_fn_free_list.deinit(gpa);
self.src_fns.deinit(gpa);
- self.src_fn_decls.deinit(gpa);
+ self.src_fn_navs.deinit(gpa);
self.di_atom_free_list.deinit(gpa);
self.di_atoms.deinit(gpa);
- self.di_atom_decls.deinit(gpa);
+ self.di_atom_navs.deinit(gpa);
self.strtab.deinit(gpa);
self.di_files.deinit(gpa);
self.global_abbrev_relocs.deinit(gpa);
}
-/// Initializes Decl's state and its matching output buffers.
-/// Call this before `commitDeclState`.
-pub fn initDeclState(self: *Dwarf, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !DeclState {
+/// Initializes Nav's state and its matching output buffers.
+/// Call this before `commitNavState`.
+pub fn initNavState(self: *Dwarf, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !NavState {
const tracy = trace(@src());
defer tracy.end();
- const decl = pt.zcu.declPtr(decl_index);
- log.debug("initDeclState {}{*}", .{ decl.fqn.fmt(&pt.zcu.intern_pool), decl });
+ const nav = pt.zcu.intern_pool.getNav(nav_index);
+ log.debug("initNavState {}", .{nav.fqn.fmt(&pt.zcu.intern_pool)});
const gpa = self.allocator;
- var decl_state: DeclState = .{
+ var nav_state: NavState = .{
.dwarf = self,
.pt = pt,
- .di_atom_decls = &self.di_atom_decls,
+ .di_atom_navs = &self.di_atom_navs,
.dbg_line_func = undefined,
.dbg_line = std.ArrayList(u8).init(gpa),
.dbg_info = std.ArrayList(u8).init(gpa),
@@ -1098,30 +1099,30 @@ pub fn initDeclState(self: *Dwarf, pt: Zcu.PerThread, decl_index: InternPool.Dec
.abbrev_relocs = .{},
.exprloc_relocs = .{},
};
- errdefer decl_state.deinit();
- const dbg_line_buffer = &decl_state.dbg_line;
- const dbg_info_buffer = &decl_state.dbg_info;
+ errdefer nav_state.deinit();
+ const dbg_line_buffer = &nav_state.dbg_line;
+ const dbg_info_buffer = &nav_state.dbg_info;
- const di_atom_index = try self.getOrCreateAtomForDecl(.di_atom, decl_index);
+ const di_atom_index = try self.getOrCreateAtomForNav(.di_atom, nav_index);
- assert(decl.has_tv);
+ const nav_val = Value.fromInterned(nav.status.resolved.val);
- switch (decl.typeOf(pt.zcu).zigTypeTag(pt.zcu)) {
+ switch (nav_val.typeOf(pt.zcu).zigTypeTag(pt.zcu)) {
.Fn => {
- _ = try self.getOrCreateAtomForDecl(.src_fn, decl_index);
+ _ = try self.getOrCreateAtomForNav(.src_fn, nav_index);
// For functions we need to add a prologue to the debug line program.
const ptr_width_bytes = self.ptrWidthBytes();
try dbg_line_buffer.ensureTotalCapacity((3 + ptr_width_bytes) + (1 + 4) + (1 + 4) + (1 + 5) + 1);
- decl_state.dbg_line_func = decl.val.toIntern();
- const func = decl.val.getFunction(pt.zcu).?;
- log.debug("decl.src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{
- decl.navSrcLine(pt.zcu),
+ nav_state.dbg_line_func = nav_val.toIntern();
+ const func = nav_val.getFunction(pt.zcu).?;
+ log.debug("src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{
+ pt.zcu.navSrcLine(nav_index),
func.lbrace_line,
func.rbrace_line,
});
- const line: u28 = @intCast(decl.navSrcLine(pt.zcu) + func.lbrace_line);
+ const line: u28 = @intCast(pt.zcu.navSrcLine(nav_index) + func.lbrace_line);
dbg_line_buffer.appendSliceAssumeCapacity(&.{
DW.LNS.extended_op,
@@ -1143,7 +1144,7 @@ pub fn initDeclState(self: *Dwarf, pt: Zcu.PerThread, decl_index: InternPool.Dec
assert(self.getRelocDbgFileIndex() == dbg_line_buffer.items.len);
// Once we support more than one source file, this will have the ability to be more
// than one possible value.
- const file_index = try self.addDIFile(pt.zcu, decl_index);
+ const file_index = try self.addDIFile(pt.zcu, nav_index);
leb128.writeUnsignedFixed(4, dbg_line_buffer.addManyAsArrayAssumeCapacity(4), file_index);
dbg_line_buffer.appendAssumeCapacity(DW.LNS.set_column);
@@ -1154,12 +1155,12 @@ pub fn initDeclState(self: *Dwarf, pt: Zcu.PerThread, decl_index: InternPool.Dec
dbg_line_buffer.appendAssumeCapacity(DW.LNS.copy);
// .debug_info subprogram
- const decl_name_slice = decl.name.toSlice(&pt.zcu.intern_pool);
- const decl_linkage_name_slice = decl.fqn.toSlice(&pt.zcu.intern_pool);
+ const nav_name_slice = nav.name.toSlice(&pt.zcu.intern_pool);
+ const nav_linkage_name_slice = nav.fqn.toSlice(&pt.zcu.intern_pool);
try dbg_info_buffer.ensureUnusedCapacity(1 + ptr_width_bytes + 4 + 4 +
- (decl_name_slice.len + 1) + (decl_linkage_name_slice.len + 1));
+ (nav_name_slice.len + 1) + (nav_linkage_name_slice.len + 1));
- const fn_ret_type = decl.typeOf(pt.zcu).fnReturnType(pt.zcu);
+ const fn_ret_type = nav_val.typeOf(pt.zcu).fnReturnType(pt.zcu);
const fn_ret_has_bits = fn_ret_type.hasRuntimeBits(pt);
dbg_info_buffer.appendAssumeCapacity(@intFromEnum(
@as(AbbrevCode, if (fn_ret_has_bits) .subprogram else .subprogram_retvoid),
@@ -1172,14 +1173,14 @@ pub fn initDeclState(self: *Dwarf, pt: Zcu.PerThread, decl_index: InternPool.Dec
assert(self.getRelocDbgInfoSubprogramHighPC() == dbg_info_buffer.items.len);
dbg_info_buffer.appendNTimesAssumeCapacity(0, 4); // DW.AT.high_pc, DW.FORM.data4
if (fn_ret_has_bits) {
- try decl_state.addTypeRelocGlobal(di_atom_index, fn_ret_type, @intCast(dbg_info_buffer.items.len));
+ try nav_state.addTypeRelocGlobal(di_atom_index, fn_ret_type, @intCast(dbg_info_buffer.items.len));
dbg_info_buffer.appendNTimesAssumeCapacity(0, 4); // DW.AT.type, DW.FORM.ref4
}
dbg_info_buffer.appendSliceAssumeCapacity(
- decl_name_slice[0 .. decl_name_slice.len + 1],
+ nav_name_slice[0 .. nav_name_slice.len + 1],
); // DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity(
- decl_linkage_name_slice[0 .. decl_linkage_name_slice.len + 1],
+ nav_linkage_name_slice[0 .. nav_linkage_name_slice.len + 1],
); // DW.AT.linkage_name, DW.FORM.string
},
else => {
@@ -1187,37 +1188,36 @@ pub fn initDeclState(self: *Dwarf, pt: Zcu.PerThread, decl_index: InternPool.Dec
},
}
- return decl_state;
+ return nav_state;
}
-pub fn commitDeclState(
+pub fn commitNavState(
self: *Dwarf,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
sym_addr: u64,
sym_size: u64,
- decl_state: *DeclState,
+ nav_state: *NavState,
) !void {
const tracy = trace(@src());
defer tracy.end();
const gpa = self.allocator;
const zcu = pt.zcu;
- const decl = zcu.declPtr(decl_index);
const ip = &zcu.intern_pool;
- const namespace = zcu.namespacePtr(decl.src_namespace);
- const target = namespace.fileScope(zcu).mod.resolved_target.result;
+ const nav = ip.getNav(nav_index);
+ const target = zcu.navFileScope(nav_index).mod.resolved_target.result;
const target_endian = target.cpu.arch.endian();
- var dbg_line_buffer = &decl_state.dbg_line;
- var dbg_info_buffer = &decl_state.dbg_info;
+ var dbg_line_buffer = &nav_state.dbg_line;
+ var dbg_info_buffer = &nav_state.dbg_info;
- assert(decl.has_tv);
- switch (decl.typeOf(zcu).zigTypeTag(zcu)) {
+ const nav_val = Value.fromInterned(nav.status.resolved.val);
+ switch (nav_val.typeOf(zcu).zigTypeTag(zcu)) {
.Fn => {
- try decl_state.setInlineFunc(decl.val.toIntern());
+ try nav_state.setInlineFunc(nav_val.toIntern());
- // Since the Decl is a function, we need to update the .debug_line program.
+ // Since the Nav is a function, we need to update the .debug_line program.
// Perform the relocations based on vaddr.
switch (self.ptr_width) {
.p32 => {
@@ -1254,10 +1254,10 @@ pub fn commitDeclState(
// Now we have the full contents and may allocate a region to store it.
- // This logic is nearly identical to the logic below in `updateDeclDebugInfo` for
+ // This logic is nearly identical to the logic below in `updateNavDebugInfo` for
// `TextBlock` and the .debug_info. If you are editing this logic, you
// probably need to edit that logic too.
- const src_fn_index = self.src_fn_decls.get(decl_index).?;
+ const src_fn_index = self.src_fn_navs.get(nav_index).?;
const src_fn = self.getAtomPtr(.src_fn, src_fn_index);
src_fn.len = @intCast(dbg_line_buffer.items.len);
@@ -1275,33 +1275,26 @@ pub fn commitDeclState(
next.prev_index = src_fn.prev_index;
src_fn.next_index = null;
// Populate where it used to be with NOPs.
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const debug_line_sect = &elf_file.shdrs.items[elf_file.debug_line_section_index.?];
- const file_pos = debug_line_sect.sh_offset + src_fn.off;
- try pwriteDbgLineNops(elf_file.base.file.?, file_pos, 0, &[0]u8{}, src_fn.len);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const debug_line_sect = &macho_file.sections.items(.header)[macho_file.debug_line_sect_index.?];
- const file_pos = debug_line_sect.offset + src_fn.off;
- try pwriteDbgLineNops(macho_file.base.file.?, file_pos, 0, &[0]u8{}, src_fn.len);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const debug_line_sect = d_sym.getSectionPtr(d_sym.debug_line_section_index.?);
- const file_pos = debug_line_sect.offset + src_fn.off;
- try pwriteDbgLineNops(d_sym.file, file_pos, 0, &[0]u8{}, src_fn.len);
- }
- },
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_line = wasm_file.getAtomPtr(wasm_file.debug_line_atom.?).code;
- // writeDbgLineNopsBuffered(debug_line.items, src_fn.off, 0, &.{}, src_fn.len);
- },
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const debug_line_sect = &elf_file.shdrs.items[elf_file.debug_line_section_index.?];
+ const file_pos = debug_line_sect.sh_offset + src_fn.off;
+ try pwriteDbgLineNops(elf_file.base.file.?, file_pos, 0, &[0]u8{}, src_fn.len);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const debug_line_sect = &macho_file.sections.items(.header)[macho_file.debug_line_sect_index.?];
+ const file_pos = debug_line_sect.offset + src_fn.off;
+ try pwriteDbgLineNops(macho_file.base.file.?, file_pos, 0, &[0]u8{}, src_fn.len);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const debug_line_sect = d_sym.getSectionPtr(d_sym.debug_line_section_index.?);
+ const file_pos = debug_line_sect.offset + src_fn.off;
+ try pwriteDbgLineNops(d_sym.file, file_pos, 0, &[0]u8{}, src_fn.len);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_line = wasm_file.getAtomPtr(wasm_file.debug_line_atom.?).code;
+ // writeDbgLineNopsBuffered(debug_line.items, src_fn.off, 0, &.{}, src_fn.len);
+ } else unreachable;
// TODO Look at the free list before appending at the end.
src_fn.prev_index = last_index;
const last = self.getAtomPtr(.src_fn, last_index);
@@ -1342,76 +1335,67 @@ pub fn commitDeclState(
// We only have support for one compilation unit so far, so the offsets are directly
// from the .debug_line section.
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const shdr_index = elf_file.debug_line_section_index.?;
- try elf_file.growNonAllocSection(shdr_index, needed_size, 1, true);
- const debug_line_sect = elf_file.shdrs.items[shdr_index];
- const file_pos = debug_line_sect.sh_offset + src_fn.off;
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const shdr_index = elf_file.debug_line_section_index.?;
+ try elf_file.growNonAllocSection(shdr_index, needed_size, 1, true);
+ const debug_line_sect = elf_file.shdrs.items[shdr_index];
+ const file_pos = debug_line_sect.sh_offset + src_fn.off;
+ try pwriteDbgLineNops(
+ elf_file.base.file.?,
+ file_pos,
+ prev_padding_size,
+ dbg_line_buffer.items,
+ next_padding_size,
+ );
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const sect_index = macho_file.debug_line_sect_index.?;
+ try macho_file.growSection(sect_index, needed_size);
+ const sect = macho_file.sections.items(.header)[sect_index];
+ const file_pos = sect.offset + src_fn.off;
try pwriteDbgLineNops(
- elf_file.base.file.?,
+ macho_file.base.file.?,
file_pos,
prev_padding_size,
dbg_line_buffer.items,
next_padding_size,
);
- },
-
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const sect_index = macho_file.debug_line_sect_index.?;
- try macho_file.growSection(sect_index, needed_size);
- const sect = macho_file.sections.items(.header)[sect_index];
- const file_pos = sect.offset + src_fn.off;
- try pwriteDbgLineNops(
- macho_file.base.file.?,
- file_pos,
- prev_padding_size,
- dbg_line_buffer.items,
- next_padding_size,
- );
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const sect_index = d_sym.debug_line_section_index.?;
- try d_sym.growSection(sect_index, needed_size, true, macho_file);
- const sect = d_sym.getSection(sect_index);
- const file_pos = sect.offset + src_fn.off;
- try pwriteDbgLineNops(
- d_sym.file,
- file_pos,
- prev_padding_size,
- dbg_line_buffer.items,
- next_padding_size,
- );
- }
- },
-
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const atom = wasm_file.getAtomPtr(wasm_file.debug_line_atom.?);
- // const debug_line = &atom.code;
- // const segment_size = debug_line.items.len;
- // if (needed_size != segment_size) {
- // log.debug(" needed size does not equal allocated size: {d}", .{needed_size});
- // if (needed_size > segment_size) {
- // log.debug(" allocating {d} bytes for 'debug line' information", .{needed_size - segment_size});
- // try debug_line.resize(self.allocator, needed_size);
- // @memset(debug_line.items[segment_size..], 0);
- // }
- // debug_line.items.len = needed_size;
- // }
- // writeDbgLineNopsBuffered(
- // debug_line.items,
- // src_fn.off,
- // prev_padding_size,
- // dbg_line_buffer.items,
- // next_padding_size,
- // );
- },
- else => unreachable,
- }
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const sect_index = d_sym.debug_line_section_index.?;
+ try d_sym.growSection(sect_index, needed_size, true, macho_file);
+ const sect = d_sym.getSection(sect_index);
+ const file_pos = sect.offset + src_fn.off;
+ try pwriteDbgLineNops(
+ d_sym.file,
+ file_pos,
+ prev_padding_size,
+ dbg_line_buffer.items,
+ next_padding_size,
+ );
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const atom = wasm_file.getAtomPtr(wasm_file.debug_line_atom.?);
+ // const debug_line = &atom.code;
+ // const segment_size = debug_line.items.len;
+ // if (needed_size != segment_size) {
+ // log.debug(" needed size does not equal allocated size: {d}", .{needed_size});
+ // if (needed_size > segment_size) {
+ // log.debug(" allocating {d} bytes for 'debug line' information", .{needed_size - segment_size});
+ // try debug_line.resize(self.allocator, needed_size);
+ // @memset(debug_line.items[segment_size..], 0);
+ // }
+ // debug_line.items.len = needed_size;
+ // }
+ // writeDbgLineNopsBuffered(
+ // debug_line.items,
+ // src_fn.off,
+ // prev_padding_size,
+ // dbg_line_buffer.items,
+ // next_padding_size,
+ // );
+ } else unreachable;
// .debug_info - End the TAG.subprogram children.
try dbg_info_buffer.append(0);
@@ -1422,27 +1406,27 @@ pub fn commitDeclState(
if (dbg_info_buffer.items.len == 0)
return;
- const di_atom_index = self.di_atom_decls.get(decl_index).?;
- if (decl_state.abbrev_table.items.len > 0) {
- // Now we emit the .debug_info types of the Decl. These will count towards the size of
+ const di_atom_index = self.di_atom_navs.get(nav_index).?;
+ if (nav_state.abbrev_table.items.len > 0) {
+ // Now we emit the .debug_info types of the Nav. These will count towards the size of
// the buffer, so we have to do it before computing the offset, and we can't perform the actual
// relocations yet.
var sym_index: usize = 0;
- while (sym_index < decl_state.abbrev_table.items.len) : (sym_index += 1) {
- const symbol = &decl_state.abbrev_table.items[sym_index];
+ while (sym_index < nav_state.abbrev_table.items.len) : (sym_index += 1) {
+ const symbol = &nav_state.abbrev_table.items[sym_index];
const ty = symbol.type;
if (ip.isErrorSetType(ty.toIntern())) continue;
symbol.offset = @intCast(dbg_info_buffer.items.len);
- try decl_state.addDbgInfoType(pt, di_atom_index, ty);
+ try nav_state.addDbgInfoType(pt, di_atom_index, ty);
}
}
- try self.updateDeclDebugInfoAllocation(di_atom_index, @intCast(dbg_info_buffer.items.len));
+ try self.updateNavDebugInfoAllocation(di_atom_index, @intCast(dbg_info_buffer.items.len));
- while (decl_state.abbrev_relocs.popOrNull()) |reloc| {
+ while (nav_state.abbrev_relocs.popOrNull()) |reloc| {
if (reloc.target) |reloc_target| {
- const symbol = decl_state.abbrev_table.items[reloc_target];
+ const symbol = nav_state.abbrev_table.items[reloc_target];
const ty = symbol.type;
if (ip.isErrorSetType(ty.toIntern())) {
log.debug("resolving %{d} deferred until flush", .{reloc_target});
@@ -1479,38 +1463,35 @@ pub fn commitDeclState(
}
}
- while (decl_state.exprloc_relocs.popOrNull()) |reloc| {
- switch (self.bin_file.tag) {
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- // TODO
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- try d_sym.relocs.append(d_sym.allocator, .{
- .type = switch (reloc.type) {
- .direct_load => .direct_load,
- .got_load => .got_load,
- },
- .target = reloc.target,
- .offset = reloc.offset + self.getAtom(.di_atom, di_atom_index).off,
- .addend = 0,
- });
- }
- },
- .elf => {}, // TODO
- else => unreachable,
- }
+ while (nav_state.exprloc_relocs.popOrNull()) |reloc| {
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ _ = elf_file; // TODO
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ // TODO
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ try d_sym.relocs.append(d_sym.allocator, .{
+ .type = switch (reloc.type) {
+ .direct_load => .direct_load,
+ .got_load => .got_load,
+ },
+ .target = reloc.target,
+ .offset = reloc.offset + self.getAtom(.di_atom, di_atom_index).off,
+ .addend = 0,
+ });
+ }
+ } else unreachable;
}
- try self.writeDeclDebugInfo(di_atom_index, dbg_info_buffer.items);
+ try self.writeNavDebugInfo(di_atom_index, dbg_info_buffer.items);
}
-fn updateDeclDebugInfoAllocation(self: *Dwarf, atom_index: Atom.Index, len: u32) !void {
+fn updateNavDebugInfoAllocation(self: *Dwarf, atom_index: Atom.Index, len: u32) !void {
const tracy = trace(@src());
defer tracy.end();
- // This logic is nearly identical to the logic above in `updateDecl` for
+ // This logic is nearly identical to the logic above in `updateNav` for
// `SrcFn` and the line number programs. If you are editing this logic, you
// probably need to edit that logic too.
const gpa = self.allocator;
@@ -1521,7 +1502,7 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, atom_index: Atom.Index, len: u32)
if (atom_index == last_index) break :blk;
if (atom.next_index) |next_index| {
const next = self.getAtomPtr(.di_atom, next_index);
- // Update existing Decl - non-last item.
+ // Update existing Nav - non-last item.
if (atom.off + atom.len + min_nop_size > next.off) {
// It grew too big, so we move it to a new location.
if (atom.prev_index) |prev_index| {
@@ -1531,34 +1512,27 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, atom_index: Atom.Index, len: u32)
next.prev_index = atom.prev_index;
atom.next_index = null;
// Populate where it used to be with NOPs.
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const debug_info_sect = &elf_file.shdrs.items[elf_file.debug_info_section_index.?];
- const file_pos = debug_info_sect.sh_offset + atom.off;
- try pwriteDbgInfoNops(elf_file.base.file.?, file_pos, 0, &[0]u8{}, atom.len, false);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const debug_info_sect = macho_file.sections.items(.header)[macho_file.debug_info_sect_index.?];
- const file_pos = debug_info_sect.offset + atom.off;
- try pwriteDbgInfoNops(macho_file.base.file.?, file_pos, 0, &[0]u8{}, atom.len, false);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const debug_info_sect = d_sym.getSectionPtr(d_sym.debug_info_section_index.?);
- const file_pos = debug_info_sect.offset + atom.off;
- try pwriteDbgInfoNops(d_sym.file, file_pos, 0, &[0]u8{}, atom.len, false);
- }
- },
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_info_index = wasm_file.debug_info_atom.?;
- // const debug_info = &wasm_file.getAtomPtr(debug_info_index).code;
- // try writeDbgInfoNopsToArrayList(gpa, debug_info, atom.off, 0, &.{0}, atom.len, false);
- },
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const debug_info_sect = &elf_file.shdrs.items[elf_file.debug_info_section_index.?];
+ const file_pos = debug_info_sect.sh_offset + atom.off;
+ try pwriteDbgInfoNops(elf_file.base.file.?, file_pos, 0, &[0]u8{}, atom.len, false);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const debug_info_sect = macho_file.sections.items(.header)[macho_file.debug_info_sect_index.?];
+ const file_pos = debug_info_sect.offset + atom.off;
+ try pwriteDbgInfoNops(macho_file.base.file.?, file_pos, 0, &[0]u8{}, atom.len, false);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const debug_info_sect = d_sym.getSectionPtr(d_sym.debug_info_section_index.?);
+ const file_pos = debug_info_sect.offset + atom.off;
+ try pwriteDbgInfoNops(d_sym.file, file_pos, 0, &[0]u8{}, atom.len, false);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_info_index = wasm_file.debug_info_atom.?;
+ // const debug_info = &wasm_file.getAtomPtr(debug_info_index).code;
+ // try writeDbgInfoNopsToArrayList(gpa, debug_info, atom.off, 0, &.{0}, atom.len, false);
+ } else unreachable;
// TODO Look at the free list before appending at the end.
atom.prev_index = last_index;
const last = self.getAtomPtr(.di_atom, last_index);
@@ -1568,7 +1542,7 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, atom_index: Atom.Index, len: u32)
atom.off = last.off + padToIdeal(last.len);
}
} else if (atom.prev_index == null) {
- // Append new Decl.
+ // Append new Nav.
// TODO Look at the free list before appending at the end.
atom.prev_index = last_index;
const last = self.getAtomPtr(.di_atom, last_index);
@@ -1578,7 +1552,7 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, atom_index: Atom.Index, len: u32)
atom.off = last.off + padToIdeal(last.len);
}
} else {
- // This is the first Decl of the .debug_info
+ // This is the first Nav of the .debug_info
self.di_atom_first_index = atom_index;
self.di_atom_last_index = atom_index;
@@ -1586,19 +1560,19 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, atom_index: Atom.Index, len: u32)
}
}
-fn writeDeclDebugInfo(self: *Dwarf, atom_index: Atom.Index, dbg_info_buf: []const u8) !void {
+fn writeNavDebugInfo(self: *Dwarf, atom_index: Atom.Index, dbg_info_buf: []const u8) !void {
const tracy = trace(@src());
defer tracy.end();
- // This logic is nearly identical to the logic above in `updateDecl` for
+ // This logic is nearly identical to the logic above in `updateNav` for
// `SrcFn` and the line number programs. If you are editing this logic, you
// probably need to edit that logic too.
const atom = self.getAtom(.di_atom, atom_index);
- const last_decl_index = self.di_atom_last_index.?;
- const last_decl = self.getAtom(.di_atom, last_decl_index);
- // +1 for a trailing zero to end the children of the decl tag.
- const needed_size = last_decl.off + last_decl.len + 1;
+ const last_nav_index = self.di_atom_last_index.?;
+ const last_nav = self.getAtom(.di_atom, last_nav_index);
+ // +1 for a trailing zero to end the children of the nav tag.
+ const needed_size = last_nav.off + last_nav.len + 1;
const prev_padding_size: u32 = if (atom.prev_index) |prev_index| blk: {
const prev = self.getAtom(.di_atom, prev_index);
break :blk atom.off - (prev.off + prev.len);
@@ -1608,107 +1582,99 @@ fn writeDeclDebugInfo(self: *Dwarf, atom_index: Atom.Index, dbg_info_buf: []cons
break :blk next.off - (atom.off + atom.len);
} else 0;
- // To end the children of the decl tag.
+ // To end the children of the nav tag.
const trailing_zero = atom.next_index == null;
// We only have support for one compilation unit so far, so the offsets are directly
// from the .debug_info section.
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const shdr_index = elf_file.debug_info_section_index.?;
- try elf_file.growNonAllocSection(shdr_index, needed_size, 1, true);
- const debug_info_sect = &elf_file.shdrs.items[shdr_index];
- const file_pos = debug_info_sect.sh_offset + atom.off;
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const shdr_index = elf_file.debug_info_section_index.?;
+ try elf_file.growNonAllocSection(shdr_index, needed_size, 1, true);
+ const debug_info_sect = &elf_file.shdrs.items[shdr_index];
+ const file_pos = debug_info_sect.sh_offset + atom.off;
+ try pwriteDbgInfoNops(
+ elf_file.base.file.?,
+ file_pos,
+ prev_padding_size,
+ dbg_info_buf,
+ next_padding_size,
+ trailing_zero,
+ );
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const sect_index = macho_file.debug_info_sect_index.?;
+ try macho_file.growSection(sect_index, needed_size);
+ const sect = macho_file.sections.items(.header)[sect_index];
+ const file_pos = sect.offset + atom.off;
try pwriteDbgInfoNops(
- elf_file.base.file.?,
+ macho_file.base.file.?,
file_pos,
prev_padding_size,
dbg_info_buf,
next_padding_size,
trailing_zero,
);
- },
-
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const sect_index = macho_file.debug_info_sect_index.?;
- try macho_file.growSection(sect_index, needed_size);
- const sect = macho_file.sections.items(.header)[sect_index];
- const file_pos = sect.offset + atom.off;
- try pwriteDbgInfoNops(
- macho_file.base.file.?,
- file_pos,
- prev_padding_size,
- dbg_info_buf,
- next_padding_size,
- trailing_zero,
- );
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const sect_index = d_sym.debug_info_section_index.?;
- try d_sym.growSection(sect_index, needed_size, true, macho_file);
- const sect = d_sym.getSection(sect_index);
- const file_pos = sect.offset + atom.off;
- try pwriteDbgInfoNops(
- d_sym.file,
- file_pos,
- prev_padding_size,
- dbg_info_buf,
- next_padding_size,
- trailing_zero,
- );
- }
- },
-
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const info_atom = wasm_file.debug_info_atom.?;
- // const debug_info = &wasm_file.getAtomPtr(info_atom).code;
- // const segment_size = debug_info.items.len;
- // if (needed_size != segment_size) {
- // log.debug(" needed size does not equal allocated size: {d}", .{needed_size});
- // if (needed_size > segment_size) {
- // log.debug(" allocating {d} bytes for 'debug info' information", .{needed_size - segment_size});
- // try debug_info.resize(self.allocator, needed_size);
- // @memset(debug_info.items[segment_size..], 0);
- // }
- // debug_info.items.len = needed_size;
- // }
- // log.debug(" writeDbgInfoNopsToArrayList debug_info_len={d} offset={d} content_len={d} next_padding_size={d}", .{
- // debug_info.items.len, atom.off, dbg_info_buf.len, next_padding_size,
- // });
- // try writeDbgInfoNopsToArrayList(
- // gpa,
- // debug_info,
- // atom.off,
- // prev_padding_size,
- // dbg_info_buf,
- // next_padding_size,
- // trailing_zero,
- // );
- },
- else => unreachable,
- }
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const sect_index = d_sym.debug_info_section_index.?;
+ try d_sym.growSection(sect_index, needed_size, true, macho_file);
+ const sect = d_sym.getSection(sect_index);
+ const file_pos = sect.offset + atom.off;
+ try pwriteDbgInfoNops(
+ d_sym.file,
+ file_pos,
+ prev_padding_size,
+ dbg_info_buf,
+ next_padding_size,
+ trailing_zero,
+ );
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const info_atom = wasm_file.debug_info_atom.?;
+ // const debug_info = &wasm_file.getAtomPtr(info_atom).code;
+ // const segment_size = debug_info.items.len;
+ // if (needed_size != segment_size) {
+ // log.debug(" needed size does not equal allocated size: {d}", .{needed_size});
+ // if (needed_size > segment_size) {
+ // log.debug(" allocating {d} bytes for 'debug info' information", .{needed_size - segment_size});
+ // try debug_info.resize(self.allocator, needed_size);
+ // @memset(debug_info.items[segment_size..], 0);
+ // }
+ // debug_info.items.len = needed_size;
+ // }
+ // log.debug(" writeDbgInfoNopsToArrayList debug_info_len={d} offset={d} content_len={d} next_padding_size={d}", .{
+ // debug_info.items.len, atom.off, dbg_info_buf.len, next_padding_size,
+ // });
+ // try writeDbgInfoNopsToArrayList(
+ // gpa,
+ // debug_info,
+ // atom.off,
+ // prev_padding_size,
+ // dbg_info_buf,
+ // next_padding_size,
+ // trailing_zero,
+ // );
+ } else unreachable;
}
-pub fn updateDeclLineNumber(self: *Dwarf, zcu: *Zcu, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNavLineNumber(self: *Dwarf, zcu: *Zcu, nav_index: InternPool.Nav.Index) !void {
const tracy = trace(@src());
defer tracy.end();
- const atom_index = try self.getOrCreateAtomForDecl(.src_fn, decl_index);
+ const atom_index = try self.getOrCreateAtomForNav(.src_fn, nav_index);
const atom = self.getAtom(.src_fn, atom_index);
if (atom.len == 0) return;
- const decl = zcu.declPtr(decl_index);
- const func = decl.val.getFunction(zcu).?;
- log.debug("decl.src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{
- decl.navSrcLine(zcu),
+ const nav = zcu.intern_pool.getNav(nav_index);
+ const nav_val = Value.fromInterned(nav.status.resolved.val);
+ const func = nav_val.getFunction(zcu).?;
+ log.debug("src_line={d}, func.lbrace_line={d}, func.rbrace_line={d}", .{
+ zcu.navSrcLine(nav_index),
func.lbrace_line,
func.rbrace_line,
});
- const line: u28 = @intCast(decl.navSrcLine(zcu) + func.lbrace_line);
+ const line: u28 = @intCast(zcu.navSrcLine(nav_index) + func.lbrace_line);
var data: [4]u8 = undefined;
leb128.writeUnsignedFixed(4, &data, line);
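
An aside on the encoding used just above: `writeUnsignedFixed(4, &data, line)` emits the line number as ULEB128 padded to exactly four bytes, so the operand can later be patched in place without resizing the line program. Below is a minimal sketch of that fixed-width encoding; `writeUleb128Fixed4` is a hypothetical stand-in for the `leb128.writeUnsignedFixed` helper used here, not the compiler's own implementation.

const std = @import("std");

/// Encode `value` as ULEB128 padded to exactly four bytes: every byte except the
/// last carries the continuation bit, even when the remaining value is zero, so
/// the operand keeps a fixed width and can be overwritten in place later.
fn writeUleb128Fixed4(buf: *[4]u8, value: u28) void {
    var rest: u32 = value;
    for (buf, 0..) |*byte, i| {
        const low7: u8 = @truncate(rest & 0x7f);
        rest >>= 7;
        byte.* = if (i == buf.len - 1) low7 else low7 | 0x80;
    }
}

test "fixed-width ULEB128 always occupies four bytes" {
    var buf: [4]u8 = undefined;
    writeUleb128Fixed4(&buf, 1);
    // 1 would normally fit in a single LEB byte; the padding bytes keep the
    // continuation bit set so consumers still decode the same value.
    try std.testing.expectEqualSlices(u8, &.{ 0x81, 0x80, 0x80, 0x00 }, &buf);
}
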
@@ -1742,11 +1708,11 @@ pub fn updateDeclLineNumber(self: *Dwarf, zcu: *Zcu, decl_index: InternPool.Decl
}
}
-pub fn freeDecl(self: *Dwarf, decl_index: InternPool.DeclIndex) void {
+pub fn freeNav(self: *Dwarf, nav_index: InternPool.Nav.Index) void {
const gpa = self.allocator;
// Free SrcFn atom
- if (self.src_fn_decls.fetchRemove(decl_index)) |kv| {
+ if (self.src_fn_navs.fetchRemove(nav_index)) |kv| {
const src_fn_index = kv.value;
const src_fn = self.getAtom(.src_fn, src_fn_index);
_ = self.src_fn_free_list.remove(src_fn_index);
@@ -1773,7 +1739,7 @@ pub fn freeDecl(self: *Dwarf, decl_index: InternPool.DeclIndex) void {
}
// Free DI atom
- if (self.di_atom_decls.fetchRemove(decl_index)) |kv| {
+ if (self.di_atom_navs.fetchRemove(nav_index)) |kv| {
const di_atom_index = kv.value;
const di_atom = self.getAtomPtr(.di_atom, di_atom_index);
@@ -1930,40 +1896,33 @@ pub fn writeDbgAbbrev(self: *Dwarf) !void {
self.abbrev_table_offset = abbrev_offset;
const needed_size = abbrev_buf.len;
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const shdr_index = elf_file.debug_abbrev_section_index.?;
- try elf_file.growNonAllocSection(shdr_index, needed_size, 1, false);
- const debug_abbrev_sect = &elf_file.shdrs.items[shdr_index];
- const file_pos = debug_abbrev_sect.sh_offset + abbrev_offset;
- try elf_file.base.file.?.pwriteAll(&abbrev_buf, file_pos);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const sect_index = macho_file.debug_abbrev_sect_index.?;
- try macho_file.growSection(sect_index, needed_size);
- const sect = macho_file.sections.items(.header)[sect_index];
- const file_pos = sect.offset + abbrev_offset;
- try macho_file.base.file.?.pwriteAll(&abbrev_buf, file_pos);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const sect_index = d_sym.debug_abbrev_section_index.?;
- try d_sym.growSection(sect_index, needed_size, false, macho_file);
- const sect = d_sym.getSection(sect_index);
- const file_pos = sect.offset + abbrev_offset;
- try d_sym.file.pwriteAll(&abbrev_buf, file_pos);
- }
- },
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_abbrev = &wasm_file.getAtomPtr(wasm_file.debug_abbrev_atom.?).code;
- // try debug_abbrev.resize(gpa, needed_size);
- // debug_abbrev.items[0..abbrev_buf.len].* = abbrev_buf;
- },
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const shdr_index = elf_file.debug_abbrev_section_index.?;
+ try elf_file.growNonAllocSection(shdr_index, needed_size, 1, false);
+ const debug_abbrev_sect = &elf_file.shdrs.items[shdr_index];
+ const file_pos = debug_abbrev_sect.sh_offset + abbrev_offset;
+ try elf_file.base.file.?.pwriteAll(&abbrev_buf, file_pos);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const sect_index = macho_file.debug_abbrev_sect_index.?;
+ try macho_file.growSection(sect_index, needed_size);
+ const sect = macho_file.sections.items(.header)[sect_index];
+ const file_pos = sect.offset + abbrev_offset;
+ try macho_file.base.file.?.pwriteAll(&abbrev_buf, file_pos);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const sect_index = d_sym.debug_abbrev_section_index.?;
+ try d_sym.growSection(sect_index, needed_size, false, macho_file);
+ const sect = d_sym.getSection(sect_index);
+ const file_pos = sect.offset + abbrev_offset;
+ try d_sym.file.pwriteAll(&abbrev_buf, file_pos);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_abbrev = &wasm_file.getAtomPtr(wasm_file.debug_abbrev_atom.?).code;
+ // try debug_abbrev.resize(gpa, needed_size);
+ // debug_abbrev.items[0..abbrev_buf.len].* = abbrev_buf;
+ } else unreachable;
}
fn dbgInfoHeaderBytes(self: *Dwarf) usize {
@@ -2027,37 +1986,30 @@ pub fn writeDbgInfoHeader(self: *Dwarf, zcu: *Zcu, low_pc: u64, high_pc: u64) !v
mem.writeInt(u16, di_buf.addManyAsArrayAssumeCapacity(2), DW.LANG.C99, target_endian);
if (di_buf.items.len > first_dbg_info_off) {
- // Move the first N decls to the end to make more padding for the header.
+ // Move the first N navs to the end to make more padding for the header.
@panic("TODO: handle .debug_info header exceeding its padding");
}
const jmp_amt = first_dbg_info_off - di_buf.items.len;
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const debug_info_sect = &elf_file.shdrs.items[elf_file.debug_info_section_index.?];
- const file_pos = debug_info_sect.sh_offset;
- try pwriteDbgInfoNops(elf_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt, false);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const debug_info_sect = macho_file.sections.items(.header)[macho_file.debug_info_sect_index.?];
- const file_pos = debug_info_sect.offset;
- try pwriteDbgInfoNops(macho_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt, false);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const debug_info_sect = d_sym.getSection(d_sym.debug_info_section_index.?);
- const file_pos = debug_info_sect.offset;
- try pwriteDbgInfoNops(d_sym.file, file_pos, 0, di_buf.items, jmp_amt, false);
- }
- },
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_info = &wasm_file.getAtomPtr(wasm_file.debug_info_atom.?).code;
- // try writeDbgInfoNopsToArrayList(self.allocator, debug_info, 0, 0, di_buf.items, jmp_amt, false);
- },
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const debug_info_sect = &elf_file.shdrs.items[elf_file.debug_info_section_index.?];
+ const file_pos = debug_info_sect.sh_offset;
+ try pwriteDbgInfoNops(elf_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt, false);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const debug_info_sect = macho_file.sections.items(.header)[macho_file.debug_info_sect_index.?];
+ const file_pos = debug_info_sect.offset;
+ try pwriteDbgInfoNops(macho_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt, false);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const debug_info_sect = d_sym.getSection(d_sym.debug_info_section_index.?);
+ const file_pos = debug_info_sect.offset;
+ try pwriteDbgInfoNops(d_sym.file, file_pos, 0, di_buf.items, jmp_amt, false);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_info = &wasm_file.getAtomPtr(wasm_file.debug_info_atom.?).code;
+ // try writeDbgInfoNopsToArrayList(self.allocator, debug_info, 0, 0, di_buf.items, jmp_amt, false);
+ } else unreachable;
}
fn resolveCompilationDir(zcu: *Zcu, buffer: *[std.fs.max_path_bytes]u8) []const u8 {
@@ -2360,40 +2312,33 @@ pub fn writeDbgAranges(self: *Dwarf, addr: u64, size: u64) !void {
}
const needed_size: u32 = @intCast(di_buf.items.len);
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const shdr_index = elf_file.debug_aranges_section_index.?;
- try elf_file.growNonAllocSection(shdr_index, needed_size, 16, false);
- const debug_aranges_sect = &elf_file.shdrs.items[shdr_index];
- const file_pos = debug_aranges_sect.sh_offset;
- try elf_file.base.file.?.pwriteAll(di_buf.items, file_pos);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const sect_index = macho_file.debug_aranges_sect_index.?;
- try macho_file.growSection(sect_index, needed_size);
- const sect = macho_file.sections.items(.header)[sect_index];
- const file_pos = sect.offset;
- try macho_file.base.file.?.pwriteAll(di_buf.items, file_pos);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const sect_index = d_sym.debug_aranges_section_index.?;
- try d_sym.growSection(sect_index, needed_size, false, macho_file);
- const sect = d_sym.getSection(sect_index);
- const file_pos = sect.offset;
- try d_sym.file.pwriteAll(di_buf.items, file_pos);
- }
- },
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_ranges = &wasm_file.getAtomPtr(wasm_file.debug_ranges_atom.?).code;
- // try debug_ranges.resize(gpa, needed_size);
- // @memcpy(debug_ranges.items[0..di_buf.items.len], di_buf.items);
- },
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const shdr_index = elf_file.debug_aranges_section_index.?;
+ try elf_file.growNonAllocSection(shdr_index, needed_size, 16, false);
+ const debug_aranges_sect = &elf_file.shdrs.items[shdr_index];
+ const file_pos = debug_aranges_sect.sh_offset;
+ try elf_file.base.file.?.pwriteAll(di_buf.items, file_pos);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const sect_index = macho_file.debug_aranges_sect_index.?;
+ try macho_file.growSection(sect_index, needed_size);
+ const sect = macho_file.sections.items(.header)[sect_index];
+ const file_pos = sect.offset;
+ try macho_file.base.file.?.pwriteAll(di_buf.items, file_pos);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const sect_index = d_sym.debug_aranges_section_index.?;
+ try d_sym.growSection(sect_index, needed_size, false, macho_file);
+ const sect = d_sym.getSection(sect_index);
+ const file_pos = sect.offset;
+ try d_sym.file.pwriteAll(di_buf.items, file_pos);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_ranges = &wasm_file.getAtomPtr(wasm_file.debug_ranges_atom.?).code;
+ // try debug_ranges.resize(gpa, needed_size);
+ // @memcpy(debug_ranges.items[0..di_buf.items.len], di_buf.items);
+ } else unreachable;
}
pub fn writeDbgLineHeader(self: *Dwarf) !void {
@@ -2502,60 +2447,52 @@ pub fn writeDbgLineHeader(self: *Dwarf) !void {
var src_fn_index = first_fn_index;
- var buffer = try gpa.alloc(u8, last_fn.off + last_fn.len - first_fn.off);
+ const buffer = try gpa.alloc(u8, last_fn.off + last_fn.len - first_fn.off);
defer gpa.free(buffer);
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const shdr_index = elf_file.debug_line_section_index.?;
- const needed_size = elf_file.shdrs.items[shdr_index].sh_size + delta;
- try elf_file.growNonAllocSection(shdr_index, needed_size, 1, true);
- const file_pos = elf_file.shdrs.items[shdr_index].sh_offset + first_fn.off;
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const shdr_index = elf_file.debug_line_section_index.?;
+ const needed_size = elf_file.shdrs.items[shdr_index].sh_size + delta;
+ try elf_file.growNonAllocSection(shdr_index, needed_size, 1, true);
+ const file_pos = elf_file.shdrs.items[shdr_index].sh_offset + first_fn.off;
- const amt = try elf_file.base.file.?.preadAll(buffer, file_pos);
- if (amt != buffer.len) return error.InputOutput;
+ const amt = try elf_file.base.file.?.preadAll(buffer, file_pos);
+ if (amt != buffer.len) return error.InputOutput;
- try elf_file.base.file.?.pwriteAll(buffer, file_pos + delta);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const sect_index = macho_file.debug_line_sect_index.?;
- const needed_size: u32 = @intCast(macho_file.sections.items(.header)[sect_index].size + delta);
- try macho_file.growSection(sect_index, needed_size);
- const file_pos = macho_file.sections.items(.header)[sect_index].offset + first_fn.off;
+ try elf_file.base.file.?.pwriteAll(buffer, file_pos + delta);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const sect_index = macho_file.debug_line_sect_index.?;
+ const needed_size: u32 = @intCast(macho_file.sections.items(.header)[sect_index].size + delta);
+ try macho_file.growSection(sect_index, needed_size);
+ const file_pos = macho_file.sections.items(.header)[sect_index].offset + first_fn.off;
- const amt = try macho_file.base.file.?.preadAll(buffer, file_pos);
- if (amt != buffer.len) return error.InputOutput;
+ const amt = try macho_file.base.file.?.preadAll(buffer, file_pos);
+ if (amt != buffer.len) return error.InputOutput;
- try macho_file.base.file.?.pwriteAll(buffer, file_pos + delta);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const sect_index = d_sym.debug_line_section_index.?;
- const needed_size: u32 = @intCast(d_sym.getSection(sect_index).size + delta);
- try d_sym.growSection(sect_index, needed_size, true, macho_file);
- const file_pos = d_sym.getSection(sect_index).offset + first_fn.off;
+ try macho_file.base.file.?.pwriteAll(buffer, file_pos + delta);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const sect_index = d_sym.debug_line_section_index.?;
+ const needed_size: u32 = @intCast(d_sym.getSection(sect_index).size + delta);
+ try d_sym.growSection(sect_index, needed_size, true, macho_file);
+ const file_pos = d_sym.getSection(sect_index).offset + first_fn.off;
- const amt = try d_sym.file.preadAll(buffer, file_pos);
- if (amt != buffer.len) return error.InputOutput;
+ const amt = try d_sym.file.preadAll(buffer, file_pos);
+ if (amt != buffer.len) return error.InputOutput;
- try d_sym.file.pwriteAll(buffer, file_pos + delta);
- }
- },
- .wasm => {
- _ = &buffer;
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_line = &wasm_file.getAtomPtr(wasm_file.debug_line_atom.?).code;
- // {
- // const src = debug_line.items[first_fn.off..];
- // @memcpy(buffer[0..src.len], src);
- // }
- // try debug_line.resize(self.allocator, debug_line.items.len + delta);
- // @memcpy(debug_line.items[first_fn.off + delta ..][0..buffer.len], buffer);
- },
- else => unreachable,
- }
+ try d_sym.file.pwriteAll(buffer, file_pos + delta);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_line = &wasm_file.getAtomPtr(wasm_file.debug_line_atom.?).code;
+ // {
+ // const src = debug_line.items[first_fn.off..];
+ // @memcpy(buffer[0..src.len], src);
+ // }
+ // try debug_line.resize(self.allocator, debug_line.items.len + delta);
+ // @memcpy(debug_line.items[first_fn.off + delta ..][0..buffer.len], buffer);
+ } else unreachable;
while (true) {
const src_fn = self.getAtomPtr(.src_fn, src_fn_index);
@@ -2580,33 +2517,26 @@ pub fn writeDbgLineHeader(self: *Dwarf) !void {
// We use NOPs because consumers empirically do not respect the header length field.
const jmp_amt = self.getDebugLineProgramOff().? - di_buf.items.len;
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const debug_line_sect = &elf_file.shdrs.items[elf_file.debug_line_section_index.?];
- const file_pos = debug_line_sect.sh_offset;
- try pwriteDbgLineNops(elf_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const debug_line_sect = macho_file.sections.items(.header)[macho_file.debug_line_sect_index.?];
- const file_pos = debug_line_sect.offset;
- try pwriteDbgLineNops(macho_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const debug_line_sect = d_sym.getSection(d_sym.debug_line_section_index.?);
- const file_pos = debug_line_sect.offset;
- try pwriteDbgLineNops(d_sym.file, file_pos, 0, di_buf.items, jmp_amt);
- }
- },
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_line = &wasm_file.getAtomPtr(wasm_file.debug_line_atom.?).code;
- // writeDbgLineNopsBuffered(debug_line.items, 0, 0, di_buf.items, jmp_amt);
- },
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ const debug_line_sect = &elf_file.shdrs.items[elf_file.debug_line_section_index.?];
+ const file_pos = debug_line_sect.sh_offset;
+ try pwriteDbgLineNops(elf_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ const debug_line_sect = macho_file.sections.items(.header)[macho_file.debug_line_sect_index.?];
+ const file_pos = debug_line_sect.offset;
+ try pwriteDbgLineNops(macho_file.base.file.?, file_pos, 0, di_buf.items, jmp_amt);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const debug_line_sect = d_sym.getSection(d_sym.debug_line_section_index.?);
+ const file_pos = debug_line_sect.offset;
+ try pwriteDbgLineNops(d_sym.file, file_pos, 0, di_buf.items, jmp_amt);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_line = &wasm_file.getAtomPtr(wasm_file.debug_line_atom.?).code;
+ // writeDbgLineNopsBuffered(debug_line.items, 0, 0, di_buf.items, jmp_amt);
+ } else unreachable;
}
fn getDebugInfoOff(self: Dwarf) ?u32 {
@@ -2704,85 +2634,66 @@ pub fn flushModule(self: *Dwarf, pt: Zcu.PerThread) !void {
);
const di_atom_index = try self.createAtom(.di_atom);
- log.debug("updateDeclDebugInfoAllocation in flushModule", .{});
- try self.updateDeclDebugInfoAllocation(di_atom_index, @intCast(dbg_info_buffer.items.len));
- log.debug("writeDeclDebugInfo in flushModule", .{});
- try self.writeDeclDebugInfo(di_atom_index, dbg_info_buffer.items);
-
- const file_pos = switch (self.bin_file.tag) {
- .elf => pos: {
- const elf_file = self.bin_file.cast(File.Elf).?;
- const debug_info_sect = &elf_file.shdrs.items[elf_file.debug_info_section_index.?];
- break :pos debug_info_sect.sh_offset;
- },
- .macho => pos: {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- const debug_info_sect = &macho_file.sections.items(.header)[macho_file.debug_info_sect_index.?];
- break :pos debug_info_sect.offset;
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- const debug_info_sect = d_sym.getSectionPtr(d_sym.debug_info_section_index.?);
- break :pos debug_info_sect.offset;
- }
- },
+ log.debug("updateNavDebugInfoAllocation in flushModule", .{});
+ try self.updateNavDebugInfoAllocation(di_atom_index, @intCast(dbg_info_buffer.items.len));
+ log.debug("writeNavDebugInfo in flushModule", .{});
+ try self.writeNavDebugInfo(di_atom_index, dbg_info_buffer.items);
+
+ const file_pos = if (self.bin_file.cast(.elf)) |elf_file| pos: {
+ const debug_info_sect = &elf_file.shdrs.items[elf_file.debug_info_section_index.?];
+ break :pos debug_info_sect.sh_offset;
+ } else if (self.bin_file.cast(.macho)) |macho_file| pos: {
+ if (macho_file.base.isRelocatable()) {
+ const debug_info_sect = &macho_file.sections.items(.header)[macho_file.debug_info_sect_index.?];
+ break :pos debug_info_sect.offset;
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ const debug_info_sect = d_sym.getSectionPtr(d_sym.debug_info_section_index.?);
+ break :pos debug_info_sect.offset;
+ }
+ } else if (self.bin_file.cast(.wasm)) |_|
// for wasm, the offset is always 0 as we write to memory first
- .wasm => 0,
- else => unreachable,
- };
+ 0
+ else
+ unreachable;
var buf: [@sizeOf(u32)]u8 = undefined;
mem.writeInt(u32, &buf, self.getAtom(.di_atom, di_atom_index).off, target.cpu.arch.endian());
while (self.global_abbrev_relocs.popOrNull()) |reloc| {
const atom = self.getAtom(.di_atom, reloc.atom_index);
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- try elf_file.base.file.?.pwriteAll(&buf, file_pos + atom.off + reloc.offset);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- try macho_file.base.file.?.pwriteAll(&buf, file_pos + atom.off + reloc.offset);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- try d_sym.file.pwriteAll(&buf, file_pos + atom.off + reloc.offset);
- }
- },
- .wasm => {
- // const wasm_file = self.bin_file.cast(File.Wasm).?;
- // const debug_info = wasm_file.getAtomPtr(wasm_file.debug_info_atom.?).code;
- // debug_info.items[atom.off + reloc.offset ..][0..buf.len].* = buf;
- },
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ try elf_file.base.file.?.pwriteAll(&buf, file_pos + atom.off + reloc.offset);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ try macho_file.base.file.?.pwriteAll(&buf, file_pos + atom.off + reloc.offset);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ try d_sym.file.pwriteAll(&buf, file_pos + atom.off + reloc.offset);
+ }
+ } else if (self.bin_file.cast(.wasm)) |wasm_file| {
+ _ = wasm_file;
+ // const debug_info = wasm_file.getAtomPtr(wasm_file.debug_info_atom.?).code;
+ // debug_info.items[atom.off + reloc.offset ..][0..buf.len].* = buf;
+ } else unreachable;
}
}
}
-fn addDIFile(self: *Dwarf, zcu: *Zcu, decl_index: InternPool.DeclIndex) !u28 {
- const decl = zcu.declPtr(decl_index);
- const file_scope = decl.getFileScope(zcu);
+fn addDIFile(self: *Dwarf, zcu: *Zcu, nav_index: InternPool.Nav.Index) !u28 {
+ const file_scope = zcu.navFileScope(nav_index);
const gop = try self.di_files.getOrPut(self.allocator, file_scope);
if (!gop.found_existing) {
- switch (self.bin_file.tag) {
- .elf => {
- const elf_file = self.bin_file.cast(File.Elf).?;
- elf_file.markDirty(elf_file.debug_line_section_index.?);
- },
- .macho => {
- const macho_file = self.bin_file.cast(File.MachO).?;
- if (macho_file.base.isRelocatable()) {
- macho_file.markDirty(macho_file.debug_line_sect_index.?);
- } else {
- const d_sym = macho_file.getDebugSymbols().?;
- d_sym.markDirty(d_sym.debug_line_section_index.?, macho_file);
- }
- },
- .wasm => {},
- else => unreachable,
- }
+ if (self.bin_file.cast(.elf)) |elf_file| {
+ elf_file.markDirty(elf_file.debug_line_section_index.?);
+ } else if (self.bin_file.cast(.macho)) |macho_file| {
+ if (macho_file.base.isRelocatable()) {
+ macho_file.markDirty(macho_file.debug_line_sect_index.?);
+ } else {
+ const d_sym = macho_file.getDebugSymbols().?;
+ d_sym.markDirty(d_sym.debug_line_section_index.?, macho_file);
+ }
+ } else if (self.bin_file.cast(.wasm)) |_| {} else unreachable;
}
return @intCast(gop.index + 1);
}
@@ -2909,17 +2820,17 @@ fn createAtom(self: *Dwarf, comptime kind: Kind) !Atom.Index {
return index;
}
-fn getOrCreateAtomForDecl(self: *Dwarf, comptime kind: Kind, decl_index: InternPool.DeclIndex) !Atom.Index {
+fn getOrCreateAtomForNav(self: *Dwarf, comptime kind: Kind, nav_index: InternPool.Nav.Index) !Atom.Index {
switch (kind) {
.src_fn => {
- const gop = try self.src_fn_decls.getOrPut(self.allocator, decl_index);
+ const gop = try self.src_fn_navs.getOrPut(self.allocator, nav_index);
if (!gop.found_existing) {
gop.value_ptr.* = try self.createAtom(kind);
}
return gop.value_ptr.*;
},
.di_atom => {
- const gop = try self.di_atom_decls.getOrPut(self.allocator, decl_index);
+ const gop = try self.di_atom_navs.getOrPut(self.allocator, nav_index);
if (!gop.found_existing) {
gop.value_ptr.* = try self.createAtom(kind);
}
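
Before the next file, a note on the recurring refactor in the Dwarf.zig hunks above: each `switch (self.bin_file.tag)` that called `cast(File.Elf).?` inside its arms becomes an `if (self.bin_file.cast(.elf)) |elf_file| ... else if ... else unreachable` chain that unwraps the payload directly. The sketch below shows only the shape of that pattern; `Backend`, its payload structs, and this `cast` are toy stand-ins, not the actual `link.File` API.

const std = @import("std");

const Backend = union(enum) {
    elf: Elf,
    macho: MachO,
    wasm: Wasm,

    const Elf = struct { debug_line_off: u64 };
    const MachO = struct { relocatable: bool };
    const Wasm = struct {};

    /// Returns a pointer to the payload when the active tag matches, else null,
    /// mirroring how the diff tests `bin_file.cast(.elf)` instead of switching on a tag.
    fn cast(self: *Backend, comptime tag: std.meta.Tag(Backend)) ?*std.meta.TagPayload(Backend, tag) {
        if (std.meta.activeTag(self.*) != tag) return null;
        return &@field(self, @tagName(tag));
    }
};

fn describe(backend: *Backend) []const u8 {
    if (backend.cast(.elf)) |elf_file| {
        _ = elf_file;
        return "elf";
    } else if (backend.cast(.macho)) |macho_file| {
        return if (macho_file.relocatable) "macho (relocatable)" else "macho (dSYM)";
    } else if (backend.cast(.wasm)) |_| {
        return "wasm";
    } else unreachable;
}

pub fn main() void {
    var backend: Backend = .{ .macho = .{ .relocatable = true } };
    std.debug.print("{s}\n", .{describe(&backend)});
}
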
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index 16f8739b02..103c69202b 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -478,24 +478,24 @@ pub fn deinit(self: *Elf) void {
self.comdat_group_sections.deinit(gpa);
}
-pub fn getDeclVAddr(self: *Elf, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex, reloc_info: link.File.RelocInfo) !u64 {
+pub fn getNavVAddr(self: *Elf, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);
- return self.zigObjectPtr().?.getDeclVAddr(self, pt, decl_index, reloc_info);
+ return self.zigObjectPtr().?.getNavVAddr(self, pt, nav_index, reloc_info);
}
-pub fn lowerAnonDecl(
+pub fn lowerUav(
self: *Elf,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Zcu.LazySrcLoc,
-) !codegen.Result {
- return self.zigObjectPtr().?.lowerAnonDecl(self, pt, decl_val, explicit_alignment, src_loc);
+) !codegen.GenResult {
+ return self.zigObjectPtr().?.lowerUav(self, pt, uav, explicit_alignment, src_loc);
}
-pub fn getAnonDeclVAddr(self: *Elf, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
+pub fn getUavVAddr(self: *Elf, uav: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);
- return self.zigObjectPtr().?.getAnonDeclVAddr(self, decl_val, reloc_info);
+ return self.zigObjectPtr().?.getUavVAddr(self, uav, reloc_info);
}
/// Returns end pos of collision, if any.
@@ -2913,9 +2913,9 @@ pub fn writeElfHeader(self: *Elf) !void {
try self.base.file.?.pwriteAll(hdr_buf[0..index], 0);
}
-pub fn freeDecl(self: *Elf, decl_index: InternPool.DeclIndex) void {
- if (self.llvm_object) |llvm_object| return llvm_object.freeDecl(decl_index);
- return self.zigObjectPtr().?.freeDecl(self, decl_index);
+pub fn freeNav(self: *Elf, nav: InternPool.Nav.Index) void {
+ if (self.llvm_object) |llvm_object| return llvm_object.freeNav(nav);
+ return self.zigObjectPtr().?.freeNav(self, nav);
}
pub fn updateFunc(self: *Elf, pt: Zcu.PerThread, func_index: InternPool.Index, air: Air, liveness: Liveness) !void {
@@ -2926,20 +2926,16 @@ pub fn updateFunc(self: *Elf, pt: Zcu.PerThread, func_index: InternPool.Index, a
return self.zigObjectPtr().?.updateFunc(self, pt, func_index, air, liveness);
}
-pub fn updateDecl(
+pub fn updateNav(
self: *Elf,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
-) link.File.UpdateDeclError!void {
+ nav: InternPool.Nav.Index,
+) link.File.UpdateNavError!void {
if (build_options.skip_non_native and builtin.object_format != .elf) {
@panic("Attempted to compile for object format that was disabled by build configuration");
}
- if (self.llvm_object) |llvm_object| return llvm_object.updateDecl(pt, decl_index);
- return self.zigObjectPtr().?.updateDecl(self, pt, decl_index);
-}
-
-pub fn lowerUnnamedConst(self: *Elf, pt: Zcu.PerThread, val: Value, decl_index: InternPool.DeclIndex) !u32 {
- return self.zigObjectPtr().?.lowerUnnamedConst(self, pt, val, decl_index);
+ if (self.llvm_object) |llvm_object| return llvm_object.updateNav(pt, nav);
+ return self.zigObjectPtr().?.updateNav(self, pt, nav);
}
pub fn updateExports(
@@ -2955,9 +2951,9 @@ pub fn updateExports(
return self.zigObjectPtr().?.updateExports(self, pt, exported, export_indices);
}
-pub fn updateDeclLineNumber(self: *Elf, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNavLineNumber(self: *Elf, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !void {
if (self.llvm_object) |_| return;
- return self.zigObjectPtr().?.updateDeclLineNumber(pt, decl_index);
+ return self.zigObjectPtr().?.updateNavLineNumber(pt, nav);
}
pub fn deleteExport(
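
One more aside before the ZigObject.zig hunks: the renamed Elf.zig entry points above keep the same delegation shape, forwarding to an attached LLVM object when one is present and otherwise falling back to the self-hosted ZigObject. A minimal sketch of that shape with toy types (`Elf`, `LlvmObject`, and `ZigObject` below are stand-ins, not the real linker structs):

const std = @import("std");

const LlvmObject = struct {
    fn updateNav(self: *LlvmObject, nav: u32) void {
        _ = self;
        std.debug.print("llvm backend handles nav {d}\n", .{nav});
    }
};

const ZigObject = struct {
    fn updateNav(self: *ZigObject, nav: u32) void {
        _ = self;
        std.debug.print("self-hosted backend handles nav {d}\n", .{nav});
    }
};

const Elf = struct {
    llvm_object: ?*LlvmObject = null,
    zig_object: ZigObject = .{},

    fn updateNav(self: *Elf, nav: u32) void {
        // Same shape as the diff: optional capture picks the LLVM path when present,
        // otherwise the self-hosted object handles the update.
        if (self.llvm_object) |llvm_object| return llvm_object.updateNav(nav);
        return self.zig_object.updateNav(nav);
    }
};

pub fn main() void {
    var elf: Elf = .{};
    elf.updateNav(42);
}
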
diff --git a/src/link/Elf/ZigObject.zig b/src/link/Elf/ZigObject.zig
index 8384399eb9..ef3e2ed77c 100644
--- a/src/link/Elf/ZigObject.zig
+++ b/src/link/Elf/ZigObject.zig
@@ -32,35 +32,14 @@ dwarf: ?Dwarf = null,
/// Table of tracked LazySymbols.
lazy_syms: LazySymbolTable = .{},
-/// Table of tracked Decls.
-decls: DeclTable = .{},
+/// Table of tracked `Nav`s.
+navs: NavTable = .{},
/// TLS variables indexed by Atom.Index.
tls_variables: TlsTable = .{},
-/// Table of unnamed constants associated with a parent `Decl`.
-/// We store them here so that we can free the constants whenever the `Decl`
-/// needs updating or is freed.
-///
-/// For example,
-///
-/// ```zig
-/// const Foo = struct{
-/// a: u8,
-/// };
-///
-/// pub fn main() void {
-/// var foo = Foo{ .a = 1 };
-/// _ = foo;
-/// }
-/// ```
-///
-/// value assigned to label `foo` is an unnamed constant belonging/associated
-/// with `Decl` `main`, and lives as long as that `Decl`.
-unnamed_consts: UnnamedConstTable = .{},
-
-/// Table of tracked AnonDecls.
-anon_decls: AnonDeclTable = .{},
+/// Table of tracked `Uav`s.
+uavs: UavTable = .{},
debug_strtab_dirty: bool = false,
debug_abbrev_section_dirty: bool = false,
@@ -124,29 +103,21 @@ pub fn deinit(self: *ZigObject, allocator: Allocator) void {
self.relocs.deinit(allocator);
{
- var it = self.decls.iterator();
+ var it = self.navs.iterator();
while (it.next()) |entry| {
entry.value_ptr.exports.deinit(allocator);
}
- self.decls.deinit(allocator);
+ self.navs.deinit(allocator);
}
self.lazy_syms.deinit(allocator);
{
- var it = self.unnamed_consts.valueIterator();
- while (it.next()) |syms| {
- syms.deinit(allocator);
- }
- self.unnamed_consts.deinit(allocator);
- }
-
- {
- var it = self.anon_decls.iterator();
+ var it = self.uavs.iterator();
while (it.next()) |entry| {
entry.value_ptr.exports.deinit(allocator);
}
- self.anon_decls.deinit(allocator);
+ self.uavs.deinit(allocator);
}
for (self.tls_variables.values()) |*tlv| {
@@ -161,7 +132,7 @@ pub fn deinit(self: *ZigObject, allocator: Allocator) void {
pub fn flushModule(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
- if (self.lazy_syms.getPtr(.none)) |metadata| {
+ if (self.lazy_syms.getPtr(.anyerror_type)) |metadata| {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.module.?, .tid = tid };
// Most lazy symbols can be updated on first use, but
@@ -169,7 +140,7 @@ pub fn flushModule(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !voi
if (metadata.text_state != .unused) self.updateLazySymbol(
elf_file,
pt,
- link.File.LazySymbol.initDecl(.code, null, pt.zcu),
+ .{ .kind = .code, .ty = .anyerror_type },
metadata.text_symbol_index,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@@ -178,7 +149,7 @@ pub fn flushModule(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !voi
if (metadata.rodata_state != .unused) self.updateLazySymbol(
elf_file,
pt,
- link.File.LazySymbol.initDecl(.const_data, null, pt.zcu),
+ .{ .kind = .const_data, .ty = .anyerror_type },
metadata.rodata_symbol_index,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@@ -661,25 +632,25 @@ pub fn codeAlloc(self: *ZigObject, elf_file: *Elf, atom_index: Atom.Index) ![]u8
return code;
}
-pub fn getDeclVAddr(
+pub fn getNavVAddr(
self: *ZigObject,
elf_file: *Elf,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
- const decl = zcu.declPtr(decl_index);
- log.debug("getDeclVAddr {}({d})", .{ decl.fqn.fmt(ip), decl_index });
- const this_sym_index = if (decl.isExtern(zcu)) blk: {
- const name = decl.name.toSlice(ip);
- const lib_name = if (decl.getOwnedExternFunc(zcu)) |ext_fn|
- ext_fn.lib_name.toSlice(ip)
- else
- decl.getOwnedVariable(zcu).?.lib_name.toSlice(ip);
- break :blk try self.getGlobalSymbol(elf_file, name, lib_name);
- } else try self.getOrCreateMetadataForDecl(elf_file, decl_index);
+ const nav = ip.getNav(nav_index);
+ log.debug("getNavVAddr {}({d})", .{ nav.fqn.fmt(ip), nav_index });
+ const this_sym_index = switch (ip.indexToKey(nav.status.resolved.val)) {
+ .@"extern" => |@"extern"| try self.getGlobalSymbol(
+ elf_file,
+ nav.name.toSlice(ip),
+ @"extern".lib_name.toSlice(ip),
+ ),
+ else => try self.getOrCreateMetadataForNav(elf_file, nav_index),
+ };
const this_sym = self.symbol(this_sym_index);
const vaddr = this_sym.address(.{}, elf_file);
const parent_atom = self.symbol(reloc_info.parent_atom_index).atom(elf_file).?;
@@ -692,13 +663,13 @@ pub fn getDeclVAddr(
return @intCast(vaddr);
}
-pub fn getAnonDeclVAddr(
+pub fn getUavVAddr(
self: *ZigObject,
elf_file: *Elf,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
- const sym_index = self.anon_decls.get(decl_val).?.symbol_index;
+ const sym_index = self.uavs.get(uav).?.symbol_index;
const sym = self.symbol(sym_index);
const vaddr = sym.address(.{}, elf_file);
const parent_atom = self.symbol(reloc_info.parent_atom_index).atom(elf_file).?;
@@ -711,43 +682,43 @@ pub fn getAnonDeclVAddr(
return @intCast(vaddr);
}
-pub fn lowerAnonDecl(
+pub fn lowerUav(
self: *ZigObject,
elf_file: *Elf,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: InternPool.Alignment,
- src_loc: Module.LazySrcLoc,
-) !codegen.Result {
- const gpa = elf_file.base.comp.gpa;
- const mod = elf_file.base.comp.module.?;
- const ty = Type.fromInterned(mod.intern_pool.typeOf(decl_val));
- const decl_alignment = switch (explicit_alignment) {
- .none => ty.abiAlignment(pt),
+ src_loc: Zcu.LazySrcLoc,
+) !codegen.GenResult {
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const val = Value.fromInterned(uav);
+ const uav_alignment = switch (explicit_alignment) {
+ .none => val.typeOf(zcu).abiAlignment(pt),
else => explicit_alignment,
};
- if (self.anon_decls.get(decl_val)) |metadata| {
- const existing_alignment = self.symbol(metadata.symbol_index).atom(elf_file).?.alignment;
- if (decl_alignment.order(existing_alignment).compare(.lte))
- return .ok;
+ if (self.uavs.get(uav)) |metadata| {
+ const sym = self.symbol(metadata.symbol_index);
+ const existing_alignment = sym.atom(elf_file).?.alignment;
+ if (uav_alignment.order(existing_alignment).compare(.lte))
+ return .{ .mcv = .{ .load_symbol = metadata.symbol_index } };
}
- const val = Value.fromInterned(decl_val);
var name_buf: [32]u8 = undefined;
const name = std.fmt.bufPrint(&name_buf, "__anon_{d}", .{
- @intFromEnum(decl_val),
+ @intFromEnum(uav),
}) catch unreachable;
const res = self.lowerConst(
elf_file,
pt,
name,
val,
- decl_alignment,
+ uav_alignment,
elf_file.zig_data_rel_ro_section_index.?,
src_loc,
) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
- else => |e| return .{ .fail = try Module.ErrorMsg.create(
+ else => |e| return .{ .fail = try Zcu.ErrorMsg.create(
gpa,
src_loc,
"unable to lower constant value: {s}",
@@ -758,8 +729,8 @@ pub fn lowerAnonDecl(
.ok => |sym_index| sym_index,
.fail => |em| return .{ .fail = em },
};
- try self.anon_decls.put(gpa, decl_val, .{ .symbol_index = sym_index });
- return .ok;
+ try self.uavs.put(gpa, uav, .{ .symbol_index = sym_index });
+ return .{ .mcv = .{ .load_symbol = sym_index } };
}
pub fn getOrCreateMetadataForLazySymbol(
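The `lowerUav` rewrite above now reports a machine-code value (`.load_symbol`) instead of a bare `.ok`, and it reuses a previously lowered constant unless the caller asks for a stricter alignment. Below is a simplified, hypothetical model of that caching behavior, not the real Symbol/Atom machinery:

```zig
const std = @import("std");

const SymbolIndex = u32;

const UavCache = struct {
    const Entry = struct { symbol_index: SymbolIndex, alignment: u32 };

    map: std.AutoHashMapUnmanaged(u64, Entry) = .{},
    next_symbol: SymbolIndex = 0,

    fn deinit(self: *UavCache, gpa: std.mem.Allocator) void {
        self.map.deinit(gpa);
    }

    /// Reuse the existing symbol when its alignment already satisfies the request,
    /// otherwise "lower" a fresh one and remember the stricter alignment.
    fn getOrLower(self: *UavCache, gpa: std.mem.Allocator, uav: u64, alignment: u32) !SymbolIndex {
        if (self.map.get(uav)) |entry| {
            if (alignment <= entry.alignment) return entry.symbol_index;
        }
        const sym = self.next_symbol; // stands in for lowerConst() emitting a new atom
        self.next_symbol += 1;
        try self.map.put(gpa, uav, .{ .symbol_index = sym, .alignment = alignment });
        return sym;
    }
};

test "uav is lowered once per alignment class" {
    const gpa = std.testing.allocator;
    var cache: UavCache = .{};
    defer cache.deinit(gpa);

    const a = try cache.getOrLower(gpa, 123, 4);
    const b = try cache.getOrLower(gpa, 123, 1); // weaker alignment: reuse
    try std.testing.expectEqual(a, b);

    const c = try cache.getOrLower(gpa, 123, 16); // stricter alignment: re-lower
    try std.testing.expect(c != a);
}
```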
@@ -768,51 +739,32 @@ pub fn getOrCreateMetadataForLazySymbol(
pt: Zcu.PerThread,
lazy_sym: link.File.LazySymbol,
) !Symbol.Index {
- const mod = pt.zcu;
- const gpa = mod.gpa;
- const gop = try self.lazy_syms.getOrPut(gpa, lazy_sym.getDecl(mod));
+ const gop = try self.lazy_syms.getOrPut(pt.zcu.gpa, lazy_sym.ty);
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
if (!gop.found_existing) gop.value_ptr.* = .{};
- const metadata: struct {
- symbol_index: *Symbol.Index,
- state: *LazySymbolMetadata.State,
- } = switch (lazy_sym.kind) {
- .code => .{
- .symbol_index = &gop.value_ptr.text_symbol_index,
- .state = &gop.value_ptr.text_state,
- },
- .const_data => .{
- .symbol_index = &gop.value_ptr.rodata_symbol_index,
- .state = &gop.value_ptr.rodata_state,
- },
+ const symbol_index_ptr, const state_ptr = switch (lazy_sym.kind) {
+ .code => .{ &gop.value_ptr.text_symbol_index, &gop.value_ptr.text_state },
+ .const_data => .{ &gop.value_ptr.rodata_symbol_index, &gop.value_ptr.rodata_state },
};
- switch (metadata.state.*) {
+ switch (state_ptr.*) {
.unused => {
+ const gpa = elf_file.base.comp.gpa;
const symbol_index = try self.newSymbolWithAtom(gpa, 0);
const sym = self.symbol(symbol_index);
sym.flags.needs_zig_got = true;
- metadata.symbol_index.* = symbol_index;
+ symbol_index_ptr.* = symbol_index;
},
- .pending_flush => return metadata.symbol_index.*,
+ .pending_flush => return symbol_index_ptr.*,
.flushed => {},
}
- metadata.state.* = .pending_flush;
- const symbol_index = metadata.symbol_index.*;
+ state_ptr.* = .pending_flush;
+ const symbol_index = symbol_index_ptr.*;
// anyerror needs to be deferred until flushModule
- if (lazy_sym.getDecl(mod) != .none) try self.updateLazySymbol(elf_file, pt, lazy_sym, symbol_index);
+ if (lazy_sym.ty != .anyerror_type) try self.updateLazySymbol(elf_file, pt, lazy_sym, symbol_index);
return symbol_index;
}
-fn freeUnnamedConsts(self: *ZigObject, elf_file: *Elf, decl_index: InternPool.DeclIndex) void {
- const gpa = elf_file.base.comp.gpa;
- const unnamed_consts = self.unnamed_consts.getPtr(decl_index) orelse return;
- for (unnamed_consts.items) |sym_index| {
- self.freeDeclMetadata(elf_file, sym_index);
- }
- unnamed_consts.clearAndFree(gpa);
-}
-
-fn freeDeclMetadata(self: *ZigObject, elf_file: *Elf, sym_index: Symbol.Index) void {
+fn freeNavMetadata(self: *ZigObject, elf_file: *Elf, sym_index: Symbol.Index) void {
const sym = self.symbol(sym_index);
sym.atom(elf_file).?.free(elf_file);
log.debug("adding %{d} to local symbols free list", .{sym_index});
@@ -820,38 +772,37 @@ fn freeDeclMetadata(self: *ZigObject, elf_file: *Elf, sym_index: Symbol.Index) v
// TODO free GOT entry here
}
-pub fn freeDecl(self: *ZigObject, elf_file: *Elf, decl_index: InternPool.DeclIndex) void {
+pub fn freeNav(self: *ZigObject, elf_file: *Elf, nav_index: InternPool.Nav.Index) void {
const gpa = elf_file.base.comp.gpa;
- log.debug("freeDecl ({d})", .{decl_index});
+ log.debug("freeNav ({d})", .{nav_index});
- if (self.decls.fetchRemove(decl_index)) |const_kv| {
+ if (self.navs.fetchRemove(nav_index)) |const_kv| {
var kv = const_kv;
const sym_index = kv.value.symbol_index;
- self.freeDeclMetadata(elf_file, sym_index);
- self.freeUnnamedConsts(elf_file, decl_index);
+ self.freeNavMetadata(elf_file, sym_index);
kv.value.exports.deinit(gpa);
}
if (self.dwarf) |*dw| {
- dw.freeDecl(decl_index);
+ dw.freeNav(nav_index);
}
}
-pub fn getOrCreateMetadataForDecl(
+pub fn getOrCreateMetadataForNav(
self: *ZigObject,
elf_file: *Elf,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
) !Symbol.Index {
const gpa = elf_file.base.comp.gpa;
- const gop = try self.decls.getOrPut(gpa, decl_index);
+ const gop = try self.navs.getOrPut(gpa, nav_index);
if (!gop.found_existing) {
const any_non_single_threaded = elf_file.base.comp.config.any_non_single_threaded;
const symbol_index = try self.newSymbolWithAtom(gpa, 0);
- const mod = elf_file.base.comp.module.?;
- const decl = mod.declPtr(decl_index);
+ const zcu = elf_file.base.comp.module.?;
+ const nav_val = Value.fromInterned(zcu.intern_pool.getNav(nav_index).status.resolved.val);
const sym = self.symbol(symbol_index);
- if (decl.getOwnedVariable(mod)) |variable| {
+ if (nav_val.getVariable(zcu)) |variable| {
if (variable.is_threadlocal and any_non_single_threaded) {
sym.flags.is_tls = true;
}
@@ -864,89 +815,81 @@ pub fn getOrCreateMetadataForDecl(
return gop.value_ptr.symbol_index;
}
-fn getDeclShdrIndex(
+fn getNavShdrIndex(
self: *ZigObject,
elf_file: *Elf,
- decl: *const Module.Decl,
+ zcu: *Zcu,
+ nav_index: InternPool.Nav.Index,
code: []const u8,
) error{OutOfMemory}!u32 {
_ = self;
- const mod = elf_file.base.comp.module.?;
+ const ip = &zcu.intern_pool;
const any_non_single_threaded = elf_file.base.comp.config.any_non_single_threaded;
- const shdr_index = switch (decl.typeOf(mod).zigTypeTag(mod)) {
- .Fn => elf_file.zig_text_section_index.?,
- else => blk: {
- if (decl.getOwnedVariable(mod)) |variable| {
- if (variable.is_threadlocal and any_non_single_threaded) {
- const is_all_zeroes = for (code) |byte| {
- if (byte != 0) break false;
- } else true;
- if (is_all_zeroes) break :blk elf_file.sectionByName(".tbss") orelse try elf_file.addSection(.{
- .type = elf.SHT_NOBITS,
- .flags = elf.SHF_ALLOC | elf.SHF_WRITE | elf.SHF_TLS,
- .name = try elf_file.insertShString(".tbss"),
- .offset = std.math.maxInt(u64),
- });
-
- break :blk elf_file.sectionByName(".tdata") orelse try elf_file.addSection(.{
- .type = elf.SHT_PROGBITS,
- .flags = elf.SHF_ALLOC | elf.SHF_WRITE | elf.SHF_TLS,
- .name = try elf_file.insertShString(".tdata"),
- .offset = std.math.maxInt(u64),
- });
- }
- if (variable.is_const) break :blk elf_file.zig_data_rel_ro_section_index.?;
- if (Value.fromInterned(variable.init).isUndefDeep(mod)) {
- // TODO: get the optimize_mode from the Module that owns the decl instead
- // of using the root module here.
- break :blk switch (elf_file.base.comp.root_mod.optimize_mode) {
- .Debug, .ReleaseSafe => elf_file.zig_data_section_index.?,
- .ReleaseFast, .ReleaseSmall => elf_file.zig_bss_section_index.?,
- };
- }
- // TODO I blatantly copied the logic from the Wasm linker, but is there a less
- // intrusive check for all zeroes than this?
- const is_all_zeroes = for (code) |byte| {
- if (byte != 0) break false;
- } else true;
- if (is_all_zeroes) break :blk elf_file.zig_bss_section_index.?;
- break :blk elf_file.zig_data_section_index.?;
- }
- break :blk elf_file.zig_data_rel_ro_section_index.?;
- },
+ const nav_val = zcu.navValue(nav_index);
+ if (ip.isFunctionType(nav_val.typeOf(zcu).toIntern())) return elf_file.zig_text_section_index.?;
+ const is_const, const is_threadlocal, const nav_init = switch (ip.indexToKey(nav_val.toIntern())) {
+ .variable => |variable| .{ false, variable.is_threadlocal, variable.init },
+ .@"extern" => |@"extern"| .{ @"extern".is_const, @"extern".is_threadlocal, .none },
+ else => .{ true, false, nav_val.toIntern() },
};
- return shdr_index;
+ if (any_non_single_threaded and is_threadlocal) {
+ for (code) |byte| {
+ if (byte != 0) break;
+ } else return elf_file.sectionByName(".tbss") orelse try elf_file.addSection(.{
+ .type = elf.SHT_NOBITS,
+ .flags = elf.SHF_ALLOC | elf.SHF_WRITE | elf.SHF_TLS,
+ .name = try elf_file.insertShString(".tbss"),
+ .offset = std.math.maxInt(u64),
+ });
+ return elf_file.sectionByName(".tdata") orelse try elf_file.addSection(.{
+ .type = elf.SHT_PROGBITS,
+ .flags = elf.SHF_ALLOC | elf.SHF_WRITE | elf.SHF_TLS,
+ .name = try elf_file.insertShString(".tdata"),
+ .offset = std.math.maxInt(u64),
+ });
+ }
+ if (is_const) return elf_file.zig_data_rel_ro_section_index.?;
+ if (nav_init != .none and Value.fromInterned(nav_init).isUndefDeep(zcu))
+ return switch (zcu.navFileScope(nav_index).mod.optimize_mode) {
+ .Debug, .ReleaseSafe => elf_file.zig_data_section_index.?,
+ .ReleaseFast, .ReleaseSmall => elf_file.zig_bss_section_index.?,
+ };
+ for (code) |byte| {
+ if (byte != 0) break;
+ } else return elf_file.zig_bss_section_index.?;
+ return elf_file.zig_data_section_index.?;
}
-fn updateDeclCode(
+fn updateNavCode(
self: *ZigObject,
elf_file: *Elf,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
sym_index: Symbol.Index,
shdr_index: u32,
code: []const u8,
stt_bits: u8,
) !void {
- const gpa = elf_file.base.comp.gpa;
- const mod = pt.zcu;
- const ip = &mod.intern_pool;
- const decl = mod.declPtr(decl_index);
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
- log.debug("updateDeclCode {}({d})", .{ decl.fqn.fmt(ip), decl_index });
+ log.debug("updateNavCode {}({d})", .{ nav.fqn.fmt(ip), nav_index });
- const required_alignment = decl.getAlignment(pt).max(
- target_util.minFunctionAlignment(mod.getTarget()),
+ const required_alignment = pt.navAlignment(nav_index).max(
+ target_util.minFunctionAlignment(zcu.navFileScope(nav_index).mod.resolved_target.result),
);
const sym = self.symbol(sym_index);
const esym = &self.symtab.items(.elf_sym)[sym.esym_index];
const atom_ptr = sym.atom(elf_file).?;
- const name_offset = try self.strtab.insert(gpa, decl.fqn.toSlice(ip));
+ const name_offset = try self.strtab.insert(gpa, nav.fqn.toSlice(ip));
atom_ptr.alive = true;
atom_ptr.name_offset = name_offset;
atom_ptr.output_section_index = shdr_index;
+
sym.name_offset = name_offset;
esym.st_name = name_offset;
esym.st_info |= stt_bits;
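`getNavShdrIndex` above flattens the old nested block into early returns; its all-zero-bytes test relies on Zig's `for ... else`, where the `else` arm runs only if the loop finished without hitting `break`. A tiny self-contained illustration of that routing check:

```zig
const std = @import("std");

/// Mirrors the `for (code) |byte| { if (byte != 0) break; } else return ...` checks
/// above: the `else` arm only runs when no non-zero byte was found, which is what
/// routes all-zero data into .bss/.tbss-style sections.
fn isAllZeroes(code: []const u8) bool {
    for (code) |byte| {
        if (byte != 0) break;
    } else return true;
    return false;
}

test "all-zeroes routing check" {
    try std.testing.expect(isAllZeroes(&.{ 0, 0, 0 }));
    try std.testing.expect(!isAllZeroes(&.{ 0, 1, 0 }));
}
```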
@@ -962,7 +905,7 @@ fn updateDeclCode(
const need_realloc = code.len > capacity or !required_alignment.check(@intCast(atom_ptr.value));
if (need_realloc) {
try atom_ptr.grow(elf_file);
- log.debug("growing {} from 0x{x} to 0x{x}", .{ decl.fqn.fmt(ip), old_vaddr, atom_ptr.value });
+ log.debug("growing {} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), old_vaddr, atom_ptr.value });
if (old_vaddr != atom_ptr.value) {
sym.value = 0;
esym.st_value = 0;
@@ -979,7 +922,7 @@ fn updateDeclCode(
}
} else {
try atom_ptr.allocate(elf_file);
- errdefer self.freeDeclMetadata(elf_file, sym_index);
+ errdefer self.freeNavMetadata(elf_file, sym_index);
sym.value = 0;
sym.flags.needs_zig_got = true;
@@ -1023,24 +966,24 @@ fn updateTlv(
self: *ZigObject,
elf_file: *Elf,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
sym_index: Symbol.Index,
shndx: u32,
code: []const u8,
) !void {
- const mod = pt.zcu;
- const ip = &mod.intern_pool;
- const gpa = mod.gpa;
- const decl = mod.declPtr(decl_index);
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const gpa = zcu.gpa;
+ const nav = ip.getNav(nav_index);
- log.debug("updateTlv {}({d})", .{ decl.fqn.fmt(ip), decl_index });
+ log.debug("updateTlv {}({d})", .{ nav.fqn.fmt(ip), nav_index });
- const required_alignment = decl.getAlignment(pt);
+ const required_alignment = pt.navAlignment(nav_index);
const sym = self.symbol(sym_index);
const esym = &self.symtab.items(.elf_sym)[sym.esym_index];
const atom_ptr = sym.atom(elf_file).?;
- const name_offset = try self.strtab.insert(gpa, decl.fqn.toSlice(ip));
+ const name_offset = try self.strtab.insert(gpa, nav.fqn.toSlice(ip));
sym.value = 0;
sym.name_offset = name_offset;
@@ -1049,6 +992,7 @@ fn updateTlv(
atom_ptr.alive = true;
atom_ptr.name_offset = name_offset;
+ sym.name_offset = name_offset;
esym.st_value = 0;
esym.st_name = name_offset;
esym.st_info = elf.STT_TLS;
@@ -1086,53 +1030,49 @@ pub fn updateFunc(
const tracy = trace(@src());
defer tracy.end();
- const mod = pt.zcu;
- const ip = &mod.intern_pool;
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
const gpa = elf_file.base.comp.gpa;
- const func = mod.funcInfo(func_index);
- const decl_index = func.owner_decl;
- const decl = mod.declPtr(decl_index);
+ const func = zcu.funcInfo(func_index);
- log.debug("updateFunc {}({d})", .{ decl.fqn.fmt(ip), decl_index });
+ log.debug("updateFunc {}({d})", .{ ip.getNav(func.owner_nav).fqn.fmt(ip), func.owner_nav });
- const sym_index = try self.getOrCreateMetadataForDecl(elf_file, decl_index);
- self.freeUnnamedConsts(elf_file, decl_index);
+ const sym_index = try self.getOrCreateMetadataForNav(elf_file, func.owner_nav);
self.symbol(sym_index).atom(elf_file).?.freeRelocs(elf_file);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
- var decl_state: ?Dwarf.DeclState = if (self.dwarf) |*dw| try dw.initDeclState(pt, decl_index) else null;
- defer if (decl_state) |*ds| ds.deinit();
+ var dwarf_state = if (self.dwarf) |*dw| try dw.initNavState(pt, func.owner_nav) else null;
+ defer if (dwarf_state) |*ds| ds.deinit();
const res = try codegen.generateFunction(
&elf_file.base,
pt,
- decl.navSrcLoc(mod),
+ zcu.navSrcLoc(func.owner_nav),
func_index,
air,
liveness,
&code_buffer,
- if (decl_state) |*ds| .{ .dwarf = ds } else .none,
+ if (dwarf_state) |*ds| .{ .dwarf = ds } else .none,
);
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
- func.setAnalysisState(&mod.intern_pool, .codegen_failure);
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(gpa, func.owner_nav, em);
return;
},
};
- const shndx = try self.getDeclShdrIndex(elf_file, decl, code);
- try self.updateDeclCode(elf_file, pt, decl_index, sym_index, shndx, code, elf.STT_FUNC);
+ const shndx = try self.getNavShdrIndex(elf_file, zcu, func.owner_nav, code);
+ try self.updateNavCode(elf_file, pt, func.owner_nav, sym_index, shndx, code, elf.STT_FUNC);
- if (decl_state) |*ds| {
+ if (dwarf_state) |*ds| {
const sym = self.symbol(sym_index);
- try self.dwarf.?.commitDeclState(
+ try self.dwarf.?.commitNavState(
pt,
- decl_index,
+ func.owner_nav,
@intCast(sym.address(.{}, elf_file)),
sym.atom(elf_file).?.size,
ds,
@@ -1142,78 +1082,80 @@ pub fn updateFunc(
// Exports will be updated by `Zcu.processExports` after the update.
}
-pub fn updateDecl(
+pub fn updateNav(
self: *ZigObject,
elf_file: *Elf,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
-) link.File.UpdateDeclError!void {
+ nav_index: InternPool.Nav.Index,
+) link.File.UpdateNavError!void {
const tracy = trace(@src());
defer tracy.end();
- const mod = pt.zcu;
- const ip = &mod.intern_pool;
- const decl = mod.declPtr(decl_index);
-
- log.debug("updateDecl {}({d})", .{ decl.fqn.fmt(ip), decl_index });
-
- if (decl.val.getExternFunc(mod)) |_| return;
- if (decl.isExtern(mod)) {
- // Extern variable gets a .got entry only.
- const variable = decl.getOwnedVariable(mod).?;
- const name = decl.name.toSlice(&mod.intern_pool);
- const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
- const sym_index = try self.getGlobalSymbol(elf_file, name, lib_name);
- self.symbol(sym_index).flags.needs_got = true;
- return;
- }
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
+
+ log.debug("updateNav {}({d})", .{ nav.fqn.fmt(ip), nav_index });
+
+ const nav_val = zcu.navValue(nav_index);
+ const nav_init = switch (ip.indexToKey(nav_val.toIntern())) {
+ .variable => |variable| Value.fromInterned(variable.init),
+ .@"extern" => |@"extern"| {
+ if (ip.isFunctionType(@"extern".ty)) return;
+ // Extern variable gets a .got entry only.
+ const sym_index = try self.getGlobalSymbol(
+ elf_file,
+ nav.name.toSlice(ip),
+ @"extern".lib_name.toSlice(ip),
+ );
+ self.symbol(sym_index).flags.needs_got = true;
+ return;
+ },
+ else => nav_val,
+ };
- const sym_index = try self.getOrCreateMetadataForDecl(elf_file, decl_index);
+ const sym_index = try self.getOrCreateMetadataForNav(elf_file, nav_index);
self.symbol(sym_index).atom(elf_file).?.freeRelocs(elf_file);
- const gpa = elf_file.base.comp.gpa;
- var code_buffer = std.ArrayList(u8).init(gpa);
+ var code_buffer = std.ArrayList(u8).init(zcu.gpa);
defer code_buffer.deinit();
- var decl_state: ?Dwarf.DeclState = if (self.dwarf) |*dw| try dw.initDeclState(pt, decl_index) else null;
- defer if (decl_state) |*ds| ds.deinit();
+ var nav_state: ?Dwarf.NavState = if (self.dwarf) |*dw| try dw.initNavState(pt, nav_index) else null;
+ defer if (nav_state) |*ns| ns.deinit();
// TODO implement .debug_info for global variables
- const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
- const res = if (decl_state) |*ds|
- try codegen.generateSymbol(&elf_file.base, pt, decl.navSrcLoc(mod), decl_val, &code_buffer, .{
- .dwarf = ds,
- }, .{
- .parent_atom_index = sym_index,
- })
- else
- try codegen.generateSymbol(&elf_file.base, pt, decl.navSrcLoc(mod), decl_val, &code_buffer, .none, .{
- .parent_atom_index = sym_index,
- });
+ const res = try codegen.generateSymbol(
+ &elf_file.base,
+ pt,
+ zcu.navSrcLoc(nav_index),
+ nav_init,
+ &code_buffer,
+ if (nav_state) |*ns| .{ .dwarf = ns } else .none,
+ .{ .parent_atom_index = sym_index },
+ );
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(zcu.gpa, nav_index, em);
return;
},
};
- const shndx = try self.getDeclShdrIndex(elf_file, decl, code);
+ const shndx = try self.getNavShdrIndex(elf_file, zcu, nav_index, code);
if (elf_file.shdrs.items[shndx].sh_flags & elf.SHF_TLS != 0)
- try self.updateTlv(elf_file, pt, decl_index, sym_index, shndx, code)
+ try self.updateTlv(elf_file, pt, nav_index, sym_index, shndx, code)
else
- try self.updateDeclCode(elf_file, pt, decl_index, sym_index, shndx, code, elf.STT_OBJECT);
+ try self.updateNavCode(elf_file, pt, nav_index, sym_index, shndx, code, elf.STT_OBJECT);
- if (decl_state) |*ds| {
+ if (nav_state) |*ns| {
const sym = self.symbol(sym_index);
- try self.dwarf.?.commitDeclState(
+ try self.dwarf.?.commitNavState(
pt,
- decl_index,
+ nav_index,
@intCast(sym.address(.{}, elf_file)),
sym.atom(elf_file).?.size,
- ds,
+ ns,
);
}
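`updateNav` threads optional Dwarf state into codegen as a tagged union: `.{ .dwarf = ns }` when debug info is being tracked, `.none` otherwise. A hypothetical standalone sketch of that optional-to-union mapping, with stand-in types rather than the real `Dwarf.NavState`:

```zig
const std = @import("std");

const NavState = struct { entries: u32 = 0 };

const DebugInfoOutput = union(enum) {
    none,
    dwarf: *NavState,
};

// Maps "maybe tracking debug info" onto the union in one expression, the same way
// the call sites above pass `if (nav_state) |*ns| .{ .dwarf = ns } else .none`.
fn debugOutput(nav_state: *?NavState) DebugInfoOutput {
    return if (nav_state.*) |*ns| .{ .dwarf = ns } else .none;
}

test "optional state maps onto the union" {
    var no_state: ?NavState = null;
    try std.testing.expect(debugOutput(&no_state) == .none);

    var some_state: ?NavState = .{};
    try std.testing.expect(debugOutput(&some_state) == .dwarf);
}
```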
@@ -1237,13 +1179,13 @@ fn updateLazySymbol(
const name_str_index = blk: {
const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{
@tagName(sym.kind),
- sym.ty.fmt(pt),
+ Type.fromInterned(sym.ty).fmt(pt),
});
defer gpa.free(name);
break :blk try self.strtab.insert(gpa, name);
};
- const src = sym.ty.srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
+ const src = Type.fromInterned(sym.ty).srcLocOrNull(mod) orelse Zcu.LazySrcLoc.unneeded;
const res = try codegen.generateLazySymbol(
&elf_file.base,
pt,
@@ -1280,7 +1222,7 @@ fn updateLazySymbol(
atom_ptr.output_section_index = output_section_index;
try atom_ptr.allocate(elf_file);
- errdefer self.freeDeclMetadata(elf_file, symbol_index);
+ errdefer self.freeNavMetadata(elf_file, symbol_index);
local_sym.value = 0;
local_sym.flags.needs_zig_got = true;
@@ -1296,49 +1238,9 @@ fn updateLazySymbol(
try elf_file.base.file.?.pwriteAll(code, file_offset);
}
-pub fn lowerUnnamedConst(
- self: *ZigObject,
- elf_file: *Elf,
- pt: Zcu.PerThread,
- val: Value,
- decl_index: InternPool.DeclIndex,
-) !u32 {
- const gpa = elf_file.base.comp.gpa;
- const mod = elf_file.base.comp.module.?;
- const gop = try self.unnamed_consts.getOrPut(gpa, decl_index);
- if (!gop.found_existing) {
- gop.value_ptr.* = .{};
- }
- const unnamed_consts = gop.value_ptr;
- const decl = mod.declPtr(decl_index);
- const index = unnamed_consts.items.len;
- const name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl.fqn.fmt(&mod.intern_pool), index });
- defer gpa.free(name);
- const ty = val.typeOf(mod);
- const sym_index = switch (try self.lowerConst(
- elf_file,
- pt,
- name,
- val,
- ty.abiAlignment(pt),
- elf_file.zig_data_rel_ro_section_index.?,
- decl.navSrcLoc(mod),
- )) {
- .ok => |sym_index| sym_index,
- .fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
- log.err("{s}", .{em.msg});
- return error.CodegenFail;
- },
- };
- try unnamed_consts.append(gpa, sym_index);
- return sym_index;
-}
-
const LowerConstResult = union(enum) {
ok: Symbol.Index,
- fail: *Module.ErrorMsg,
+ fail: *Zcu.ErrorMsg,
};
fn lowerConst(
@@ -1349,7 +1251,7 @@ fn lowerConst(
val: Value,
required_alignment: InternPool.Alignment,
output_section_index: u32,
- src_loc: Module.LazySrcLoc,
+ src_loc: Zcu.LazySrcLoc,
) !LowerConstResult {
const gpa = pt.zcu.gpa;
@@ -1384,7 +1286,8 @@ fn lowerConst(
atom_ptr.output_section_index = output_section_index;
try atom_ptr.allocate(elf_file);
- errdefer self.freeDeclMetadata(elf_file, sym_index);
+ // TODO rename and re-audit this method
+ errdefer self.freeNavMetadata(elf_file, sym_index);
const shdr = elf_file.shdrs.items[output_section_index];
const file_offset = shdr.sh_offset + @as(u64, @intCast(atom_ptr.value));
@@ -1397,7 +1300,7 @@ pub fn updateExports(
self: *ZigObject,
elf_file: *Elf,
pt: Zcu.PerThread,
- exported: Module.Exported,
+ exported: Zcu.Exported,
export_indices: []const u32,
) link.File.UpdateExportsError!void {
const tracy = trace(@src());
@@ -1406,24 +1309,24 @@ pub fn updateExports(
const mod = pt.zcu;
const gpa = elf_file.base.comp.gpa;
const metadata = switch (exported) {
- .decl_index => |decl_index| blk: {
- _ = try self.getOrCreateMetadataForDecl(elf_file, decl_index);
- break :blk self.decls.getPtr(decl_index).?;
+ .nav => |nav| blk: {
+ _ = try self.getOrCreateMetadataForNav(elf_file, nav);
+ break :blk self.navs.getPtr(nav).?;
},
- .value => |value| self.anon_decls.getPtr(value) orelse blk: {
+ .uav => |uav| self.uavs.getPtr(uav) orelse blk: {
const first_exp = mod.all_exports.items[export_indices[0]];
- const res = try self.lowerAnonDecl(elf_file, pt, value, .none, first_exp.src);
+ const res = try self.lowerUav(elf_file, pt, uav, .none, first_exp.src);
switch (res) {
- .ok => {},
+ .mcv => {},
.fail => |em| {
- // TODO maybe it's enough to return an error here and let Module.processExportsInner
+ // TODO maybe it's enough to return an error here and let Zcu.processExportsInner
// handle the error?
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
return;
},
}
- break :blk self.anon_decls.getPtr(value).?;
+ break :blk self.uavs.getPtr(uav).?;
},
};
const sym_index = metadata.symbol_index;
@@ -1436,7 +1339,7 @@ pub fn updateExports(
if (exp.opts.section.unwrap()) |section_name| {
if (!section_name.eqlSlice(".text", &mod.intern_pool)) {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
- mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Module.ErrorMsg.create(
+ mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
gpa,
exp.src,
"Unimplemented: ExportOptions.section",
@@ -1451,7 +1354,7 @@ pub fn updateExports(
.weak => elf.STB_WEAK,
.link_once => {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
- mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Module.ErrorMsg.create(
+ mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
gpa,
exp.src,
"Unimplemented: GlobalLinkage.LinkOnce",
@@ -1487,21 +1390,22 @@ pub fn updateExports(
}
}
-/// Must be called only after a successful call to `updateDecl`.
-pub fn updateDeclLineNumber(
+/// Must be called only after a successful call to `updateNav`.
+pub fn updateNavLineNumber(
self: *ZigObject,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
) !void {
const tracy = trace(@src());
defer tracy.end();
- const decl = pt.zcu.declPtr(decl_index);
+ const ip = &pt.zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
- log.debug("updateDeclLineNumber {}({d})", .{ decl.fqn.fmt(&pt.zcu.intern_pool), decl_index });
+ log.debug("updateNavLineNumber {}({d})", .{ nav.fqn.fmt(ip), nav_index });
if (self.dwarf) |*dw| {
- try dw.updateDeclLineNumber(pt.zcu, decl_index);
+ try dw.updateNavLineNumber(pt.zcu, nav_index);
}
}
@@ -1512,9 +1416,9 @@ pub fn deleteExport(
name: InternPool.NullTerminatedString,
) void {
const metadata = switch (exported) {
- .decl_index => |decl_index| self.decls.getPtr(decl_index) orelse return,
- .value => |value| self.anon_decls.getPtr(value) orelse return,
- };
+ .nav => |nav| self.navs.getPtr(nav),
+ .uav => |uav| self.uavs.getPtr(uav),
+ } orelse return;
const mod = elf_file.base.comp.module.?;
const exp_name = name.toSlice(&mod.intern_pool);
const esym_index = metadata.@"export"(self, exp_name) orelse return;
@@ -1754,14 +1658,14 @@ const LazySymbolMetadata = struct {
rodata_state: State = .unused,
};
-const DeclMetadata = struct {
+const AvMetadata = struct {
symbol_index: Symbol.Index,
- /// A list of all exports aliases of this Decl.
+    /// A list of all export aliases of this Av.
exports: std.ArrayListUnmanaged(Symbol.Index) = .{},
- fn @"export"(m: DeclMetadata, zo: *ZigObject, name: []const u8) ?*u32 {
+ fn @"export"(m: AvMetadata, zig_object: *ZigObject, name: []const u8) ?*u32 {
for (m.exports.items) |*exp| {
- const exp_name = zo.getString(zo.symbol(exp.*).name_offset);
+ const exp_name = zig_object.getString(zig_object.symbol(exp.*).name_offset);
if (mem.eql(u8, name, exp_name)) return exp;
}
return null;
@@ -1778,10 +1682,9 @@ const TlsVariable = struct {
};
const AtomList = std.ArrayListUnmanaged(Atom.Index);
-const UnnamedConstTable = std.AutoHashMapUnmanaged(InternPool.DeclIndex, std.ArrayListUnmanaged(Symbol.Index));
-const DeclTable = std.AutoHashMapUnmanaged(InternPool.DeclIndex, DeclMetadata);
-const AnonDeclTable = std.AutoHashMapUnmanaged(InternPool.Index, DeclMetadata);
-const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.OptionalDeclIndex, LazySymbolMetadata);
+const NavTable = std.AutoHashMapUnmanaged(InternPool.Nav.Index, AvMetadata);
+const UavTable = std.AutoHashMapUnmanaged(InternPool.Index, AvMetadata);
+const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.Index, LazySymbolMetadata);
const TlsTable = std.AutoArrayHashMapUnmanaged(Atom.Index, TlsVariable);
const assert = std.debug.assert;
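The table aliases above rename `DeclTable`/`AnonDeclTable` to `NavTable`/`UavTable`; each entry still owns a list of export symbol indices, so teardown frees every nested list before the map itself. A simplified, hypothetical model of that ownership:

```zig
const std = @import("std");

const SymbolIndex = u32;

const AvMetadata = struct {
    symbol_index: SymbolIndex,
    exports: std.ArrayListUnmanaged(SymbolIndex) = .{},
};

const NavTable = std.AutoHashMapUnmanaged(u32, AvMetadata);

// Each value owns its export list, so the list must be freed before the map,
// exactly like the deinit loops over `navs`/`uavs` above.
fn deinitNavTable(navs: *NavTable, gpa: std.mem.Allocator) void {
    var it = navs.iterator();
    while (it.next()) |entry| {
        entry.value_ptr.exports.deinit(gpa);
    }
    navs.deinit(gpa);
}

test "nav table frees nested export lists" {
    const gpa = std.testing.allocator;
    var navs: NavTable = .{};
    var meta: AvMetadata = .{ .symbol_index = 1 };
    try meta.exports.append(gpa, 2);
    try navs.put(gpa, 0, meta);
    deinitNavTable(&navs, gpa);
}
```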
@@ -1792,8 +1695,8 @@ const link = @import("../../link.zig");
const log = std.log.scoped(.link);
const mem = std.mem;
const relocation = @import("relocation.zig");
-const trace = @import("../../tracy.zig").trace;
const target_util = @import("../../target.zig");
+const trace = @import("../../tracy.zig").trace;
const std = @import("std");
const Air = @import("../../Air.zig");
@@ -1806,8 +1709,6 @@ const File = @import("file.zig").File;
const InternPool = @import("../../InternPool.zig");
const Liveness = @import("../../Liveness.zig");
const Zcu = @import("../../Zcu.zig");
-/// Deprecated.
-const Module = Zcu;
const Object = @import("Object.zig");
const Symbol = @import("Symbol.zig");
const StringTable = @import("../StringTable.zig");
diff --git a/src/link/MachO.zig b/src/link/MachO.zig
index d57a7ff7c0..7c0b79a0f1 100644
--- a/src/link/MachO.zig
+++ b/src/link/MachO.zig
@@ -2998,21 +2998,17 @@ pub fn updateFunc(self: *MachO, pt: Zcu.PerThread, func_index: InternPool.Index,
return self.getZigObject().?.updateFunc(self, pt, func_index, air, liveness);
}
-pub fn lowerUnnamedConst(self: *MachO, pt: Zcu.PerThread, val: Value, decl_index: InternPool.DeclIndex) !u32 {
- return self.getZigObject().?.lowerUnnamedConst(self, pt, val, decl_index);
-}
-
-pub fn updateDecl(self: *MachO, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNav(self: *MachO, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !void {
if (build_options.skip_non_native and builtin.object_format != .macho) {
@panic("Attempted to compile for object format that was disabled by build configuration");
}
- if (self.llvm_object) |llvm_object| return llvm_object.updateDecl(pt, decl_index);
- return self.getZigObject().?.updateDecl(self, pt, decl_index);
+ if (self.llvm_object) |llvm_object| return llvm_object.updateNav(pt, nav);
+ return self.getZigObject().?.updateNav(self, pt, nav);
}
-pub fn updateDeclLineNumber(self: *MachO, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNavLineNumber(self: *MachO, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !void {
if (self.llvm_object) |_| return;
- return self.getZigObject().?.updateDeclLineNumber(pt, decl_index);
+ return self.getZigObject().?.updateNavLineNumber(pt, nav);
}
pub fn updateExports(
@@ -3037,29 +3033,29 @@ pub fn deleteExport(
return self.getZigObject().?.deleteExport(self, exported, name);
}
-pub fn freeDecl(self: *MachO, decl_index: InternPool.DeclIndex) void {
- if (self.llvm_object) |llvm_object| return llvm_object.freeDecl(decl_index);
- return self.getZigObject().?.freeDecl(decl_index);
+pub fn freeNav(self: *MachO, nav: InternPool.Nav.Index) void {
+ if (self.llvm_object) |llvm_object| return llvm_object.freeNav(nav);
+ return self.getZigObject().?.freeNav(nav);
}
-pub fn getDeclVAddr(self: *MachO, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex, reloc_info: link.File.RelocInfo) !u64 {
+pub fn getNavVAddr(self: *MachO, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);
- return self.getZigObject().?.getDeclVAddr(self, pt, decl_index, reloc_info);
+ return self.getZigObject().?.getNavVAddr(self, pt, nav_index, reloc_info);
}
-pub fn lowerAnonDecl(
+pub fn lowerUav(
self: *MachO,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Module.LazySrcLoc,
-) !codegen.Result {
- return self.getZigObject().?.lowerAnonDecl(self, pt, decl_val, explicit_alignment, src_loc);
+) !codegen.GenResult {
+ return self.getZigObject().?.lowerUav(self, pt, uav, explicit_alignment, src_loc);
}
-pub fn getAnonDeclVAddr(self: *MachO, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
+pub fn getUavVAddr(self: *MachO, uav: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
assert(self.llvm_object == null);
- return self.getZigObject().?.getAnonDeclVAddr(self, decl_val, reloc_info);
+ return self.getZigObject().?.getUavVAddr(self, uav, reloc_info);
}
pub fn getGlobalSymbol(self: *MachO, name: []const u8, lib_name: ?[]const u8) !u32 {
@@ -4051,8 +4047,6 @@ const is_hot_update_compatible = switch (builtin.target.os.tag) {
const default_entry_symbol_name = "_main";
-pub const base_tag: link.File.Tag = link.File.Tag.macho;
-
const Section = struct {
header: macho.section_64,
segment_id: u8,
diff --git a/src/link/MachO/Atom.zig b/src/link/MachO/Atom.zig
index 8a47a30264..d0e0fe377d 100644
--- a/src/link/MachO/Atom.zig
+++ b/src/link/MachO/Atom.zig
@@ -992,6 +992,8 @@ pub fn writeRelocs(self: Atom, macho_file: *MachO, code: []u8, buffer: []macho.r
const tracy = trace(@src());
defer tracy.end();
+ relocs_log.debug("{x}: {s}", .{ self.getAddress(macho_file), self.getName(macho_file) });
+
const cpu_arch = macho_file.getTarget().cpu.arch;
const relocs = self.getRelocs(macho_file);
@@ -1015,6 +1017,24 @@ pub fn writeRelocs(self: Atom, macho_file: *MachO, code: []u8, buffer: []macho.r
addend += target;
}
+ switch (rel.tag) {
+ .local => relocs_log.debug(" {}: [{x} => {d}({s},{s})] + {x}", .{
+ rel.fmtPretty(cpu_arch),
+ r_address,
+ r_symbolnum,
+ macho_file.sections.items(.header)[r_symbolnum - 1].segName(),
+ macho_file.sections.items(.header)[r_symbolnum - 1].sectName(),
+ addend,
+ }),
+ .@"extern" => relocs_log.debug(" {}: [{x} => {d}({s})] + {x}", .{
+ rel.fmtPretty(cpu_arch),
+ r_address,
+ r_symbolnum,
+ rel.getTargetSymbol(self, macho_file).getName(macho_file),
+ addend,
+ }),
+ }
+
switch (cpu_arch) {
.aarch64 => {
if (rel.type == .unsigned) switch (rel.meta.length) {
diff --git a/src/link/MachO/ZigObject.zig b/src/link/MachO/ZigObject.zig
index 896bcf7afc..7d69e4ad76 100644
--- a/src/link/MachO/ZigObject.zig
+++ b/src/link/MachO/ZigObject.zig
@@ -19,32 +19,11 @@ atoms_extra: std.ArrayListUnmanaged(u32) = .{},
/// Table of tracked LazySymbols.
lazy_syms: LazySymbolTable = .{},
-/// Table of tracked Decls.
-decls: DeclTable = .{},
-
-/// Table of unnamed constants associated with a parent `Decl`.
-/// We store them here so that we can free the constants whenever the `Decl`
-/// needs updating or is freed.
-///
-/// For example,
-///
-/// ```zig
-/// const Foo = struct{
-/// a: u8,
-/// };
-///
-/// pub fn main() void {
-/// var foo = Foo{ .a = 1 };
-/// _ = foo;
-/// }
-/// ```
-///
-/// value assigned to label `foo` is an unnamed constant belonging/associated
-/// with `Decl` `main`, and lives as long as that `Decl`.
-unnamed_consts: UnnamedConstTable = .{},
-
-/// Table of tracked AnonDecls.
-anon_decls: AnonDeclTable = .{},
+/// Table of tracked Navs.
+navs: NavTable = .{},
+
+/// Table of tracked Uavs.
+uavs: UavTable = .{},
/// TLV initializers indexed by Atom.Index.
tlv_initializers: TlvInitializerTable = .{},
@@ -100,31 +79,17 @@ pub fn deinit(self: *ZigObject, allocator: Allocator) void {
self.atoms_indexes.deinit(allocator);
self.atoms_extra.deinit(allocator);
- {
- var it = self.decls.iterator();
- while (it.next()) |entry| {
- entry.value_ptr.exports.deinit(allocator);
- }
- self.decls.deinit(allocator);
+ for (self.navs.values()) |*meta| {
+ meta.exports.deinit(allocator);
}
+ self.navs.deinit(allocator);
self.lazy_syms.deinit(allocator);
- {
- var it = self.unnamed_consts.valueIterator();
- while (it.next()) |syms| {
- syms.deinit(allocator);
- }
- self.unnamed_consts.deinit(allocator);
- }
-
- {
- var it = self.anon_decls.iterator();
- while (it.next()) |entry| {
- entry.value_ptr.exports.deinit(allocator);
- }
- self.anon_decls.deinit(allocator);
+ for (self.uavs.values()) |*meta| {
+ meta.exports.deinit(allocator);
}
+ self.uavs.deinit(allocator);
for (self.relocs.items) |*list| {
list.deinit(allocator);
@@ -601,7 +566,7 @@ pub fn getInputSection(self: ZigObject, atom: Atom, macho_file: *MachO) macho.se
pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
- if (self.lazy_syms.getPtr(.none)) |metadata| {
+ if (self.lazy_syms.getPtr(.anyerror_type)) |metadata| {
const pt: Zcu.PerThread = .{ .zcu = macho_file.base.comp.module.?, .tid = tid };
// Most lazy symbols can be updated on first use, but
@@ -609,7 +574,7 @@ pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id)
if (metadata.text_state != .unused) self.updateLazySymbol(
macho_file,
pt,
- link.File.LazySymbol.initDecl(.code, null, pt.zcu),
+ .{ .kind = .code, .ty = .anyerror_type },
metadata.text_symbol_index,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@@ -618,7 +583,7 @@ pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id)
if (metadata.const_state != .unused) self.updateLazySymbol(
macho_file,
pt,
- link.File.LazySymbol.initDecl(.const_data, null, pt.zcu),
+ .{ .kind = .const_data, .ty = .anyerror_type },
metadata.const_symbol_index,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@@ -691,25 +656,25 @@ pub fn flushModule(self: *ZigObject, macho_file: *MachO, tid: Zcu.PerThread.Id)
assert(!self.debug_strtab_dirty);
}
-pub fn getDeclVAddr(
+pub fn getNavVAddr(
self: *ZigObject,
macho_file: *MachO,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
- const decl = zcu.declPtr(decl_index);
- log.debug("getDeclVAddr {}({d})", .{ decl.fqn.fmt(ip), decl_index });
- const sym_index = if (decl.isExtern(zcu)) blk: {
- const name = decl.name.toSlice(ip);
- const lib_name = if (decl.getOwnedExternFunc(zcu)) |ext_fn|
- ext_fn.lib_name.toSlice(ip)
- else
- decl.getOwnedVariable(zcu).?.lib_name.toSlice(ip);
- break :blk try self.getGlobalSymbol(macho_file, name, lib_name);
- } else try self.getOrCreateMetadataForDecl(macho_file, decl_index);
+ const nav = ip.getNav(nav_index);
+ log.debug("getNavVAddr {}({d})", .{ nav.fqn.fmt(ip), nav_index });
+ const sym_index = switch (ip.indexToKey(nav.status.resolved.val)) {
+ .@"extern" => |@"extern"| try self.getGlobalSymbol(
+ macho_file,
+ nav.name.toSlice(ip),
+ @"extern".lib_name.toSlice(ip),
+ ),
+ else => try self.getOrCreateMetadataForNav(macho_file, nav_index),
+ };
const sym = self.symbols.items[sym_index];
const vaddr = sym.getAddress(.{}, macho_file);
const parent_atom = self.symbols.items[reloc_info.parent_atom_index].getAtom(macho_file).?;
@@ -729,13 +694,13 @@ pub fn getDeclVAddr(
return vaddr;
}
-pub fn getAnonDeclVAddr(
+pub fn getUavVAddr(
self: *ZigObject,
macho_file: *MachO,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
- const sym_index = self.anon_decls.get(decl_val).?.symbol_index;
+ const sym_index = self.uavs.get(uav).?.symbol_index;
const sym = self.symbols.items[sym_index];
const vaddr = sym.getAddress(.{}, macho_file);
const parent_atom = self.symbols.items[reloc_info.parent_atom_index].getAtom(macho_file).?;
@@ -755,42 +720,43 @@ pub fn getAnonDeclVAddr(
return vaddr;
}
-pub fn lowerAnonDecl(
+pub fn lowerUav(
self: *ZigObject,
macho_file: *MachO,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: Atom.Alignment,
- src_loc: Module.LazySrcLoc,
-) !codegen.Result {
- const gpa = macho_file.base.comp.gpa;
- const mod = macho_file.base.comp.module.?;
- const ty = Type.fromInterned(mod.intern_pool.typeOf(decl_val));
- const decl_alignment = switch (explicit_alignment) {
- .none => ty.abiAlignment(pt),
+ src_loc: Zcu.LazySrcLoc,
+) !codegen.GenResult {
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const val = Value.fromInterned(uav);
+ const uav_alignment = switch (explicit_alignment) {
+ .none => val.typeOf(zcu).abiAlignment(pt),
else => explicit_alignment,
};
- if (self.anon_decls.get(decl_val)) |metadata| {
- const existing_alignment = self.symbols.items[metadata.symbol_index].getAtom(macho_file).?.alignment;
- if (decl_alignment.order(existing_alignment).compare(.lte))
- return .ok;
+ if (self.uavs.get(uav)) |metadata| {
+ const sym = self.symbols.items[metadata.symbol_index];
+ const existing_alignment = sym.getAtom(macho_file).?.alignment;
+ if (uav_alignment.order(existing_alignment).compare(.lte))
+ return .{ .mcv = .{ .load_symbol = sym.nlist_idx } };
}
var name_buf: [32]u8 = undefined;
const name = std.fmt.bufPrint(&name_buf, "__anon_{d}", .{
- @intFromEnum(decl_val),
+ @intFromEnum(uav),
}) catch unreachable;
const res = self.lowerConst(
macho_file,
pt,
name,
- Value.fromInterned(decl_val),
- decl_alignment,
+ val,
+ uav_alignment,
macho_file.zig_const_sect_index.?,
src_loc,
) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
- else => |e| return .{ .fail = try Module.ErrorMsg.create(
+ else => |e| return .{ .fail = try Zcu.ErrorMsg.create(
gpa,
src_loc,
"unable to lower constant value: {s}",
@@ -801,20 +767,13 @@ pub fn lowerAnonDecl(
.ok => |sym_index| sym_index,
.fail => |em| return .{ .fail = em },
};
- try self.anon_decls.put(gpa, decl_val, .{ .symbol_index = sym_index });
- return .ok;
-}
-
-fn freeUnnamedConsts(self: *ZigObject, macho_file: *MachO, decl_index: InternPool.DeclIndex) void {
- const gpa = macho_file.base.comp.gpa;
- const unnamed_consts = self.unnamed_consts.getPtr(decl_index) orelse return;
- for (unnamed_consts.items) |sym_index| {
- self.freeDeclMetadata(macho_file, sym_index);
- }
- unnamed_consts.clearAndFree(gpa);
+ try self.uavs.put(gpa, uav, .{ .symbol_index = sym_index });
+ return .{ .mcv = .{
+ .load_symbol = self.symbols.items[sym_index].nlist_idx,
+ } };
}
-fn freeDeclMetadata(self: *ZigObject, macho_file: *MachO, sym_index: Symbol.Index) void {
+fn freeNavMetadata(self: *ZigObject, macho_file: *MachO, sym_index: Symbol.Index) void {
const sym = self.symbols.items[sym_index];
sym.getAtom(macho_file).?.free(macho_file);
log.debug("adding %{d} to local symbols free list", .{sym_index});
@@ -822,18 +781,14 @@ fn freeDeclMetadata(self: *ZigObject, macho_file: *MachO, sym_index: Symbol.Inde
// TODO free GOT entry here
}
-pub fn freeDecl(self: *ZigObject, macho_file: *MachO, decl_index: InternPool.DeclIndex) void {
+pub fn freeNav(self: *ZigObject, macho_file: *MachO, nav_index: InternPool.Nav.Index) void {
const gpa = macho_file.base.comp.gpa;
- const mod = macho_file.base.comp.module.?;
- const decl = mod.declPtr(decl_index);
+ log.debug("freeNav 0x{x}", .{nav_index});
- log.debug("freeDecl {*}", .{decl});
-
- if (self.decls.fetchRemove(decl_index)) |const_kv| {
+ if (self.navs.fetchRemove(nav_index)) |const_kv| {
var kv = const_kv;
const sym_index = kv.value.symbol_index;
- self.freeDeclMetadata(macho_file, sym_index);
- self.freeUnnamedConsts(macho_file, decl_index);
+ self.freeNavMetadata(macho_file, sym_index);
kv.value.exports.deinit(gpa);
}
@@ -851,51 +806,46 @@ pub fn updateFunc(
const tracy = trace(@src());
defer tracy.end();
- const mod = pt.zcu;
- const gpa = mod.gpa;
- const func = mod.funcInfo(func_index);
- const decl_index = func.owner_decl;
- const decl = mod.declPtr(decl_index);
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const func = zcu.funcInfo(func_index);
- const sym_index = try self.getOrCreateMetadataForDecl(macho_file, decl_index);
- self.freeUnnamedConsts(macho_file, decl_index);
+ const sym_index = try self.getOrCreateMetadataForNav(macho_file, func.owner_nav);
self.symbols.items[sym_index].getAtom(macho_file).?.freeRelocs(macho_file);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
- var decl_state: ?Dwarf.DeclState = if (self.dwarf) |*dw| try dw.initDeclState(pt, decl_index) else null;
- defer if (decl_state) |*ds| ds.deinit();
+ var dwarf_state = if (self.dwarf) |*dw| try dw.initNavState(pt, func.owner_nav) else null;
+ defer if (dwarf_state) |*ds| ds.deinit();
- const dio: codegen.DebugInfoOutput = if (decl_state) |*ds| .{ .dwarf = ds } else .none;
const res = try codegen.generateFunction(
&macho_file.base,
pt,
- decl.navSrcLoc(mod),
+ zcu.navSrcLoc(func.owner_nav),
func_index,
air,
liveness,
&code_buffer,
- dio,
+ if (dwarf_state) |*ds| .{ .dwarf = ds } else .none,
);
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
- func.setAnalysisState(&mod.intern_pool, .codegen_failure);
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(gpa, func.owner_nav, em);
return;
},
};
- const sect_index = try self.getDeclOutputSection(macho_file, decl, code);
- try self.updateDeclCode(macho_file, pt, decl_index, sym_index, sect_index, code);
+ const sect_index = try self.getNavOutputSection(macho_file, zcu, func.owner_nav, code);
+ try self.updateNavCode(macho_file, pt, func.owner_nav, sym_index, sect_index, code);
- if (decl_state) |*ds| {
+ if (dwarf_state) |*ds| {
const sym = self.symbols.items[sym_index];
- try self.dwarf.?.commitDeclState(
+ try self.dwarf.?.commitNavState(
pt,
- decl_index,
+ func.owner_nav,
sym.getAddress(.{}, macho_file),
sym.getAtom(macho_file).?.size,
ds,
@@ -905,96 +855,98 @@ pub fn updateFunc(
// Exports will be updated by `Zcu.processExports` after the update.
}
-pub fn updateDecl(
+pub fn updateNav(
self: *ZigObject,
macho_file: *MachO,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
-) link.File.UpdateDeclError!void {
+ nav_index: InternPool.Nav.Index,
+) link.File.UpdateNavError!void {
const tracy = trace(@src());
defer tracy.end();
- const mod = pt.zcu;
- const decl = mod.declPtr(decl_index);
-
- if (decl.val.getExternFunc(mod)) |_| {
- return;
- }
-
- if (decl.isExtern(mod)) {
- // Extern variable gets a __got entry only
- const variable = decl.getOwnedVariable(mod).?;
- const name = decl.name.toSlice(&mod.intern_pool);
- const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
- const index = try self.getGlobalSymbol(macho_file, name, lib_name);
- const sym = &self.symbols.items[index];
- sym.setSectionFlags(.{ .needs_got = true });
- return;
- }
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const nav_val = zcu.navValue(nav_index);
+ const nav_init = switch (ip.indexToKey(nav_val.toIntern())) {
+ .variable => |variable| Value.fromInterned(variable.init),
+ .@"extern" => |@"extern"| {
+ if (ip.isFunctionType(@"extern".ty)) return;
+ // Extern variable gets a __got entry only
+ const name = @"extern".name.toSlice(ip);
+ const lib_name = @"extern".lib_name.toSlice(ip);
+ const index = try self.getGlobalSymbol(macho_file, name, lib_name);
+ const sym = &self.symbols.items[index];
+ sym.setSectionFlags(.{ .needs_got = true });
+ return;
+ },
+ else => nav_val,
+ };
- const sym_index = try self.getOrCreateMetadataForDecl(macho_file, decl_index);
+ const sym_index = try self.getOrCreateMetadataForNav(macho_file, nav_index);
self.symbols.items[sym_index].getAtom(macho_file).?.freeRelocs(macho_file);
- const gpa = macho_file.base.comp.gpa;
- var code_buffer = std.ArrayList(u8).init(gpa);
+ var code_buffer = std.ArrayList(u8).init(zcu.gpa);
defer code_buffer.deinit();
- var decl_state: ?Dwarf.DeclState = if (self.dwarf) |*dw| try dw.initDeclState(pt, decl_index) else null;
- defer if (decl_state) |*ds| ds.deinit();
+ var nav_state: ?Dwarf.NavState = if (self.dwarf) |*dw| try dw.initNavState(pt, nav_index) else null;
+ defer if (nav_state) |*ns| ns.deinit();
- const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
- const dio: codegen.DebugInfoOutput = if (decl_state) |*ds| .{ .dwarf = ds } else .none;
- const res = try codegen.generateSymbol(&macho_file.base, pt, decl.navSrcLoc(mod), decl_val, &code_buffer, dio, .{
- .parent_atom_index = sym_index,
- });
+ const res = try codegen.generateSymbol(
+ &macho_file.base,
+ pt,
+ zcu.navSrcLoc(nav_index),
+ nav_init,
+ &code_buffer,
+ if (nav_state) |*ns| .{ .dwarf = ns } else .none,
+ .{ .parent_atom_index = sym_index },
+ );
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(zcu.gpa, nav_index, em);
return;
},
};
- if (isThreadlocal(macho_file, decl_index)) {
- const sect_index = try self.getDeclOutputSection(macho_file, decl, code);
- try self.updateTlv(macho_file, pt, decl_index, sym_index, sect_index, code);
- } else {
- const sect_index = try self.getDeclOutputSection(macho_file, decl, code);
- try self.updateDeclCode(macho_file, pt, decl_index, sym_index, sect_index, code);
- }
+ const sect_index = try self.getNavOutputSection(macho_file, zcu, nav_index, code);
+ if (isThreadlocal(macho_file, nav_index))
+ try self.updateTlv(macho_file, pt, nav_index, sym_index, sect_index, code)
+ else
+ try self.updateNavCode(macho_file, pt, nav_index, sym_index, sect_index, code);
- if (decl_state) |*ds| {
+ if (nav_state) |*ns| {
const sym = self.symbols.items[sym_index];
- try self.dwarf.?.commitDeclState(
+ try self.dwarf.?.commitNavState(
pt,
- decl_index,
+ nav_index,
sym.getAddress(.{}, macho_file),
sym.getAtom(macho_file).?.size,
- ds,
+ ns,
);
}
// Exports will be updated by `Zcu.processExports` after the update.
}
-fn updateDeclCode(
+fn updateNavCode(
self: *ZigObject,
macho_file: *MachO,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
sym_index: Symbol.Index,
sect_index: u8,
code: []const u8,
) !void {
- const gpa = macho_file.base.comp.gpa;
- const mod = pt.zcu;
- const ip = &mod.intern_pool;
- const decl = mod.declPtr(decl_index);
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
- log.debug("updateDeclCode {}{*}", .{ decl.fqn.fmt(ip), decl });
+ log.debug("updateNavCode {} 0x{x}", .{ nav.fqn.fmt(ip), nav_index });
- const required_alignment = decl.getAlignment(pt);
+ const required_alignment = pt.navAlignment(nav_index).max(
+ target_util.minFunctionAlignment(zcu.navFileScope(nav_index).mod.resolved_target.result),
+ );
const sect = &macho_file.sections.items(.header)[sect_index];
const sym = &self.symbols.items[sym_index];
@@ -1004,7 +956,7 @@ fn updateDeclCode(
sym.out_n_sect = sect_index;
atom.out_n_sect = sect_index;
- const sym_name = try std.fmt.allocPrintZ(gpa, "_{s}", .{decl.fqn.toSlice(ip)});
+ const sym_name = try std.fmt.allocPrintZ(gpa, "_{s}", .{nav.fqn.toSlice(ip)});
defer gpa.free(sym_name);
sym.name = try self.addString(gpa, sym_name);
atom.setAlive(true);
@@ -1025,7 +977,7 @@ fn updateDeclCode(
if (need_realloc) {
try atom.grow(macho_file);
- log.debug("growing {} from 0x{x} to 0x{x}", .{ decl.fqn.fmt(ip), old_vaddr, atom.value });
+ log.debug("growing {} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), old_vaddr, atom.value });
if (old_vaddr != atom.value) {
sym.value = 0;
nlist.n_value = 0;
@@ -1045,7 +997,7 @@ fn updateDeclCode(
}
} else {
try atom.allocate(macho_file);
- errdefer self.freeDeclMetadata(macho_file, sym_index);
+ errdefer self.freeNavMetadata(macho_file, sym_index);
sym.value = 0;
sym.setSectionFlags(.{ .needs_zig_got = true });
@@ -1070,27 +1022,27 @@ fn updateTlv(
self: *ZigObject,
macho_file: *MachO,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
sym_index: Symbol.Index,
sect_index: u8,
code: []const u8,
) !void {
const ip = &pt.zcu.intern_pool;
- const decl = pt.zcu.declPtr(decl_index);
+ const nav = ip.getNav(nav_index);
- log.debug("updateTlv {} ({*})", .{ decl.fqn.fmt(&pt.zcu.intern_pool), decl });
+ log.debug("updateTlv {} (0x{x})", .{ nav.fqn.fmt(ip), nav_index });
// 1. Lower TLV initializer
const init_sym_index = try self.createTlvInitializer(
macho_file,
- decl.fqn.toSlice(ip),
- decl.getAlignment(pt),
+ nav.fqn.toSlice(ip),
+ pt.navAlignment(nav_index),
sect_index,
code,
);
// 2. Create TLV descriptor
- try self.createTlvDescriptor(macho_file, sym_index, init_sym_index, decl.fqn.toSlice(ip));
+ try self.createTlvDescriptor(macho_file, sym_index, init_sym_index, nav.fqn.toSlice(ip));
}
fn createTlvInitializer(
@@ -1197,102 +1149,52 @@ fn createTlvDescriptor(
});
}
-fn getDeclOutputSection(
+fn getNavOutputSection(
self: *ZigObject,
macho_file: *MachO,
- decl: *const Module.Decl,
+ zcu: *Zcu,
+ nav_index: InternPool.Nav.Index,
code: []const u8,
) error{OutOfMemory}!u8 {
_ = self;
- const mod = macho_file.base.comp.module.?;
+ const ip = &zcu.intern_pool;
const any_non_single_threaded = macho_file.base.comp.config.any_non_single_threaded;
- const sect_id: u8 = switch (decl.typeOf(mod).zigTypeTag(mod)) {
- .Fn => macho_file.zig_text_sect_index.?,
- else => blk: {
- if (decl.getOwnedVariable(mod)) |variable| {
- if (variable.is_threadlocal and any_non_single_threaded) {
- const is_all_zeroes = for (code) |byte| {
- if (byte != 0) break false;
- } else true;
- if (is_all_zeroes) break :blk macho_file.getSectionByName("__DATA", "__thread_bss") orelse try macho_file.addSection(
- "__DATA",
- "__thread_bss",
- .{ .flags = macho.S_THREAD_LOCAL_ZEROFILL },
- );
- break :blk macho_file.getSectionByName("__DATA", "__thread_data") orelse try macho_file.addSection(
- "__DATA",
- "__thread_data",
- .{ .flags = macho.S_THREAD_LOCAL_REGULAR },
- );
- }
-
- if (variable.is_const) break :blk macho_file.zig_const_sect_index.?;
- if (Value.fromInterned(variable.init).isUndefDeep(mod)) {
- // TODO: get the optimize_mode from the Module that owns the decl instead
- // of using the root module here.
- break :blk switch (macho_file.base.comp.root_mod.optimize_mode) {
- .Debug, .ReleaseSafe => macho_file.zig_data_sect_index.?,
- .ReleaseFast, .ReleaseSmall => macho_file.zig_bss_sect_index.?,
- };
- }
-
- // TODO I blatantly copied the logic from the Wasm linker, but is there a less
- // intrusive check for all zeroes than this?
- const is_all_zeroes = for (code) |byte| {
- if (byte != 0) break false;
- } else true;
- if (is_all_zeroes) break :blk macho_file.zig_bss_sect_index.?;
- break :blk macho_file.zig_data_sect_index.?;
- }
- break :blk macho_file.zig_const_sect_index.?;
- },
+ const nav_val = zcu.navValue(nav_index);
+ if (ip.isFunctionType(nav_val.typeOf(zcu).toIntern())) return macho_file.zig_text_sect_index.?;
+ const is_const, const is_threadlocal, const nav_init = switch (ip.indexToKey(nav_val.toIntern())) {
+ .variable => |variable| .{ false, variable.is_threadlocal, variable.init },
+ .@"extern" => |@"extern"| .{ @"extern".is_const, @"extern".is_threadlocal, .none },
+ else => .{ true, false, nav_val.toIntern() },
};
- return sect_id;
-}
-
-pub fn lowerUnnamedConst(
- self: *ZigObject,
- macho_file: *MachO,
- pt: Zcu.PerThread,
- val: Value,
- decl_index: InternPool.DeclIndex,
-) !u32 {
- const mod = pt.zcu;
- const gpa = mod.gpa;
- const gop = try self.unnamed_consts.getOrPut(gpa, decl_index);
- if (!gop.found_existing) {
- gop.value_ptr.* = .{};
+ if (any_non_single_threaded and is_threadlocal) {
+ for (code) |byte| {
+ if (byte != 0) break;
+ } else return macho_file.getSectionByName("__DATA", "__thread_bss") orelse try macho_file.addSection(
+ "__DATA",
+ "__thread_bss",
+ .{ .flags = macho.S_THREAD_LOCAL_ZEROFILL },
+ );
+ return macho_file.getSectionByName("__DATA", "__thread_data") orelse try macho_file.addSection(
+ "__DATA",
+ "__thread_data",
+ .{ .flags = macho.S_THREAD_LOCAL_REGULAR },
+ );
}
- const unnamed_consts = gop.value_ptr;
- const decl = mod.declPtr(decl_index);
- const index = unnamed_consts.items.len;
- const name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl.fqn.fmt(&mod.intern_pool), index });
- defer gpa.free(name);
- const sym_index = switch (try self.lowerConst(
- macho_file,
- pt,
- name,
- val,
- val.typeOf(mod).abiAlignment(pt),
- macho_file.zig_const_sect_index.?,
- decl.navSrcLoc(mod),
- )) {
- .ok => |sym_index| sym_index,
- .fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
- log.err("{s}", .{em.msg});
- return error.CodegenFail;
- },
- };
- const sym = self.symbols.items[sym_index];
- try unnamed_consts.append(gpa, sym.atom_ref.index);
- return sym_index;
+ if (is_const) return macho_file.zig_const_sect_index.?;
+ if (nav_init != .none and Value.fromInterned(nav_init).isUndefDeep(zcu))
+ return switch (zcu.navFileScope(nav_index).mod.optimize_mode) {
+ .Debug, .ReleaseSafe => macho_file.zig_data_sect_index.?,
+ .ReleaseFast, .ReleaseSmall => macho_file.zig_bss_sect_index.?,
+ };
+ for (code) |byte| {
+ if (byte != 0) break;
+ } else return macho_file.zig_bss_sect_index.?;
+ return macho_file.zig_data_sect_index.?;
}
const LowerConstResult = union(enum) {
ok: Symbol.Index,
- fail: *Module.ErrorMsg,
+ fail: *Zcu.ErrorMsg,
};
fn lowerConst(
@@ -1303,7 +1205,7 @@ fn lowerConst(
val: Value,
required_alignment: Atom.Alignment,
output_section_index: u8,
- src_loc: Module.LazySrcLoc,
+ src_loc: Zcu.LazySrcLoc,
) !LowerConstResult {
const gpa = macho_file.base.comp.gpa;
@@ -1338,7 +1240,7 @@ fn lowerConst(
try atom.allocate(macho_file);
// TODO rename and re-audit this method
- errdefer self.freeDeclMetadata(macho_file, sym_index);
+ errdefer self.freeNavMetadata(macho_file, sym_index);
const sect = macho_file.sections.items(.header)[output_section_index];
const file_offset = sect.offset + atom.value;
@@ -1351,7 +1253,7 @@ pub fn updateExports(
self: *ZigObject,
macho_file: *MachO,
pt: Zcu.PerThread,
- exported: Module.Exported,
+ exported: Zcu.Exported,
export_indices: []const u32,
) link.File.UpdateExportsError!void {
const tracy = trace(@src());
@@ -1360,24 +1262,24 @@ pub fn updateExports(
const mod = pt.zcu;
const gpa = macho_file.base.comp.gpa;
const metadata = switch (exported) {
- .decl_index => |decl_index| blk: {
- _ = try self.getOrCreateMetadataForDecl(macho_file, decl_index);
- break :blk self.decls.getPtr(decl_index).?;
+ .nav => |nav| blk: {
+ _ = try self.getOrCreateMetadataForNav(macho_file, nav);
+ break :blk self.navs.getPtr(nav).?;
},
- .value => |value| self.anon_decls.getPtr(value) orelse blk: {
+ .uav => |uav| self.uavs.getPtr(uav) orelse blk: {
const first_exp = mod.all_exports.items[export_indices[0]];
- const res = try self.lowerAnonDecl(macho_file, pt, value, .none, first_exp.src);
+ const res = try self.lowerUav(macho_file, pt, uav, .none, first_exp.src);
switch (res) {
- .ok => {},
+ .mcv => {},
.fail => |em| {
- // TODO maybe it's enough to return an error here and let Module.processExportsInner
+ // TODO maybe it's enough to return an error here and let Zcu.processExportsInner
// handle the error?
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
return;
},
}
- break :blk self.anon_decls.getPtr(value).?;
+ break :blk self.uavs.getPtr(uav).?;
},
};
const sym_index = metadata.symbol_index;
@@ -1389,7 +1291,7 @@ pub fn updateExports(
if (exp.opts.section.unwrap()) |section_name| {
if (!section_name.eqlSlice("__text", &mod.intern_pool)) {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
- mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Module.ErrorMsg.create(
+ mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Zcu.ErrorMsg.create(
gpa,
exp.src,
"Unimplemented: ExportOptions.section",
@@ -1399,7 +1301,7 @@ pub fn updateExports(
}
}
if (exp.opts.linkage == .link_once) {
- try mod.failed_exports.putNoClobber(mod.gpa, export_idx, try Module.ErrorMsg.create(
+ try mod.failed_exports.putNoClobber(mod.gpa, export_idx, try Zcu.ErrorMsg.create(
gpa,
exp.src,
"Unimplemented: GlobalLinkage.link_once",
@@ -1454,8 +1356,8 @@ fn updateLazySymbol(
lazy_sym: link.File.LazySymbol,
symbol_index: Symbol.Index,
) !void {
- const gpa = macho_file.base.comp.gpa;
- const mod = macho_file.base.comp.module.?;
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
var required_alignment: Atom.Alignment = .none;
var code_buffer = std.ArrayList(u8).init(gpa);
@@ -1464,13 +1366,13 @@ fn updateLazySymbol(
const name_str = blk: {
const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{
@tagName(lazy_sym.kind),
- lazy_sym.ty.fmt(pt),
+ Type.fromInterned(lazy_sym.ty).fmt(pt),
});
defer gpa.free(name);
break :blk try self.addString(gpa, name);
};
- const src = lazy_sym.ty.srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
+ const src = Type.fromInterned(lazy_sym.ty).srcLocOrNull(zcu) orelse Zcu.LazySrcLoc.unneeded;
const res = try codegen.generateLazySymbol(
&macho_file.base,
pt,
@@ -1511,7 +1413,7 @@ fn updateLazySymbol(
atom.out_n_sect = output_section_index;
try atom.allocate(macho_file);
- errdefer self.freeDeclMetadata(macho_file, symbol_index);
+ errdefer self.freeNavMetadata(macho_file, symbol_index);
sym.value = 0;
sym.setSectionFlags(.{ .needs_zig_got = true });
@@ -1527,10 +1429,14 @@ fn updateLazySymbol(
try macho_file.base.file.?.pwriteAll(code, file_offset);
}
-/// Must be called only after a successful call to `updateDecl`.
-pub fn updateDeclLineNumber(self: *ZigObject, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+/// Must be called only after a successful call to `updateNav`.
+pub fn updateNavLineNumber(
+ self: *ZigObject,
+ pt: Zcu.PerThread,
+ nav_index: InternPool.Nav.Index,
+) !void {
if (self.dwarf) |*dw| {
- try dw.updateDeclLineNumber(pt.zcu, decl_index);
+ try dw.updateNavLineNumber(pt.zcu, nav_index);
}
}
@@ -1543,9 +1449,9 @@ pub fn deleteExport(
const mod = macho_file.base.comp.module.?;
const metadata = switch (exported) {
- .decl_index => |decl_index| self.decls.getPtr(decl_index) orelse return,
- .value => |value| self.anon_decls.getPtr(value) orelse return,
- };
+ .nav => |nav| self.navs.getPtr(nav),
+ .uav => |uav| self.uavs.getPtr(uav),
+ } orelse return;
const nlist_index = metadata.@"export"(self, name.toSlice(&mod.intern_pool)) orelse return;
log.debug("deleting export '{}'", .{name.fmt(&mod.intern_pool)});
@@ -1577,17 +1483,17 @@ pub fn getGlobalSymbol(self: *ZigObject, macho_file: *MachO, name: []const u8, l
return lookup_gop.value_ptr.*;
}
-pub fn getOrCreateMetadataForDecl(
+pub fn getOrCreateMetadataForNav(
self: *ZigObject,
macho_file: *MachO,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
) !Symbol.Index {
const gpa = macho_file.base.comp.gpa;
- const gop = try self.decls.getOrPut(gpa, decl_index);
+ const gop = try self.navs.getOrPut(gpa, nav_index);
if (!gop.found_existing) {
const sym_index = try self.newSymbolWithAtom(gpa, .{}, macho_file);
const sym = &self.symbols.items[sym_index];
- if (isThreadlocal(macho_file, decl_index)) {
+ if (isThreadlocal(macho_file, nav_index)) {
sym.flags.tlv = true;
} else {
sym.setSectionFlags(.{ .needs_zig_got = true });
@@ -1603,47 +1509,39 @@ pub fn getOrCreateMetadataForLazySymbol(
pt: Zcu.PerThread,
lazy_sym: link.File.LazySymbol,
) !Symbol.Index {
- const mod = pt.zcu;
- const gpa = mod.gpa;
- const gop = try self.lazy_syms.getOrPut(gpa, lazy_sym.getDecl(mod));
+ const gop = try self.lazy_syms.getOrPut(pt.zcu.gpa, lazy_sym.ty);
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
if (!gop.found_existing) gop.value_ptr.* = .{};
- const metadata: struct {
- symbol_index: *Symbol.Index,
- state: *LazySymbolMetadata.State,
- } = switch (lazy_sym.kind) {
- .code => .{
- .symbol_index = &gop.value_ptr.text_symbol_index,
- .state = &gop.value_ptr.text_state,
- },
- .const_data => .{
- .symbol_index = &gop.value_ptr.const_symbol_index,
- .state = &gop.value_ptr.const_state,
- },
+ const symbol_index_ptr, const state_ptr = switch (lazy_sym.kind) {
+ .code => .{ &gop.value_ptr.text_symbol_index, &gop.value_ptr.text_state },
+ .const_data => .{ &gop.value_ptr.const_symbol_index, &gop.value_ptr.const_state },
};
- switch (metadata.state.*) {
+ switch (state_ptr.*) {
.unused => {
- const symbol_index = try self.newSymbolWithAtom(gpa, .{}, macho_file);
+ const symbol_index = try self.newSymbolWithAtom(pt.zcu.gpa, .{}, macho_file);
const sym = &self.symbols.items[symbol_index];
sym.setSectionFlags(.{ .needs_zig_got = true });
- metadata.symbol_index.* = symbol_index;
+ symbol_index_ptr.* = symbol_index;
},
- .pending_flush => return metadata.symbol_index.*,
+ .pending_flush => return symbol_index_ptr.*,
.flushed => {},
}
- metadata.state.* = .pending_flush;
- const symbol_index = metadata.symbol_index.*;
+ state_ptr.* = .pending_flush;
+ const symbol_index = symbol_index_ptr.*;
// anyerror needs to be deferred until flushModule
- if (lazy_sym.getDecl(mod) != .none) try self.updateLazySymbol(macho_file, pt, lazy_sym, symbol_index);
+ if (lazy_sym.ty != .anyerror_type) try self.updateLazySymbol(macho_file, pt, lazy_sym, symbol_index);
return symbol_index;
}
-fn isThreadlocal(macho_file: *MachO, decl_index: InternPool.DeclIndex) bool {
- const any_non_single_threaded = macho_file.base.comp.config.any_non_single_threaded;
- const zcu = macho_file.base.comp.module.?;
- const decl = zcu.declPtr(decl_index);
- const variable = decl.getOwnedVariable(zcu) orelse return false;
- return variable.is_threadlocal and any_non_single_threaded;
+fn isThreadlocal(macho_file: *MachO, nav_index: InternPool.Nav.Index) bool {
+ if (!macho_file.base.comp.config.any_non_single_threaded)
+ return false;
+ const ip = &macho_file.base.comp.module.?.intern_pool;
+ return switch (ip.indexToKey(ip.getNav(nav_index).status.resolved.val)) {
+ .variable => |variable| variable.is_threadlocal,
+ .@"extern" => |@"extern"| @"extern".is_threadlocal,
+ else => false,
+ };
}
fn addAtom(self: *ZigObject, allocator: Allocator) !Atom.Index {
@@ -1848,12 +1746,12 @@ fn formatAtoms(
}
}
-const DeclMetadata = struct {
+const AvMetadata = struct {
symbol_index: Symbol.Index,
- /// A list of all exports aliases of this Decl.
+ /// A list of all exports aliases of this Av.
exports: std.ArrayListUnmanaged(Symbol.Index) = .{},
- fn @"export"(m: DeclMetadata, zig_object: *ZigObject, name: []const u8) ?*u32 {
+ fn @"export"(m: AvMetadata, zig_object: *ZigObject, name: []const u8) ?*u32 {
for (m.exports.items) |*exp| {
const nlist = zig_object.symtab.items(.nlist)[exp.*];
const exp_name = zig_object.strtab.getAssumeExists(nlist.n_strx);
@@ -1880,10 +1778,9 @@ const TlvInitializer = struct {
}
};
-const DeclTable = std.AutoHashMapUnmanaged(InternPool.DeclIndex, DeclMetadata);
-const UnnamedConstTable = std.AutoHashMapUnmanaged(InternPool.DeclIndex, std.ArrayListUnmanaged(Symbol.Index));
-const AnonDeclTable = std.AutoHashMapUnmanaged(InternPool.Index, DeclMetadata);
-const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.OptionalDeclIndex, LazySymbolMetadata);
+const NavTable = std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, AvMetadata);
+const UavTable = std.AutoArrayHashMapUnmanaged(InternPool.Index, AvMetadata);
+const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.Index, LazySymbolMetadata);
const RelocationTable = std.ArrayListUnmanaged(std.ArrayListUnmanaged(Relocation));
const TlvInitializerTable = std.AutoArrayHashMapUnmanaged(Atom.Index, TlvInitializer);
@@ -1894,6 +1791,7 @@ const link = @import("../../link.zig");
const log = std.log.scoped(.link);
const macho = std.macho;
const mem = std.mem;
+const target_util = @import("../../target.zig");
const trace = @import("../../tracy.zig").trace;
const std = @import("std");
@@ -1908,8 +1806,6 @@ const Liveness = @import("../../Liveness.zig");
const MachO = @import("../MachO.zig");
const Nlist = Object.Nlist;
const Zcu = @import("../../Zcu.zig");
-/// Deprecated.
-const Module = Zcu;
const Object = @import("Object.zig");
const Relocation = @import("Relocation.zig");
const Symbol = @import("Symbol.zig");
diff --git a/src/link/NvPtx.zig b/src/link/NvPtx.zig
index 8caaed05da..cb95779d8e 100644
--- a/src/link/NvPtx.zig
+++ b/src/link/NvPtx.zig
@@ -86,8 +86,8 @@ pub fn updateFunc(self: *NvPtx, pt: Zcu.PerThread, func_index: InternPool.Index,
try self.llvm_object.updateFunc(pt, func_index, air, liveness);
}
-pub fn updateDecl(self: *NvPtx, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
- return self.llvm_object.updateDecl(pt, decl_index);
+pub fn updateNav(self: *NvPtx, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !void {
+ return self.llvm_object.updateNav(pt, nav);
}
pub fn updateExports(
diff --git a/src/link/Plan9.zig b/src/link/Plan9.zig
index e954bf7004..afd4c57ff1 100644
--- a/src/link/Plan9.zig
+++ b/src/link/Plan9.zig
@@ -24,8 +24,6 @@ const Allocator = std.mem.Allocator;
const log = std.log.scoped(.link);
const assert = std.debug.assert;
-pub const base_tag = .plan9;
-
base: link.File,
sixtyfour_bit: bool,
bases: Bases,
@@ -53,40 +51,19 @@ path_arena: std.heap.ArenaAllocator,
/// The debugger looks for the first file (aout.Sym.Type.z) preceding the text symbol
/// of the function to know what file it came from.
/// If we group the decls by file, it makes it really easy to do this (put the symbol in the correct place)
-fn_decl_table: std.AutoArrayHashMapUnmanaged(
- *Zcu.File,
- struct { sym_index: u32, functions: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, FnDeclOutput) = .{} },
+fn_nav_table: std.AutoArrayHashMapUnmanaged(
+ Zcu.File.Index,
+ struct { sym_index: u32, functions: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, FnNavOutput) = .{} },
) = .{},
/// the code is modified when relocated, so that is why it is mutable
-data_decl_table: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, []u8) = .{},
+data_nav_table: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, []u8) = .{},
/// When `updateExports` is called, we store the export indices here, to be used
/// during flush.
-decl_exports: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, []u32) = .{},
-
-/// Table of unnamed constants associated with a parent `Decl`.
-/// We store them here so that we can free the constants whenever the `Decl`
-/// needs updating or is freed.
-///
-/// For example,
-///
-/// ```zig
-/// const Foo = struct{
-/// a: u8,
-/// };
-///
-/// pub fn main() void {
-/// var foo = Foo{ .a = 1 };
-/// _ = foo;
-/// }
-/// ```
-///
-/// value assigned to label `foo` is an unnamed constant belonging/associated
-/// with `Decl` `main`, and lives as long as that `Decl`.
-unnamed_const_atoms: UnnamedConstTable = .{},
+nav_exports: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, []u32) = .{},
lazy_syms: LazySymbolTable = .{},
-anon_decls: std.AutoHashMapUnmanaged(InternPool.Index, Atom.Index) = .{},
+uavs: std.AutoHashMapUnmanaged(InternPool.Index, Atom.Index) = .{},
relocs: std.AutoHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(Reloc)) = .{},
hdr: aout.ExecHdr = undefined,
@@ -104,7 +81,7 @@ got_index_free_list: std.ArrayListUnmanaged(usize) = .{},
syms_index_free_list: std.ArrayListUnmanaged(usize) = .{},
atoms: std.ArrayListUnmanaged(Atom) = .{},
-decls: std.AutoHashMapUnmanaged(InternPool.DeclIndex, DeclMetadata) = .{},
+navs: std.AutoHashMapUnmanaged(InternPool.Nav.Index, NavMetadata) = .{},
/// Indices of the three "special" symbols into atoms
etext_edata_end_atom_indices: [3]?Atom.Index = .{ null, null, null },
@@ -131,9 +108,7 @@ const Bases = struct {
data: u64,
};
-const UnnamedConstTable = std.AutoHashMapUnmanaged(InternPool.DeclIndex, std.ArrayListUnmanaged(Atom.Index));
-
-const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.OptionalDeclIndex, LazySymbolMetadata);
+const LazySymbolTable = std.AutoArrayHashMapUnmanaged(InternPool.Index, LazySymbolMetadata);
const LazySymbolMetadata = struct {
const State = enum { unused, pending_flush, flushed };
@@ -161,7 +136,7 @@ pub const Atom = struct {
/// offset into got
got_index: ?usize,
/// We include the code here to be used in relocs
- /// In the case of unnamed_const_atoms and lazy_syms, this atom owns the code.
+ /// In the case of lazy_syms, this atom owns the code.
/// But, in the case of function and data decls, they own the code and this field
/// is just a pointer for convenience.
code: CodePtr,
@@ -170,22 +145,23 @@ pub const Atom = struct {
code_ptr: ?[*]u8,
other: union {
code_len: usize,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
},
fn fromSlice(slice: []u8) CodePtr {
return .{ .code_ptr = slice.ptr, .other = .{ .code_len = slice.len } };
}
fn getCode(self: CodePtr, plan9: *const Plan9) []u8 {
- const mod = plan9.base.comp.module.?;
+ const zcu = plan9.base.comp.module.?;
+ const ip = &zcu.intern_pool;
return if (self.code_ptr) |p| p[0..self.other.code_len] else blk: {
- const decl_index = self.other.decl_index;
- const decl = mod.declPtr(decl_index);
- if (decl.typeOf(mod).zigTypeTag(mod) == .Fn) {
- const table = plan9.fn_decl_table.get(decl.getFileScope(mod)).?.functions;
- const output = table.get(decl_index).?;
+ const nav_index = self.other.nav_index;
+ const nav = ip.getNav(nav_index);
+ if (ip.isFunctionType(nav.typeOf(ip))) {
+ const table = plan9.fn_nav_table.get(zcu.navFileScopeIndex(nav_index)).?.functions;
+ const output = table.get(nav_index).?;
break :blk output.code;
} else {
- break :blk plan9.data_decl_table.get(decl_index).?;
+ break :blk plan9.data_nav_table.get(nav_index).?;
}
};
}
@@ -241,11 +217,11 @@ pub const DebugInfoOutput = struct {
pc_quanta: u8,
};
-const DeclMetadata = struct {
+const NavMetadata = struct {
index: Atom.Index,
exports: std.ArrayListUnmanaged(usize) = .{},
- fn getExport(m: DeclMetadata, p9: *const Plan9, name: []const u8) ?usize {
+ fn getExport(m: NavMetadata, p9: *const Plan9, name: []const u8) ?usize {
for (m.exports.items) |exp| {
const sym = p9.syms.items[exp];
if (mem.eql(u8, name, sym.name)) return exp;
@@ -254,7 +230,7 @@ const DeclMetadata = struct {
}
};
-const FnDeclOutput = struct {
+const FnNavOutput = struct {
/// this code is modified when relocated so it is mutable
code: []u8,
/// this might have to be modified in the linker, so that's why it's mutable
@@ -338,18 +314,18 @@ pub fn createEmpty(
return self;
}
-fn putFn(self: *Plan9, decl_index: InternPool.DeclIndex, out: FnDeclOutput) !void {
+fn putFn(self: *Plan9, nav_index: InternPool.Nav.Index, out: FnNavOutput) !void {
const gpa = self.base.comp.gpa;
const mod = self.base.comp.module.?;
- const decl = mod.declPtr(decl_index);
- const fn_map_res = try self.fn_decl_table.getOrPut(gpa, decl.getFileScope(mod));
+ const file_scope = mod.navFileScopeIndex(nav_index);
+ const fn_map_res = try self.fn_nav_table.getOrPut(gpa, file_scope);
if (fn_map_res.found_existing) {
- if (try fn_map_res.value_ptr.functions.fetchPut(gpa, decl_index, out)) |old_entry| {
+ if (try fn_map_res.value_ptr.functions.fetchPut(gpa, nav_index, out)) |old_entry| {
gpa.free(old_entry.value.code);
gpa.free(old_entry.value.lineinfo);
}
} else {
- const file = decl.getFileScope(mod);
+ const file = mod.fileByIndex(file_scope);
const arena = self.path_arena.allocator();
// each file gets a symbol
fn_map_res.value_ptr.* = .{
@@ -359,7 +335,7 @@ fn putFn(self: *Plan9, decl_index: InternPool.DeclIndex, out: FnDeclOutput) !voi
break :blk @as(u32, @intCast(self.syms.items.len - 1));
},
};
- try fn_map_res.value_ptr.functions.put(gpa, decl_index, out);
+ try fn_map_res.value_ptr.functions.put(gpa, nav_index, out);
var a = std.ArrayList(u8).init(arena);
errdefer a.deinit();
@@ -418,11 +394,8 @@ pub fn updateFunc(self: *Plan9, pt: Zcu.PerThread, func_index: InternPool.Index,
const gpa = mod.gpa;
const target = self.base.comp.root_mod.resolved_target.result;
const func = mod.funcInfo(func_index);
- const decl_index = func.owner_decl;
- const decl = mod.declPtr(decl_index);
- self.freeUnnamedConsts(decl_index);
- const atom_idx = try self.seeDecl(decl_index);
+ const atom_idx = try self.seeNav(pt, func.owner_nav);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
@@ -439,7 +412,7 @@ pub fn updateFunc(self: *Plan9, pt: Zcu.PerThread, func_index: InternPool.Index,
const res = try codegen.generateFunction(
&self.base,
pt,
- decl.navSrcLoc(mod),
+ mod.navSrcLoc(func.owner_nav),
func_index,
air,
liveness,
@@ -449,128 +422,72 @@ pub fn updateFunc(self: *Plan9, pt: Zcu.PerThread, func_index: InternPool.Index,
const code = switch (res) {
.ok => try code_buffer.toOwnedSlice(),
.fail => |em| {
- func.setAnalysisState(&mod.intern_pool, .codegen_failure);
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try mod.failed_codegen.put(gpa, func.owner_nav, em);
return;
},
};
self.getAtomPtr(atom_idx).code = .{
.code_ptr = null,
- .other = .{ .decl_index = decl_index },
+ .other = .{ .nav_index = func.owner_nav },
};
- const out: FnDeclOutput = .{
+ const out: FnNavOutput = .{
.code = code,
.lineinfo = try dbg_info_output.dbg_line.toOwnedSlice(),
.start_line = dbg_info_output.start_line.?,
.end_line = dbg_info_output.end_line,
};
- try self.putFn(decl_index, out);
- return self.updateFinish(decl_index);
+ try self.putFn(func.owner_nav, out);
+ return self.updateFinish(pt, func.owner_nav);
}
-pub fn lowerUnnamedConst(self: *Plan9, pt: Zcu.PerThread, val: Value, decl_index: InternPool.DeclIndex) !u32 {
- const mod = pt.zcu;
- const gpa = mod.gpa;
- _ = try self.seeDecl(decl_index);
- var code_buffer = std.ArrayList(u8).init(gpa);
- defer code_buffer.deinit();
-
- const decl = mod.declPtr(decl_index);
-
- const gop = try self.unnamed_const_atoms.getOrPut(gpa, decl_index);
- if (!gop.found_existing) {
- gop.value_ptr.* = .{};
- }
- const unnamed_consts = gop.value_ptr;
-
- const index = unnamed_consts.items.len;
- // name is freed when the unnamed const is freed
- const name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl.fqn.fmt(&mod.intern_pool), index });
-
- const sym_index = try self.allocateSymbolIndex();
- const new_atom_idx = try self.createAtom();
- const info: Atom = .{
- .type = .d,
- .offset = null,
- .sym_index = sym_index,
- .got_index = self.allocateGotIndex(),
- .code = undefined, // filled in later
- };
- const sym: aout.Sym = .{
- .value = undefined,
- .type = info.type,
- .name = name,
- };
- self.syms.items[info.sym_index.?] = sym;
-
- const res = try codegen.generateSymbol(&self.base, pt, decl.navSrcLoc(mod), val, &code_buffer, .{
- .none = {},
- }, .{
- .parent_atom_index = new_atom_idx,
- });
- const code = switch (res) {
- .ok => code_buffer.items,
- .fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
- log.err("{s}", .{em.msg});
- return error.CodegenFail;
+pub fn updateNav(self: *Plan9, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !void {
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
+ const nav_val = zcu.navValue(nav_index);
+ const nav_init = switch (ip.indexToKey(nav_val.toIntern())) {
+ .variable => |variable| Value.fromInterned(variable.init),
+ .@"extern" => {
+ log.debug("found extern decl: {}", .{nav.name.fmt(ip)});
+ return;
},
+ else => nav_val,
};
- // duped_code is freed when the unnamed const is freed
- const duped_code = try gpa.dupe(u8, code);
- errdefer gpa.free(duped_code);
- const new_atom = self.getAtomPtr(new_atom_idx);
- new_atom.* = info;
- new_atom.code = .{ .code_ptr = duped_code.ptr, .other = .{ .code_len = duped_code.len } };
- try unnamed_consts.append(gpa, new_atom_idx);
- // we return the new_atom_idx to codegen
- return new_atom_idx;
-}
-
-pub fn updateDecl(self: *Plan9, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
- const gpa = self.base.comp.gpa;
- const mod = pt.zcu;
- const decl = mod.declPtr(decl_index);
-
- if (decl.isExtern(mod)) {
- log.debug("found extern decl: {}", .{decl.name.fmt(&mod.intern_pool)});
- return;
- }
- const atom_idx = try self.seeDecl(decl_index);
+ const atom_idx = try self.seeNav(pt, nav_index);
var code_buffer = std.ArrayList(u8).init(gpa);
defer code_buffer.deinit();
- const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
// TODO we need the symbol index for symbol in the table of locals for the containing atom
- const res = try codegen.generateSymbol(&self.base, pt, decl.navSrcLoc(mod), decl_val, &code_buffer, .{ .none = {} }, .{
- .parent_atom_index = @as(Atom.Index, @intCast(atom_idx)),
+ const res = try codegen.generateSymbol(&self.base, pt, zcu.navSrcLoc(nav_index), nav_init, &code_buffer, .none, .{
+ .parent_atom_index = @intCast(atom_idx),
});
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(gpa, nav_index, em);
return;
},
};
- try self.data_decl_table.ensureUnusedCapacity(gpa, 1);
+ try self.data_nav_table.ensureUnusedCapacity(gpa, 1);
const duped_code = try gpa.dupe(u8, code);
- self.getAtomPtr(self.decls.get(decl_index).?.index).code = .{ .code_ptr = null, .other = .{ .decl_index = decl_index } };
- if (self.data_decl_table.fetchPutAssumeCapacity(decl_index, duped_code)) |old_entry| {
+ self.getAtomPtr(self.navs.get(nav_index).?.index).code = .{ .code_ptr = null, .other = .{ .nav_index = nav_index } };
+ if (self.data_nav_table.fetchPutAssumeCapacity(nav_index, duped_code)) |old_entry| {
gpa.free(old_entry.value);
}
- return self.updateFinish(decl_index);
+ return self.updateFinish(pt, nav_index);
}
+
/// called at the end of update{Decl,Func}
-fn updateFinish(self: *Plan9, decl_index: InternPool.DeclIndex) !void {
- const gpa = self.base.comp.gpa;
- const mod = self.base.comp.module.?;
- const decl = mod.declPtr(decl_index);
- const is_fn = (decl.typeOf(mod).zigTypeTag(mod) == .Fn);
+fn updateFinish(self: *Plan9, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !void {
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
+ const is_fn = ip.isFunctionType(nav.typeOf(ip));
const sym_t: aout.Sym.Type = if (is_fn) .t else .d;
- const atom = self.getAtomPtr(self.decls.get(decl_index).?.index);
+ const atom = self.getAtomPtr(self.navs.get(nav_index).?.index);
// write the internal linker metadata
atom.type = sym_t;
// write the symbol
@@ -578,7 +495,7 @@ fn updateFinish(self: *Plan9, decl_index: InternPool.DeclIndex) !void {
const sym: aout.Sym = .{
.value = undefined, // the value of stuff gets filled in in flushModule
.type = atom.type,
- .name = try gpa.dupe(u8, decl.name.toSlice(&mod.intern_pool)),
+ .name = try gpa.dupe(u8, nav.name.toSlice(ip)),
};
if (atom.sym_index) |s| {
@@ -643,29 +560,24 @@ fn externCount(self: *Plan9) usize {
}
return extern_atom_count;
}
-// counts decls, unnamed consts, and lazy syms
+// counts navs, lazy syms, uavs, and externs
fn atomCount(self: *Plan9) usize {
- var fn_decl_count: usize = 0;
- var itf_files = self.fn_decl_table.iterator();
+ var fn_nav_count: usize = 0;
+ var itf_files = self.fn_nav_table.iterator();
while (itf_files.next()) |ent| {
// get the submap
var submap = ent.value_ptr.functions;
- fn_decl_count += submap.count();
- }
- const data_decl_count = self.data_decl_table.count();
- var unnamed_const_count: usize = 0;
- var it_unc = self.unnamed_const_atoms.iterator();
- while (it_unc.next()) |unnamed_consts| {
- unnamed_const_count += unnamed_consts.value_ptr.items.len;
+ fn_nav_count += submap.count();
}
+ const data_nav_count = self.data_nav_table.count();
var lazy_atom_count: usize = 0;
var it_lazy = self.lazy_syms.iterator();
while (it_lazy.next()) |kv| {
lazy_atom_count += kv.value_ptr.numberOfAtoms();
}
- const anon_atom_count = self.anon_decls.count();
+ const uav_atom_count = self.uavs.count();
const extern_atom_count = self.externCount();
- return data_decl_count + fn_decl_count + unnamed_const_count + lazy_atom_count + extern_atom_count + anon_atom_count;
+ return data_nav_count + fn_nav_count + lazy_atom_count + extern_atom_count + uav_atom_count;
}
pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: std.Progress.Node) link.File.FlushError!void {
@@ -700,7 +612,7 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
// anyerror needs to wait for everything to be flushed.
if (metadata.text_state != .unused) self.updateLazySymbolAtom(
pt,
- File.LazySymbol.initDecl(.code, null, pt.zcu),
+ .{ .kind = .code, .ty = .anyerror_type },
metadata.text_atom,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@@ -708,7 +620,7 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
};
if (metadata.rodata_state != .unused) self.updateLazySymbolAtom(
pt,
- File.LazySymbol.initDecl(.const_data, null, pt.zcu),
+ .{ .kind = .const_data, .ty = .anyerror_type },
metadata.rodata_atom,
) catch |err| return switch (err) {
error.CodegenFail => error.FlushFailure,
@@ -734,7 +646,7 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
var hdr_buf: [40]u8 = undefined;
// account for the fat header
- const hdr_size = if (self.sixtyfour_bit) @as(usize, 40) else 32;
+ const hdr_size: usize = if (self.sixtyfour_bit) 40 else 32;
const hdr_slice: []u8 = hdr_buf[0..hdr_size];
var foff = hdr_size;
iovecs[0] = .{ .base = hdr_slice.ptr, .len = hdr_slice.len };
@@ -746,13 +658,13 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
// text
{
var linecount: i64 = -1;
- var it_file = self.fn_decl_table.iterator();
+ var it_file = self.fn_nav_table.iterator();
while (it_file.next()) |fentry| {
var it = fentry.value_ptr.functions.iterator();
while (it.next()) |entry| {
- const decl_index = entry.key_ptr.*;
- const decl = pt.zcu.declPtr(decl_index);
- const atom = self.getAtomPtr(self.decls.get(decl_index).?.index);
+ const nav_index = entry.key_ptr.*;
+ const nav = pt.zcu.intern_pool.getNav(nav_index);
+ const atom = self.getAtomPtr(self.navs.get(nav_index).?.index);
const out = entry.value_ptr.*;
{
// connect the previous decl to the next
@@ -771,15 +683,15 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
const off = self.getAddr(text_i, .t);
text_i += out.code.len;
atom.offset = off;
- log.debug("write text decl {*} ({}), lines {d} to {d}.;__GOT+0x{x} vaddr: 0x{x}", .{ decl, decl.name.fmt(&pt.zcu.intern_pool), out.start_line + 1, out.end_line, atom.got_index.? * 8, off });
+ log.debug("write text nav 0x{x} ({}), lines {d} to {d}.;__GOT+0x{x} vaddr: 0x{x}", .{ nav_index, nav.name.fmt(&pt.zcu.intern_pool), out.start_line + 1, out.end_line, atom.got_index.? * 8, off });
if (!self.sixtyfour_bit) {
- mem.writeInt(u32, got_table[atom.got_index.? * 4 ..][0..4], @as(u32, @intCast(off)), target.cpu.arch.endian());
+ mem.writeInt(u32, got_table[atom.got_index.? * 4 ..][0..4], @intCast(off), target.cpu.arch.endian());
} else {
mem.writeInt(u64, got_table[atom.got_index.? * 8 ..][0..8], off, target.cpu.arch.endian());
}
self.syms.items[atom.sym_index.?].value = off;
- if (self.decl_exports.get(decl_index)) |export_indices| {
- try self.addDeclExports(pt.zcu, decl_index, export_indices);
+ if (self.nav_exports.get(nav_index)) |export_indices| {
+ try self.addNavExports(pt.zcu, nav_index, export_indices);
}
}
}
@@ -826,10 +738,10 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
// data
var data_i: u64 = got_size;
{
- var it = self.data_decl_table.iterator();
+ var it = self.data_nav_table.iterator();
while (it.next()) |entry| {
- const decl_index = entry.key_ptr.*;
- const atom = self.getAtomPtr(self.decls.get(decl_index).?.index);
+ const nav_index = entry.key_ptr.*;
+ const atom = self.getAtomPtr(self.navs.get(nav_index).?.index);
const code = entry.value_ptr.*;
foff += code.len;
@@ -844,35 +756,13 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
mem.writeInt(u64, got_table[atom.got_index.? * 8 ..][0..8], off, target.cpu.arch.endian());
}
self.syms.items[atom.sym_index.?].value = off;
- if (self.decl_exports.get(decl_index)) |export_indices| {
- try self.addDeclExports(pt.zcu, decl_index, export_indices);
+ if (self.nav_exports.get(nav_index)) |export_indices| {
+ try self.addNavExports(pt.zcu, nav_index, export_indices);
}
}
- // write the unnamed constants after the other data decls
- var it_unc = self.unnamed_const_atoms.iterator();
- while (it_unc.next()) |unnamed_consts| {
- for (unnamed_consts.value_ptr.items) |atom_idx| {
- const atom = self.getAtomPtr(atom_idx);
- const code = atom.code.getOwnedCode().?; // unnamed consts must own their code
- log.debug("write unnamed const: ({s})", .{self.syms.items[atom.sym_index.?].name});
- foff += code.len;
- iovecs[iovecs_i] = .{ .base = code.ptr, .len = code.len };
- iovecs_i += 1;
- const off = self.getAddr(data_i, .d);
- data_i += code.len;
- atom.offset = off;
- if (!self.sixtyfour_bit) {
- mem.writeInt(u32, got_table[atom.got_index.? * 4 ..][0..4], @as(u32, @intCast(off)), target.cpu.arch.endian());
- } else {
- mem.writeInt(u64, got_table[atom.got_index.? * 8 ..][0..8], off, target.cpu.arch.endian());
- }
- self.syms.items[atom.sym_index.?].value = off;
- }
- }
- // the anon decls
{
- var it_anon = self.anon_decls.iterator();
- while (it_anon.next()) |kv| {
+ var it_uav = self.uavs.iterator();
+ while (it_uav.next()) |kv| {
const atom = self.getAtomPtr(kv.value_ptr.*);
const code = atom.code.getOwnedCode().?;
log.debug("write anon decl: {s}", .{self.syms.items[atom.sym_index.?].name});
@@ -1011,14 +901,14 @@ pub fn flushModule(self: *Plan9, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
// write it all!
try file.pwritevAll(iovecs, 0);
}
-fn addDeclExports(
+fn addNavExports(
self: *Plan9,
mod: *Zcu,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
export_indices: []const u32,
) !void {
const gpa = self.base.comp.gpa;
- const metadata = self.decls.getPtr(decl_index).?;
+ const metadata = self.navs.getPtr(nav_index).?;
const atom = self.getAtom(metadata.index);
for (export_indices) |export_idx| {
@@ -1031,7 +921,7 @@ fn addDeclExports(
{
try mod.failed_exports.put(mod.gpa, export_idx, try Zcu.ErrorMsg.create(
gpa,
- mod.declPtr(decl_index).navSrcLoc(mod),
+ mod.navSrcLoc(nav_index),
"plan9 does not support extra sections",
.{},
));
@@ -1090,7 +980,6 @@ pub fn freeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) void {
}
kv.value.exports.deinit(gpa);
}
- self.freeUnnamedConsts(decl_index);
{
const atom_index = self.decls.get(decl_index).?.index;
const relocs = self.relocs.getPtr(atom_index) orelse return;
@@ -1098,18 +987,6 @@ pub fn freeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) void {
assert(self.relocs.remove(atom_index));
}
}
-fn freeUnnamedConsts(self: *Plan9, decl_index: InternPool.DeclIndex) void {
- const gpa = self.base.comp.gpa;
- const unnamed_consts = self.unnamed_const_atoms.getPtr(decl_index) orelse return;
- for (unnamed_consts.items) |atom_idx| {
- const atom = self.getAtom(atom_idx);
- gpa.free(self.syms.items[atom.sym_index.?].name);
- self.syms.items[atom.sym_index.?] = aout.Sym.undefined_symbol;
- self.syms_index_free_list.append(gpa, atom.sym_index.?) catch {};
- }
- unnamed_consts.clearAndFree(gpa);
-}
-
fn createAtom(self: *Plan9) !Atom.Index {
const gpa = self.base.comp.gpa;
const index = @as(Atom.Index, @intCast(self.atoms.items.len));
@@ -1124,9 +1001,11 @@ fn createAtom(self: *Plan9) !Atom.Index {
return index;
}
-pub fn seeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) !Atom.Index {
- const gpa = self.base.comp.gpa;
- const gop = try self.decls.getOrPut(gpa, decl_index);
+pub fn seeNav(self: *Plan9, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !Atom.Index {
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const gpa = zcu.gpa;
+ const gop = try self.navs.getOrPut(gpa, nav_index);
if (!gop.found_existing) {
const index = try self.createAtom();
self.getAtomPtr(index).got_index = self.allocateGotIndex();
@@ -1137,23 +1016,22 @@ pub fn seeDecl(self: *Plan9, decl_index: InternPool.DeclIndex) !Atom.Index {
}
const atom_idx = gop.value_ptr.index;
// handle externs here because they might not get updateDecl called on them
- const mod = self.base.comp.module.?;
- const decl = mod.declPtr(decl_index);
- if (decl.isExtern(mod)) {
+ const nav = ip.getNav(nav_index);
+ if (ip.indexToKey(nav.status.resolved.val) == .@"extern") {
// this is a "phantom atom" - it is never actually written to disk, just convenient for us to store stuff about externs
- if (decl.name.eqlSlice("etext", &mod.intern_pool)) {
+ if (nav.name.eqlSlice("etext", ip)) {
self.etext_edata_end_atom_indices[0] = atom_idx;
- } else if (decl.name.eqlSlice("edata", &mod.intern_pool)) {
+ } else if (nav.name.eqlSlice("edata", ip)) {
self.etext_edata_end_atom_indices[1] = atom_idx;
- } else if (decl.name.eqlSlice("end", &mod.intern_pool)) {
+ } else if (nav.name.eqlSlice("end", ip)) {
self.etext_edata_end_atom_indices[2] = atom_idx;
}
- try self.updateFinish(decl_index);
- log.debug("seeDecl(extern) for {} (got_addr=0x{x})", .{
- decl.name.fmt(&mod.intern_pool),
+ try self.updateFinish(pt, nav_index);
+ log.debug("seeNav(extern) for {} (got_addr=0x{x})", .{
+ nav.name.fmt(ip),
self.getAtom(atom_idx).getOffsetTableAddress(self),
});
- } else log.debug("seeDecl for {}", .{decl.name.fmt(&mod.intern_pool)});
+ } else log.debug("seeNav for {}", .{nav.name.fmt(ip)});
return atom_idx;
}
@@ -1165,45 +1043,41 @@ pub fn updateExports(
) !void {
const gpa = self.base.comp.gpa;
switch (exported) {
- .value => @panic("TODO: plan9 updateExports handling values"),
- .decl_index => |decl_index| {
- _ = try self.seeDecl(decl_index);
- if (self.decl_exports.fetchSwapRemove(decl_index)) |kv| {
+ .uav => @panic("TODO: plan9 updateExports handling values"),
+ .nav => |nav| {
+ _ = try self.seeNav(pt, nav);
+ if (self.nav_exports.fetchSwapRemove(nav)) |kv| {
gpa.free(kv.value);
}
- try self.decl_exports.ensureUnusedCapacity(gpa, 1);
+ try self.nav_exports.ensureUnusedCapacity(gpa, 1);
const duped_indices = try gpa.dupe(u32, export_indices);
- self.decl_exports.putAssumeCapacityNoClobber(decl_index, duped_indices);
+ self.nav_exports.putAssumeCapacityNoClobber(nav, duped_indices);
},
}
// all proper work is done in flush
- _ = pt;
}
-pub fn getOrCreateAtomForLazySymbol(self: *Plan9, pt: Zcu.PerThread, sym: File.LazySymbol) !Atom.Index {
- const gpa = pt.zcu.gpa;
- const gop = try self.lazy_syms.getOrPut(gpa, sym.getDecl(self.base.comp.module.?));
+pub fn getOrCreateAtomForLazySymbol(self: *Plan9, pt: Zcu.PerThread, lazy_sym: File.LazySymbol) !Atom.Index {
+ const gop = try self.lazy_syms.getOrPut(pt.zcu.gpa, lazy_sym.ty);
errdefer _ = if (!gop.found_existing) self.lazy_syms.pop();
if (!gop.found_existing) gop.value_ptr.* = .{};
- const metadata: struct { atom: *Atom.Index, state: *LazySymbolMetadata.State } = switch (sym.kind) {
- .code => .{ .atom = &gop.value_ptr.text_atom, .state = &gop.value_ptr.text_state },
- .const_data => .{ .atom = &gop.value_ptr.rodata_atom, .state = &gop.value_ptr.rodata_state },
+ const atom_ptr, const state_ptr = switch (lazy_sym.kind) {
+ .code => .{ &gop.value_ptr.text_atom, &gop.value_ptr.text_state },
+ .const_data => .{ &gop.value_ptr.rodata_atom, &gop.value_ptr.rodata_state },
};
- switch (metadata.state.*) {
- .unused => metadata.atom.* = try self.createAtom(),
- .pending_flush => return metadata.atom.*,
+ switch (state_ptr.*) {
+ .unused => atom_ptr.* = try self.createAtom(),
+ .pending_flush => return atom_ptr.*,
.flushed => {},
}
- metadata.state.* = .pending_flush;
- const atom = metadata.atom.*;
+ state_ptr.* = .pending_flush;
+ const atom = atom_ptr.*;
_ = try self.getAtomPtr(atom).getOrCreateSymbolTableEntry(self);
_ = self.getAtomPtr(atom).getOrCreateOffsetTableEntry(self);
// anyerror needs to be deferred until flushModule
- if (sym.getDecl(self.base.comp.module.?) != .none) {
- try self.updateLazySymbolAtom(pt, sym, atom);
- }
+ if (lazy_sym.ty != .anyerror_type) try self.updateLazySymbolAtom(pt, lazy_sym, atom);
return atom;
}
@@ -1217,7 +1091,7 @@ fn updateLazySymbolAtom(self: *Plan9, pt: Zcu.PerThread, sym: File.LazySymbol, a
// create the symbol for the name
const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{
@tagName(sym.kind),
- sym.ty.fmt(pt),
+ Type.fromInterned(sym.ty).fmt(pt),
});
const symbol: aout.Sym = .{
@@ -1228,7 +1102,7 @@ fn updateLazySymbolAtom(self: *Plan9, pt: Zcu.PerThread, sym: File.LazySymbol, a
self.syms.items[self.getAtomPtr(atom_index).sym_index.?] = symbol;
// generate the code
- const src = sym.ty.srcLocOrNull(pt.zcu) orelse Zcu.LazySrcLoc.unneeded;
+ const src = Type.fromInterned(sym.ty).srcLocOrNull(pt.zcu) orelse Zcu.LazySrcLoc.unneeded;
const res = try codegen.generateLazySymbol(
&self.base,
pt,
@@ -1264,12 +1138,6 @@ pub fn deinit(self: *Plan9) void {
}
self.relocs.deinit(gpa);
}
- // free the unnamed consts
- var it_unc = self.unnamed_const_atoms.iterator();
- while (it_unc.next()) |kv| {
- self.freeUnnamedConsts(kv.key_ptr.*);
- }
- self.unnamed_const_atoms.deinit(gpa);
var it_lzc = self.lazy_syms.iterator();
while (it_lzc.next()) |kv| {
if (kv.value_ptr.text_state != .unused)
@@ -1278,7 +1146,7 @@ pub fn deinit(self: *Plan9) void {
gpa.free(self.syms.items[self.getAtom(kv.value_ptr.rodata_atom).sym_index.?].name);
}
self.lazy_syms.deinit(gpa);
- var itf_files = self.fn_decl_table.iterator();
+ var itf_files = self.fn_nav_table.iterator();
while (itf_files.next()) |ent| {
// get the submap
var submap = ent.value_ptr.functions;
@@ -1289,21 +1157,21 @@ pub fn deinit(self: *Plan9) void {
gpa.free(entry.value_ptr.lineinfo);
}
}
- self.fn_decl_table.deinit(gpa);
- var itd = self.data_decl_table.iterator();
+ self.fn_nav_table.deinit(gpa);
+ var itd = self.data_nav_table.iterator();
while (itd.next()) |entry| {
gpa.free(entry.value_ptr.*);
}
- var it_anon = self.anon_decls.iterator();
- while (it_anon.next()) |entry| {
+ var it_uav = self.uavs.iterator();
+ while (it_uav.next()) |entry| {
const sym_index = self.getAtom(entry.value_ptr.*).sym_index.?;
gpa.free(self.syms.items[sym_index].name);
}
- self.data_decl_table.deinit(gpa);
- for (self.decl_exports.values()) |export_indices| {
+ self.data_nav_table.deinit(gpa);
+ for (self.nav_exports.values()) |export_indices| {
gpa.free(export_indices);
}
- self.decl_exports.deinit(gpa);
+ self.nav_exports.deinit(gpa);
self.syms.deinit(gpa);
self.got_index_free_list.deinit(gpa);
self.syms_index_free_list.deinit(gpa);
@@ -1317,11 +1185,11 @@ pub fn deinit(self: *Plan9) void {
self.atoms.deinit(gpa);
{
- var it = self.decls.iterator();
+ var it = self.navs.iterator();
while (it.next()) |entry| {
entry.value_ptr.exports.deinit(gpa);
}
- self.decls.deinit(gpa);
+ self.navs.deinit(gpa);
}
}
@@ -1402,17 +1270,17 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
// write the data symbols
{
- var it = self.data_decl_table.iterator();
+ var it = self.data_nav_table.iterator();
while (it.next()) |entry| {
- const decl_index = entry.key_ptr.*;
- const decl_metadata = self.decls.get(decl_index).?;
- const atom = self.getAtom(decl_metadata.index);
+ const nav_index = entry.key_ptr.*;
+ const nav_metadata = self.navs.get(nav_index).?;
+ const atom = self.getAtom(nav_metadata.index);
const sym = self.syms.items[atom.sym_index.?];
try self.writeSym(writer, sym);
- if (self.decl_exports.get(decl_index)) |export_indices| {
+ if (self.nav_exports.get(nav_index)) |export_indices| {
for (export_indices) |export_idx| {
const exp = mod.all_exports.items[export_idx];
- if (decl_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
+ if (nav_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
try self.writeSym(writer, self.syms.items[exp_i]);
}
}
@@ -1429,22 +1297,11 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
try self.writeSym(writer, sym);
}
}
- // unnamed consts
- {
- var it = self.unnamed_const_atoms.iterator();
- while (it.next()) |kv| {
- const consts = kv.value_ptr;
- for (consts.items) |atom_index| {
- const sym = self.syms.items[self.getAtom(atom_index).sym_index.?];
- try self.writeSym(writer, sym);
- }
- }
- }
// text symbols are the hardest:
// the file of a text symbol is the .z symbol before it
// so we have to write everything in the right order
{
- var it_file = self.fn_decl_table.iterator();
+ var it_file = self.fn_nav_table.iterator();
while (it_file.next()) |fentry| {
var symidx_and_submap = fentry.value_ptr;
// write the z symbols
@@ -1454,15 +1311,15 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
// write all the decls that come from the file of the z symbol
var submap_it = symidx_and_submap.functions.iterator();
while (submap_it.next()) |entry| {
- const decl_index = entry.key_ptr.*;
- const decl_metadata = self.decls.get(decl_index).?;
- const atom = self.getAtom(decl_metadata.index);
+ const nav_index = entry.key_ptr.*;
+ const nav_metadata = self.navs.get(nav_index).?;
+ const atom = self.getAtom(nav_metadata.index);
const sym = self.syms.items[atom.sym_index.?];
try self.writeSym(writer, sym);
- if (self.decl_exports.get(decl_index)) |export_indices| {
+ if (self.nav_exports.get(nav_index)) |export_indices| {
for (export_indices) |export_idx| {
const exp = mod.all_exports.items[export_idx];
- if (decl_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
+ if (nav_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
const s = self.syms.items[exp_i];
if (mem.eql(u8, s.name, "_start"))
self.entry_val = s.value;
@@ -1500,31 +1357,31 @@ pub fn updateDeclLineNumber(self: *Plan9, pt: Zcu.PerThread, decl_index: InternP
_ = decl_index;
}
-pub fn getDeclVAddr(
+pub fn getNavVAddr(
self: *Plan9,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
const ip = &pt.zcu.intern_pool;
- const decl = pt.zcu.declPtr(decl_index);
- log.debug("getDeclVAddr for {}", .{decl.name.fmt(ip)});
- if (decl.isExtern(pt.zcu)) {
- if (decl.name.eqlSlice("etext", ip)) {
+ const nav = ip.getNav(nav_index);
+ log.debug("getNavVAddr for {}", .{nav.name.fmt(ip)});
+ if (ip.indexToKey(nav.status.resolved.val) == .@"extern") {
+ if (nav.name.eqlSlice("etext", ip)) {
try self.addReloc(reloc_info.parent_atom_index, .{
.target = undefined,
.offset = reloc_info.offset,
.addend = reloc_info.addend,
.type = .special_etext,
});
- } else if (decl.name.eqlSlice("edata", ip)) {
+ } else if (nav.name.eqlSlice("edata", ip)) {
try self.addReloc(reloc_info.parent_atom_index, .{
.target = undefined,
.offset = reloc_info.offset,
.addend = reloc_info.addend,
.type = .special_edata,
});
- } else if (decl.name.eqlSlice("end", ip)) {
+ } else if (nav.name.eqlSlice("end", ip)) {
try self.addReloc(reloc_info.parent_atom_index, .{
.target = undefined,
.offset = reloc_info.offset,
@@ -1536,7 +1393,7 @@ pub fn getDeclVAddr(
return undefined;
}
// otherwise, we just add a relocation
- const atom_index = try self.seeDecl(decl_index);
+ const atom_index = try self.seeNav(pt, nav_index);
// the parent_atom_index in this case is just the decl_index of the parent
try self.addReloc(reloc_info.parent_atom_index, .{
.target = atom_index,
@@ -1546,15 +1403,14 @@ pub fn getDeclVAddr(
return undefined;
}
-pub fn lowerAnonDecl(
+pub fn lowerUav(
self: *Plan9,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Zcu.LazySrcLoc,
-) !codegen.Result {
+) !codegen.GenResult {
_ = explicit_alignment;
- // This is basically the same as lowerUnnamedConst.
// example:
// const ty = mod.intern_pool.typeOf(decl_val).toType();
// const val = decl_val.toValue();
@@ -1564,41 +1420,40 @@ pub fn lowerAnonDecl(
// to put it in some location.
// ...
const gpa = self.base.comp.gpa;
- const gop = try self.anon_decls.getOrPut(gpa, decl_val);
- if (!gop.found_existing) {
- const val = Value.fromInterned(decl_val);
- const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(decl_val)});
-
- const index = try self.createAtom();
- const got_index = self.allocateGotIndex();
- gop.value_ptr.* = index;
- // we need to free name latex
- var code_buffer = std.ArrayList(u8).init(gpa);
- const res = try codegen.generateSymbol(&self.base, pt, src_loc, val, &code_buffer, .{ .none = {} }, .{ .parent_atom_index = index });
- const code = switch (res) {
- .ok => code_buffer.items,
- .fail => |em| return .{ .fail = em },
- };
- const atom_ptr = self.getAtomPtr(index);
- atom_ptr.* = .{
- .type = .d,
- .offset = undefined,
- .sym_index = null,
- .got_index = got_index,
- .code = Atom.CodePtr.fromSlice(code),
- };
- _ = try atom_ptr.getOrCreateSymbolTableEntry(self);
- self.syms.items[atom_ptr.sym_index.?] = .{
- .type = .d,
- .value = undefined,
- .name = name,
- };
- }
- return .ok;
+ const gop = try self.uavs.getOrPut(gpa, uav);
+ if (gop.found_existing) return .{ .mcv = .{ .load_direct = gop.value_ptr.* } };
+ const val = Value.fromInterned(uav);
+ const name = try std.fmt.allocPrint(gpa, "__anon_{d}", .{@intFromEnum(uav)});
+
+ const index = try self.createAtom();
+ const got_index = self.allocateGotIndex();
+ gop.value_ptr.* = index;
+ // we need to free the name later
+ var code_buffer = std.ArrayList(u8).init(gpa);
+ const res = try codegen.generateSymbol(&self.base, pt, src_loc, val, &code_buffer, .{ .none = {} }, .{ .parent_atom_index = index });
+ const code = switch (res) {
+ .ok => code_buffer.items,
+ .fail => |em| return .{ .fail = em },
+ };
+ const atom_ptr = self.getAtomPtr(index);
+ atom_ptr.* = .{
+ .type = .d,
+ .offset = undefined,
+ .sym_index = null,
+ .got_index = got_index,
+ .code = Atom.CodePtr.fromSlice(code),
+ };
+ _ = try atom_ptr.getOrCreateSymbolTableEntry(self);
+ self.syms.items[atom_ptr.sym_index.?] = .{
+ .type = .d,
+ .value = undefined,
+ .name = name,
+ };
+ return .{ .mcv = .{ .load_direct = index } };
}
-pub fn getAnonDeclVAddr(self: *Plan9, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
- const atom_index = self.anon_decls.get(decl_val).?;
+pub fn getUavVAddr(self: *Plan9, uav: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
+ const atom_index = self.uavs.get(uav).?;
try self.addReloc(reloc_info.parent_atom_index, .{
.target = atom_index,
.offset = reloc_info.offset,
diff --git a/src/link/SpirV.zig b/src/link/SpirV.zig
index ce7e25824c..e97c80c3fe 100644
--- a/src/link/SpirV.zig
+++ b/src/link/SpirV.zig
@@ -36,6 +36,7 @@ const trace = @import("../tracy.zig").trace;
const build_options = @import("build_options");
const Air = @import("../Air.zig");
const Liveness = @import("../Liveness.zig");
+const Type = @import("../Type.zig");
const Value = @import("../Value.zig");
const SpvModule = @import("../codegen/spirv/Module.zig");
@@ -50,8 +51,6 @@ base: link.File,
object: codegen.Object,
-pub const base_tag: link.File.Tag = .spirv;
-
pub fn createEmpty(
arena: Allocator,
comp: *Compilation,
@@ -128,22 +127,22 @@ pub fn updateFunc(self: *SpirV, pt: Zcu.PerThread, func_index: InternPool.Index,
@panic("Attempted to compile for architecture that was disabled by build configuration");
}
+ const ip = &pt.zcu.intern_pool;
const func = pt.zcu.funcInfo(func_index);
- const decl = pt.zcu.declPtr(func.owner_decl);
- log.debug("lowering function {}", .{decl.name.fmt(&pt.zcu.intern_pool)});
+ log.debug("lowering function {}", .{ip.getNav(func.owner_nav).name.fmt(ip)});
try self.object.updateFunc(pt, func_index, air, liveness);
}
-pub fn updateDecl(self: *SpirV, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNav(self: *SpirV, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !void {
if (build_options.skip_non_native) {
@panic("Attempted to compile for architecture that was disabled by build configuration");
}
- const decl = pt.zcu.declPtr(decl_index);
- log.debug("lowering declaration {}", .{decl.name.fmt(&pt.zcu.intern_pool)});
+ const ip = &pt.zcu.intern_pool;
+ log.debug("lowering declaration {}", .{ip.getNav(nav).name.fmt(ip)});
- try self.object.updateDecl(pt, decl_index);
+ try self.object.updateNav(pt, nav);
}
pub fn updateExports(
@@ -152,19 +151,20 @@ pub fn updateExports(
exported: Zcu.Exported,
export_indices: []const u32,
) !void {
- const mod = pt.zcu;
- const decl_index = switch (exported) {
- .decl_index => |i| i,
- .value => |val| {
- _ = val;
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const nav_index = switch (exported) {
+ .nav => |nav| nav,
+ .uav => |uav| {
+ _ = uav;
@panic("TODO: implement SpirV linker code for exporting a constant value");
},
};
- const decl = mod.declPtr(decl_index);
- if (decl.val.isFuncBody(mod)) {
- const target = mod.getTarget();
- const spv_decl_index = try self.object.resolveDecl(mod, decl_index);
- const execution_model = switch (decl.typeOf(mod).fnCallingConvention(mod)) {
+ const nav_ty = ip.getNav(nav_index).typeOf(ip);
+ if (ip.isFunctionType(nav_ty)) {
+ const target = zcu.getTarget();
+ const spv_decl_index = try self.object.resolveNav(zcu, nav_index);
+ const execution_model = switch (Type.fromInterned(nav_ty).fnCallingConvention(zcu)) {
.Vertex => spec.ExecutionModel.Vertex,
.Fragment => spec.ExecutionModel.Fragment,
.Kernel => spec.ExecutionModel.Kernel,
@@ -177,10 +177,10 @@ pub fn updateExports(
(is_vulkan and (execution_model == .Fragment or execution_model == .Vertex)))
{
for (export_indices) |export_idx| {
- const exp = mod.all_exports.items[export_idx];
+ const exp = zcu.all_exports.items[export_idx];
try self.object.spv.declareEntryPoint(
spv_decl_index,
- exp.opts.name.toSlice(&mod.intern_pool),
+ exp.opts.name.toSlice(ip),
execution_model,
);
}
diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig
index 4f655f2ea9..87dd8c13f9 100644
--- a/src/link/Wasm.zig
+++ b/src/link/Wasm.zig
@@ -39,8 +39,6 @@ const ZigObject = @import("Wasm/ZigObject.zig");
pub const Atom = @import("Wasm/Atom.zig");
pub const Relocation = types.Relocation;
-pub const base_tag: link.File.Tag = .wasm;
-
base: link.File,
/// Symbol name of the entry function to export
entry_name: ?[]const u8,
@@ -1451,19 +1449,19 @@ pub fn updateFunc(wasm: *Wasm, pt: Zcu.PerThread, func_index: InternPool.Index,
try wasm.zigObjectPtr().?.updateFunc(wasm, pt, func_index, air, liveness);
}
-// Generate code for the Decl, storing it in memory to be later written to
+// Generate code for the "Nav", storing it in memory to be later written to
// the file on flush().
-pub fn updateDecl(wasm: *Wasm, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !void {
if (build_options.skip_non_native and builtin.object_format != .wasm) {
@panic("Attempted to compile for object format that was disabled by build configuration");
}
- if (wasm.llvm_object) |llvm_object| return llvm_object.updateDecl(pt, decl_index);
- try wasm.zigObjectPtr().?.updateDecl(wasm, pt, decl_index);
+ if (wasm.llvm_object) |llvm_object| return llvm_object.updateNav(pt, nav);
+ try wasm.zigObjectPtr().?.updateNav(wasm, pt, nav);
}
-pub fn updateDeclLineNumber(wasm: *Wasm, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !void {
+pub fn updateNavLineNumber(wasm: *Wasm, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !void {
if (wasm.llvm_object) |_| return;
- try wasm.zigObjectPtr().?.updateDeclLineNumber(pt, decl_index);
+ try wasm.zigObjectPtr().?.updateNavLineNumber(pt, nav);
}
/// From a given symbol location, returns its `wasm.GlobalType`.
@@ -1505,13 +1503,6 @@ fn getFunctionSignature(wasm: *const Wasm, loc: SymbolLoc) std.wasm.Type {
return wasm.func_types.items[wasm.functions.get(.{ .file = loc.file, .index = symbol.index }).?.func.type_index];
}
-/// Lowers a constant typed value to a local symbol and atom.
-/// Returns the symbol index of the local
-/// The given `decl` is the parent decl whom owns the constant.
-pub fn lowerUnnamedConst(wasm: *Wasm, pt: Zcu.PerThread, val: Value, decl_index: InternPool.DeclIndex) !u32 {
- return wasm.zigObjectPtr().?.lowerUnnamedConst(wasm, pt, val, decl_index);
-}
-
/// Returns the symbol index from a symbol of which its flag is set global,
/// such as an exported or imported symbol.
/// If the symbol does not yet exist, creates a new one symbol instead
@@ -1521,29 +1512,29 @@ pub fn getGlobalSymbol(wasm: *Wasm, name: []const u8, lib_name: ?[]const u8) !Sy
return wasm.zigObjectPtr().?.getGlobalSymbol(wasm.base.comp.gpa, name);
}
-/// For a given decl, find the given symbol index's atom, and create a relocation for the type.
+/// For a given `Nav`, find the given symbol index's atom, and create a relocation for the type.
/// Returns the given pointer address
-pub fn getDeclVAddr(
+pub fn getNavVAddr(
wasm: *Wasm,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav: InternPool.Nav.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
- return wasm.zigObjectPtr().?.getDeclVAddr(wasm, pt, decl_index, reloc_info);
+ return wasm.zigObjectPtr().?.getNavVAddr(wasm, pt, nav, reloc_info);
}
-pub fn lowerAnonDecl(
+pub fn lowerUav(
wasm: *Wasm,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: Alignment,
src_loc: Zcu.LazySrcLoc,
-) !codegen.Result {
- return wasm.zigObjectPtr().?.lowerAnonDecl(wasm, pt, decl_val, explicit_alignment, src_loc);
+) !codegen.GenResult {
+ return wasm.zigObjectPtr().?.lowerUav(wasm, pt, uav, explicit_alignment, src_loc);
}
-pub fn getAnonDeclVAddr(wasm: *Wasm, decl_val: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
- return wasm.zigObjectPtr().?.getAnonDeclVAddr(wasm, decl_val, reloc_info);
+pub fn getUavVAddr(wasm: *Wasm, uav: InternPool.Index, reloc_info: link.File.RelocInfo) !u64 {
+ return wasm.zigObjectPtr().?.getUavVAddr(wasm, uav, reloc_info);
}
pub fn deleteExport(
@@ -4018,11 +4009,11 @@ pub fn putOrGetFuncType(wasm: *Wasm, func_type: std.wasm.Type) !u32 {
return index;
}
-/// For the given `decl_index`, stores the corresponding type representing the function signature.
+/// For the given `nav`, stores the corresponding type representing the function signature.
/// Asserts declaration has an associated `Atom`.
/// Returns the index into the list of types.
-pub fn storeDeclType(wasm: *Wasm, decl_index: InternPool.DeclIndex, func_type: std.wasm.Type) !u32 {
- return wasm.zigObjectPtr().?.storeDeclType(wasm.base.comp.gpa, decl_index, func_type);
+pub fn storeNavType(wasm: *Wasm, nav: InternPool.Nav.Index, func_type: std.wasm.Type) !u32 {
+ return wasm.zigObjectPtr().?.storeDeclType(wasm.base.comp.gpa, nav, func_type);
}
/// Returns the symbol index of the error name table.
@@ -4036,8 +4027,8 @@ pub fn getErrorTableSymbol(wasm_file: *Wasm, pt: Zcu.PerThread) !u32 {
/// For a given `InternPool.DeclIndex` returns its corresponding `Atom.Index`.
/// When the index was not found, a new `Atom` will be created, and its index will be returned.
/// The newly created Atom is empty with default fields as specified by `Atom.empty`.
-pub fn getOrCreateAtomForDecl(wasm_file: *Wasm, pt: Zcu.PerThread, decl_index: InternPool.DeclIndex) !Atom.Index {
- return wasm_file.zigObjectPtr().?.getOrCreateAtomForDecl(wasm_file, pt, decl_index);
+pub fn getOrCreateAtomForNav(wasm_file: *Wasm, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !Atom.Index {
+ return wasm_file.zigObjectPtr().?.getOrCreateAtomForNav(wasm_file, pt, nav);
}
/// Verifies all resolved symbols and checks whether itself needs to be marked alive,
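// Sketch of the `lowerAnonDecl` -> `lowerUav` return-type change shown in the
// Wasm.zig hunk above. These are simplified stand-in types, not the compiler's
// real `codegen` namespace: the success case now carries how callers should
// reference the lowered value (a symbol to load) rather than a bare `.ok` that
// forced a second lookup of the atom.
const std = @import("std");

const GenResult = union(enum) {
    mcv: union(enum) {
        load_symbol: u32,
    },
    fail: []const u8, // stand-in for `*Zcu.ErrorMsg`
};

fn loweredUav(sym_index: u32) GenResult {
    return .{ .mcv = .{ .load_symbol = sym_index } };
}

test loweredUav {
    try std.testing.expect(loweredUav(7).mcv.load_symbol == 7);
}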
diff --git a/src/link/Wasm/ZigObject.zig b/src/link/Wasm/ZigObject.zig
index e3c257cd70..3424006523 100644
--- a/src/link/Wasm/ZigObject.zig
+++ b/src/link/Wasm/ZigObject.zig
@@ -6,9 +6,9 @@
path: []const u8,
/// Index within the list of relocatable objects of the linker driver.
index: File.Index,
-/// Map of all `Decl` that are currently alive.
-/// Each index maps to the corresponding `DeclInfo`.
-decls_map: std.AutoHashMapUnmanaged(InternPool.DeclIndex, DeclInfo) = .{},
+/// Map of all `Nav`s that are currently alive.
+/// Each index maps to the corresponding `NavInfo`.
+navs: std.AutoHashMapUnmanaged(InternPool.Nav.Index, NavInfo) = .{},
/// List of function type signatures for this Zig module.
func_types: std.ArrayListUnmanaged(std.wasm.Type) = .{},
/// List of `std.wasm.Func`. Each entry contains the function signature,
@@ -36,7 +36,7 @@ segment_free_list: std.ArrayListUnmanaged(u32) = .{},
/// File encapsulated string table, used to deduplicate strings within the generated file.
string_table: StringTable = .{},
/// Map for storing anonymous declarations. Each anonymous decl maps to its Atom's index.
-anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, Atom.Index) = .{},
+uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Atom.Index) = .{},
/// List of atom indexes of functions that are generated by the backend.
synthetic_functions: std.ArrayListUnmanaged(Atom.Index) = .{},
/// Represents the symbol index of the error name table
@@ -86,12 +86,12 @@ debug_str_index: ?u32 = null,
/// The index of the segment representing the custom '.debug_pubtypes' section.
debug_abbrev_index: ?u32 = null,
-const DeclInfo = struct {
+const NavInfo = struct {
atom: Atom.Index = .null,
exports: std.ArrayListUnmanaged(Symbol.Index) = .{},
- fn @"export"(di: DeclInfo, zig_object: *const ZigObject, name: []const u8) ?Symbol.Index {
- for (di.exports.items) |sym_index| {
+ fn @"export"(ni: NavInfo, zig_object: *const ZigObject, name: []const u8) ?Symbol.Index {
+ for (ni.exports.items) |sym_index| {
const sym_name_index = zig_object.symbol(sym_index).name;
const sym_name = zig_object.string_table.getAssumeExists(sym_name_index);
if (std.mem.eql(u8, name, sym_name)) {
@@ -101,14 +101,14 @@ const DeclInfo = struct {
return null;
}
- fn appendExport(di: *DeclInfo, gpa: std.mem.Allocator, sym_index: Symbol.Index) !void {
- return di.exports.append(gpa, sym_index);
+ fn appendExport(ni: *NavInfo, gpa: std.mem.Allocator, sym_index: Symbol.Index) !void {
+ return ni.exports.append(gpa, sym_index);
}
- fn deleteExport(di: *DeclInfo, sym_index: Symbol.Index) void {
- for (di.exports.items, 0..) |idx, index| {
+ fn deleteExport(ni: *NavInfo, sym_index: Symbol.Index) void {
+ for (ni.exports.items, 0..) |idx, index| {
if (idx == sym_index) {
- _ = di.exports.swapRemove(index);
+ _ = ni.exports.swapRemove(index);
return;
}
}
@@ -155,19 +155,19 @@ pub fn deinit(zig_object: *ZigObject, wasm_file: *Wasm) void {
}
{
- var it = zig_object.decls_map.valueIterator();
- while (it.next()) |decl_info| {
- const atom = wasm_file.getAtomPtr(decl_info.atom);
+ var it = zig_object.navs.valueIterator();
+ while (it.next()) |nav_info| {
+ const atom = wasm_file.getAtomPtr(nav_info.atom);
for (atom.locals.items) |local_index| {
const local_atom = wasm_file.getAtomPtr(local_index);
local_atom.deinit(gpa);
}
atom.deinit(gpa);
- decl_info.exports.deinit(gpa);
+ nav_info.exports.deinit(gpa);
}
}
{
- for (zig_object.anon_decls.values()) |atom_index| {
+ for (zig_object.uavs.values()) |atom_index| {
const atom = wasm_file.getAtomPtr(atom_index);
for (atom.locals.items) |local_index| {
const local_atom = wasm_file.getAtomPtr(local_index);
@@ -201,8 +201,8 @@ pub fn deinit(zig_object: *ZigObject, wasm_file: *Wasm) void {
zig_object.atom_types.deinit(gpa);
zig_object.functions.deinit(gpa);
zig_object.imports.deinit(gpa);
- zig_object.decls_map.deinit(gpa);
- zig_object.anon_decls.deinit(gpa);
+ zig_object.navs.deinit(gpa);
+ zig_object.uavs.deinit(gpa);
zig_object.symbols.deinit(gpa);
zig_object.symbols_free_list.deinit(gpa);
zig_object.segment_info.deinit(gpa);
@@ -236,34 +236,35 @@ pub fn allocateSymbol(zig_object: *ZigObject, gpa: std.mem.Allocator) !Symbol.In
return index;
}
-// Generate code for the Decl, storing it in memory to be later written to
+// Generate code for the `Nav`, storing it in memory to be later written to
// the file on flush().
-pub fn updateDecl(
+pub fn updateNav(
zig_object: *ZigObject,
wasm_file: *Wasm,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
) !void {
- const mod = pt.zcu;
- const decl = mod.declPtr(decl_index);
- if (decl.val.getFunction(mod)) |_| {
- return;
- } else if (decl.val.getExternFunc(mod)) |_| {
- return;
- }
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const nav = ip.getNav(nav_index);
+
+ const is_extern, const lib_name, const nav_init = switch (ip.indexToKey(nav.status.resolved.val)) {
+ .variable => |variable| .{ false, variable.lib_name, variable.init },
+ .func => return,
+ .@"extern" => |@"extern"| if (ip.isFunctionType(nav.typeOf(ip)))
+ return
+ else
+ .{ true, @"extern".lib_name, nav.status.resolved.val },
+ else => .{ false, .none, nav.status.resolved.val },
+ };
const gpa = wasm_file.base.comp.gpa;
- const atom_index = try zig_object.getOrCreateAtomForDecl(wasm_file, pt, decl_index);
+ const atom_index = try zig_object.getOrCreateAtomForNav(wasm_file, pt, nav_index);
const atom = wasm_file.getAtomPtr(atom_index);
atom.clear();
- if (decl.isExtern(mod)) {
- const variable = decl.getOwnedVariable(mod).?;
- const name = decl.name.toSlice(&mod.intern_pool);
- const lib_name = variable.lib_name.toSlice(&mod.intern_pool);
- return zig_object.addOrUpdateImport(wasm_file, name, atom.sym_index, lib_name, null);
- }
- const val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
+ if (is_extern)
+ return zig_object.addOrUpdateImport(wasm_file, nav.name.toSlice(ip), atom.sym_index, lib_name.toSlice(ip), null);
var code_writer = std.ArrayList(u8).init(gpa);
defer code_writer.deinit();
@@ -271,8 +272,8 @@ pub fn updateDecl(
const res = try codegen.generateSymbol(
&wasm_file.base,
pt,
- decl.navSrcLoc(mod),
- val,
+ zcu.navSrcLoc(nav_index),
+ Value.fromInterned(nav_init),
&code_writer,
.none,
.{ .parent_atom_index = @intFromEnum(atom.sym_index) },
@@ -281,13 +282,12 @@ pub fn updateDecl(
const code = switch (res) {
.ok => code_writer.items,
.fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try zcu.failed_codegen.put(zcu.gpa, nav_index, em);
return;
},
};
- return zig_object.finishUpdateDecl(wasm_file, pt, decl_index, code);
+ return zig_object.finishUpdateNav(wasm_file, pt, nav_index, code);
}
pub fn updateFunc(
@@ -298,11 +298,10 @@ pub fn updateFunc(
air: Air,
liveness: Liveness,
) !void {
- const gpa = wasm_file.base.comp.gpa;
+ const zcu = pt.zcu;
+ const gpa = zcu.gpa;
const func = pt.zcu.funcInfo(func_index);
- const decl_index = func.owner_decl;
- const decl = pt.zcu.declPtr(decl_index);
- const atom_index = try zig_object.getOrCreateAtomForDecl(wasm_file, pt, decl_index);
+ const atom_index = try zig_object.getOrCreateAtomForNav(wasm_file, pt, func.owner_nav);
const atom = wasm_file.getAtomPtr(atom_index);
atom.clear();
@@ -311,7 +310,7 @@ pub fn updateFunc(
const result = try codegen.generateFunction(
&wasm_file.base,
pt,
- decl.navSrcLoc(pt.zcu),
+ zcu.navSrcLoc(func.owner_nav),
func_index,
air,
liveness,
@@ -322,79 +321,75 @@ pub fn updateFunc(
const code = switch (result) {
.ok => code_writer.items,
.fail => |em| {
- decl.analysis = .codegen_failure;
- try pt.zcu.failed_analysis.put(gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
+ try pt.zcu.failed_codegen.put(gpa, func.owner_nav, em);
return;
},
};
- return zig_object.finishUpdateDecl(wasm_file, pt, decl_index, code);
+ return zig_object.finishUpdateNav(wasm_file, pt, func.owner_nav, code);
}
-fn finishUpdateDecl(
+fn finishUpdateNav(
zig_object: *ZigObject,
wasm_file: *Wasm,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
code: []const u8,
) !void {
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const gpa = zcu.gpa;
- const decl = zcu.declPtr(decl_index);
- const decl_info = zig_object.decls_map.get(decl_index).?;
- const atom_index = decl_info.atom;
+ const nav = ip.getNav(nav_index);
+ const nav_val = zcu.navValue(nav_index);
+ const nav_info = zig_object.navs.get(nav_index).?;
+ const atom_index = nav_info.atom;
const atom = wasm_file.getAtomPtr(atom_index);
const sym = zig_object.symbol(atom.sym_index);
- sym.name = try zig_object.string_table.insert(gpa, decl.fqn.toSlice(ip));
+ sym.name = try zig_object.string_table.insert(gpa, nav.fqn.toSlice(ip));
try atom.code.appendSlice(gpa, code);
atom.size = @intCast(code.len);
- switch (decl.typeOf(zcu).zigTypeTag(zcu)) {
- .Fn => {
- sym.index = try zig_object.appendFunction(gpa, .{ .type_index = zig_object.atom_types.get(atom_index).? });
- sym.tag = .function;
- },
- else => {
- const segment_name: []const u8 = if (decl.getOwnedVariable(zcu)) |variable| name: {
- if (variable.is_const) {
- break :name ".rodata.";
- } else if (Value.fromInterned(variable.init).isUndefDeep(zcu)) {
- const decl_namespace = zcu.namespacePtr(decl.src_namespace);
- const optimize_mode = decl_namespace.fileScope(zcu).mod.optimize_mode;
- const is_initialized = switch (optimize_mode) {
- .Debug, .ReleaseSafe => true,
- .ReleaseFast, .ReleaseSmall => false,
- };
- if (is_initialized) {
- break :name ".data.";
- }
- break :name ".bss.";
- }
- // when the decl is all zeroes, we store the atom in the bss segment,
- // in all other cases it will be in the data segment.
- for (atom.code.items) |byte| {
- if (byte != 0) break :name ".data.";
- }
- break :name ".bss.";
- } else ".rodata.";
- if ((wasm_file.base.isObject() or wasm_file.base.comp.config.import_memory) and
- std.mem.startsWith(u8, segment_name, ".bss"))
- {
- @memset(atom.code.items, 0);
+ if (ip.isFunctionType(nav.typeOf(ip))) {
+ sym.index = try zig_object.appendFunction(gpa, .{ .type_index = zig_object.atom_types.get(atom_index).? });
+ sym.tag = .function;
+ } else {
+ const is_const, const nav_init = switch (ip.indexToKey(nav_val.toIntern())) {
+ .variable => |variable| .{ false, variable.init },
+ .@"extern" => |@"extern"| .{ @"extern".is_const, .none },
+ else => .{ true, nav_val.toIntern() },
+ };
+ const segment_name = name: {
+ if (is_const) break :name ".rodata.";
+
+ if (nav_init != .none and Value.fromInterned(nav_init).isUndefDeep(zcu)) {
+ break :name switch (zcu.navFileScope(nav_index).mod.optimize_mode) {
+ .Debug, .ReleaseSafe => ".data.",
+ .ReleaseFast, .ReleaseSmall => ".bss.",
+ };
}
- // Will be freed upon freeing of decl or after cleanup of Wasm binary.
- const full_segment_name = try std.mem.concat(gpa, u8, &.{
- segment_name,
- decl.fqn.toSlice(ip),
- });
- errdefer gpa.free(full_segment_name);
- sym.tag = .data;
- sym.index = try zig_object.createDataSegment(gpa, full_segment_name, decl.alignment);
- },
+            // When the value is all zeroes, we store the atom in the bss segment;
+            // in all other cases it will be in the data segment.
+ for (atom.code.items) |byte| {
+ if (byte != 0) break :name ".data.";
+ }
+ break :name ".bss.";
+ };
+ if ((wasm_file.base.isObject() or wasm_file.base.comp.config.import_memory) and
+ std.mem.startsWith(u8, segment_name, ".bss"))
+ {
+ @memset(atom.code.items, 0);
+ }
+    // Will be freed upon freeing of the `Nav` or after cleanup of the Wasm binary.
+ const full_segment_name = try std.mem.concat(gpa, u8, &.{
+ segment_name,
+ nav.fqn.toSlice(ip),
+ });
+ errdefer gpa.free(full_segment_name);
+ sym.tag = .data;
+ sym.index = try zig_object.createDataSegment(gpa, full_segment_name, pt.navAlignment(nav_index));
}
if (code.len == 0) return;
- atom.alignment = decl.getAlignment(pt);
+ atom.alignment = pt.navAlignment(nav_index);
}
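// Self-contained sketch of the segment-name selection that `finishUpdateNav`
// now performs above: constants go to `.rodata.`, an undef initializer picks
// `.data.` or `.bss.` depending on optimize mode, and otherwise all-zero bytes
// pick `.bss.` over `.data.`. The inputs are simplified; the real code derives
// `is_const` and the initializer from the intern pool.
const std = @import("std");

const OptimizeMode = enum { Debug, ReleaseSafe, ReleaseFast, ReleaseSmall };

fn segmentName(
    is_const: bool,
    init_is_undef: bool,
    optimize_mode: OptimizeMode,
    code: []const u8,
) []const u8 {
    if (is_const) return ".rodata.";
    if (init_is_undef) return switch (optimize_mode) {
        // In safe modes undef memory typically carries a nonzero fill
        // pattern, so it cannot live in bss.
        .Debug, .ReleaseSafe => ".data.",
        .ReleaseFast, .ReleaseSmall => ".bss.",
    };
    for (code) |byte| {
        if (byte != 0) return ".data.";
    }
    return ".bss.";
}

test segmentName {
    try std.testing.expectEqualStrings(".bss.", segmentName(false, false, .Debug, &.{ 0, 0 }));
    try std.testing.expectEqualStrings(".rodata.", segmentName(true, false, .Debug, &.{1}));
}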
/// Creates and initializes a new segment in the 'Data' section.
@@ -420,50 +415,51 @@ fn createDataSegment(
return segment_index;
}
-/// For a given `InternPool.DeclIndex` returns its corresponding `Atom.Index`.
+/// For a given `InternPool.Nav.Index` returns its corresponding `Atom.Index`.
/// When the index was not found, a new `Atom` will be created, and its index will be returned.
/// The newly created Atom is empty with default fields as specified by `Atom.empty`.
-pub fn getOrCreateAtomForDecl(
+pub fn getOrCreateAtomForNav(
zig_object: *ZigObject,
wasm_file: *Wasm,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
) !Atom.Index {
+ const ip = &pt.zcu.intern_pool;
const gpa = pt.zcu.gpa;
- const gop = try zig_object.decls_map.getOrPut(gpa, decl_index);
+ const gop = try zig_object.navs.getOrPut(gpa, nav_index);
if (!gop.found_existing) {
const sym_index = try zig_object.allocateSymbol(gpa);
gop.value_ptr.* = .{ .atom = try wasm_file.createAtom(sym_index, zig_object.index) };
- const decl = pt.zcu.declPtr(decl_index);
+ const nav = ip.getNav(nav_index);
const sym = zig_object.symbol(sym_index);
- sym.name = try zig_object.string_table.insert(gpa, decl.fqn.toSlice(&pt.zcu.intern_pool));
+ sym.name = try zig_object.string_table.insert(gpa, nav.fqn.toSlice(ip));
}
return gop.value_ptr.atom;
}
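// Self-contained sketch of the `navs` map pattern used by
// `getOrCreateAtomForNav` above: per-`Nav` linker state is keyed by a `Nav`
// index, and the atom is created lazily on first use via `getOrPut`.
// `NavIndex`, `AtomIndex`, and `NavInfo` are simplified stand-ins; the real
// `NavInfo` also tracks the symbols exported from that `Nav`.
const std = @import("std");

const NavIndex = enum(u32) { _ };
const AtomIndex = enum(u32) { _ };
const NavInfo = struct { atom: AtomIndex };

fn getOrCreateAtomForNav(
    gpa: std.mem.Allocator,
    navs: *std.AutoHashMapUnmanaged(NavIndex, NavInfo),
    next_atom: *u32,
    nav: NavIndex,
) !AtomIndex {
    const gop = try navs.getOrPut(gpa, nav);
    if (!gop.found_existing) {
        // Stand-in for `wasm_file.createAtom` plus symbol allocation.
        gop.value_ptr.* = .{ .atom = @enumFromInt(next_atom.*) };
        next_atom.* += 1;
    }
    return gop.value_ptr.atom;
}

test getOrCreateAtomForNav {
    const gpa = std.testing.allocator;
    var navs: std.AutoHashMapUnmanaged(NavIndex, NavInfo) = .{};
    defer navs.deinit(gpa);
    var next_atom: u32 = 0;
    const a = try getOrCreateAtomForNav(gpa, &navs, &next_atom, @enumFromInt(3));
    const b = try getOrCreateAtomForNav(gpa, &navs, &next_atom, @enumFromInt(3));
    try std.testing.expect(a == b);
}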
-pub fn lowerAnonDecl(
+pub fn lowerUav(
zig_object: *ZigObject,
wasm_file: *Wasm,
pt: Zcu.PerThread,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Zcu.LazySrcLoc,
-) !codegen.Result {
+) !codegen.GenResult {
const gpa = wasm_file.base.comp.gpa;
- const gop = try zig_object.anon_decls.getOrPut(gpa, decl_val);
+ const gop = try zig_object.uavs.getOrPut(gpa, uav);
if (!gop.found_existing) {
var name_buf: [32]u8 = undefined;
const name = std.fmt.bufPrint(&name_buf, "__anon_{d}", .{
- @intFromEnum(decl_val),
+ @intFromEnum(uav),
}) catch unreachable;
- switch (try zig_object.lowerConst(wasm_file, pt, name, Value.fromInterned(decl_val), src_loc)) {
- .ok => |atom_index| zig_object.anon_decls.values()[gop.index] = atom_index,
+ switch (try zig_object.lowerConst(wasm_file, pt, name, Value.fromInterned(uav), src_loc)) {
+ .ok => |atom_index| zig_object.uavs.values()[gop.index] = atom_index,
.fail => |em| return .{ .fail = em },
}
}
- const atom = wasm_file.getAtomPtr(zig_object.anon_decls.values()[gop.index]);
+ const atom = wasm_file.getAtomPtr(zig_object.uavs.values()[gop.index]);
atom.alignment = switch (atom.alignment) {
.none => explicit_alignment,
else => switch (explicit_alignment) {
@@ -471,53 +467,7 @@ pub fn lowerAnonDecl(
else => atom.alignment.maxStrict(explicit_alignment),
},
};
- return .ok;
-}
-
-/// Lowers a constant typed value to a local symbol and atom.
-/// Returns the symbol index of the local
-/// The given `decl` is the parent decl whom owns the constant.
-pub fn lowerUnnamedConst(
- zig_object: *ZigObject,
- wasm_file: *Wasm,
- pt: Zcu.PerThread,
- val: Value,
- decl_index: InternPool.DeclIndex,
-) !u32 {
- const mod = pt.zcu;
- const gpa = mod.gpa;
- std.debug.assert(val.typeOf(mod).zigTypeTag(mod) != .Fn); // cannot create local symbols for functions
- const decl = mod.declPtr(decl_index);
-
- const parent_atom_index = try zig_object.getOrCreateAtomForDecl(wasm_file, pt, decl_index);
- const parent_atom = wasm_file.getAtom(parent_atom_index);
- const local_index = parent_atom.locals.items.len;
- const name = try std.fmt.allocPrintZ(gpa, "__unnamed_{}_{d}", .{
- decl.fqn.fmt(&mod.intern_pool), local_index,
- });
- defer gpa.free(name);
-
- // We want to lower the source location of `decl`. However, when generating
- // lazy functions (for e.g. `@tagName`), `decl` may correspond to a type
- // rather than a `Nav`!
- // The future split of `Decl` into `Nav` and `Cau` may require rethinking this
- // logic. For now, just get the source location conditionally as needed.
- const decl_src = if (decl.typeOf(mod).toIntern() == .type_type)
- decl.val.toType().srcLoc(mod)
- else
- decl.navSrcLoc(mod);
-
- switch (try zig_object.lowerConst(wasm_file, pt, name, val, decl_src)) {
- .ok => |atom_index| {
- try wasm_file.getAtomPtr(parent_atom_index).locals.append(gpa, atom_index);
- return @intFromEnum(wasm_file.getAtom(atom_index).sym_index);
- },
- .fail => |em| {
- decl.analysis = .codegen_failure;
- try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
- return error.CodegenFail;
- },
- }
+ return .{ .mcv = .{ .load_symbol = @intFromEnum(atom.sym_index) } };
}
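// Sketch of the alignment merge that `lowerUav` performs when the same uav is
// referenced again with an explicit alignment: `.none` means "use ABI
// alignment", otherwise the stricter (larger) requirement wins. `Alignment` is
// a simplified stand-in (value = log2 of the alignment) for
// `InternPool.Alignment`.
const std = @import("std");

const Alignment = enum(u8) {
    none = 255,
    _,

    // Stand-in for the real `maxStrict`: both operands must be concrete.
    fn maxStrict(a: Alignment, b: Alignment) Alignment {
        std.debug.assert(a != .none and b != .none);
        return @enumFromInt(@max(@intFromEnum(a), @intFromEnum(b)));
    }
};

fn mergeUavAlignment(current: Alignment, explicit: Alignment) Alignment {
    return switch (current) {
        .none => explicit,
        else => switch (explicit) {
            .none => current,
            else => current.maxStrict(explicit),
        },
    };
}

test mergeUavAlignment {
    const four: Alignment = @enumFromInt(2); // log2(4)
    const sixteen: Alignment = @enumFromInt(4); // log2(16)
    try std.testing.expect(mergeUavAlignment(four, sixteen) == sixteen);
    try std.testing.expect(mergeUavAlignment(.none, four) == four);
}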
const LowerConstResult = union(enum) {
@@ -782,36 +732,38 @@ pub fn getGlobalSymbol(zig_object: *ZigObject, gpa: std.mem.Allocator, name: []c
/// For a given decl, find the given symbol index's atom, and create a relocation for the type.
/// Returns the given pointer address
-pub fn getDeclVAddr(
+pub fn getNavVAddr(
zig_object: *ZigObject,
wasm_file: *Wasm,
pt: Zcu.PerThread,
- decl_index: InternPool.DeclIndex,
+ nav_index: InternPool.Nav.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
- const target = wasm_file.base.comp.root_mod.resolved_target.result;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const gpa = zcu.gpa;
- const decl = zcu.declPtr(decl_index);
+ const nav = ip.getNav(nav_index);
+ const target = &zcu.navFileScope(nav_index).mod.resolved_target.result;
- const target_atom_index = try zig_object.getOrCreateAtomForDecl(wasm_file, pt, decl_index);
+ const target_atom_index = try zig_object.getOrCreateAtomForNav(wasm_file, pt, nav_index);
const target_atom = wasm_file.getAtom(target_atom_index);
const target_symbol_index = @intFromEnum(target_atom.sym_index);
- if (decl.isExtern(zcu)) {
- const name = decl.name.toSlice(ip);
- const lib_name = if (decl.getOwnedExternFunc(zcu)) |ext_fn|
- ext_fn.lib_name.toSlice(ip)
- else
- decl.getOwnedVariable(zcu).?.lib_name.toSlice(ip);
- try zig_object.addOrUpdateImport(wasm_file, name, target_atom.sym_index, lib_name, null);
+ switch (ip.indexToKey(nav.status.resolved.val)) {
+ .@"extern" => |@"extern"| try zig_object.addOrUpdateImport(
+ wasm_file,
+ nav.name.toSlice(ip),
+ target_atom.sym_index,
+ @"extern".lib_name.toSlice(ip),
+ null,
+ ),
+ else => {},
}
std.debug.assert(reloc_info.parent_atom_index != 0);
const atom_index = wasm_file.symbol_atom.get(.{ .file = zig_object.index, .index = @enumFromInt(reloc_info.parent_atom_index) }).?;
const atom = wasm_file.getAtomPtr(atom_index);
const is_wasm32 = target.cpu.arch == .wasm32;
- if (decl.typeOf(pt.zcu).zigTypeTag(pt.zcu) == .Fn) {
+ if (ip.isFunctionType(ip.getNav(nav_index).typeOf(ip))) {
std.debug.assert(reloc_info.addend == 0); // addend not allowed for function relocations
try atom.relocs.append(gpa, .{
.index = target_symbol_index,
@@ -834,22 +786,22 @@ pub fn getDeclVAddr(
return target_symbol_index;
}
-pub fn getAnonDeclVAddr(
+pub fn getUavVAddr(
zig_object: *ZigObject,
wasm_file: *Wasm,
- decl_val: InternPool.Index,
+ uav: InternPool.Index,
reloc_info: link.File.RelocInfo,
) !u64 {
const gpa = wasm_file.base.comp.gpa;
const target = wasm_file.base.comp.root_mod.resolved_target.result;
- const atom_index = zig_object.anon_decls.get(decl_val).?;
+ const atom_index = zig_object.uavs.get(uav).?;
const target_symbol_index = @intFromEnum(wasm_file.getAtom(atom_index).sym_index);
const parent_atom_index = wasm_file.symbol_atom.get(.{ .file = zig_object.index, .index = @enumFromInt(reloc_info.parent_atom_index) }).?;
const parent_atom = wasm_file.getAtomPtr(parent_atom_index);
const is_wasm32 = target.cpu.arch == .wasm32;
const mod = wasm_file.base.comp.module.?;
- const ty = Type.fromInterned(mod.intern_pool.typeOf(decl_val));
+ const ty = Type.fromInterned(mod.intern_pool.typeOf(uav));
if (ty.zigTypeTag(mod) == .Fn) {
std.debug.assert(reloc_info.addend == 0); // addend not allowed for function relocations
try parent_atom.relocs.append(gpa, .{
@@ -880,14 +832,14 @@ pub fn deleteExport(
name: InternPool.NullTerminatedString,
) void {
const mod = wasm_file.base.comp.module.?;
- const decl_index = switch (exported) {
- .decl_index => |decl_index| decl_index,
- .value => @panic("TODO: implement Wasm linker code for exporting a constant value"),
+ const nav_index = switch (exported) {
+ .nav => |nav_index| nav_index,
+ .uav => @panic("TODO: implement Wasm linker code for exporting a constant value"),
};
- const decl_info = zig_object.decls_map.getPtr(decl_index) orelse return;
- if (decl_info.@"export"(zig_object, name.toSlice(&mod.intern_pool))) |sym_index| {
+ const nav_info = zig_object.navs.getPtr(nav_index) orelse return;
+ if (nav_info.@"export"(zig_object, name.toSlice(&mod.intern_pool))) |sym_index| {
const sym = zig_object.symbol(sym_index);
- decl_info.deleteExport(sym_index);
+ nav_info.deleteExport(sym_index);
std.debug.assert(zig_object.global_syms.remove(sym.name));
std.debug.assert(wasm_file.symbol_atom.remove(.{ .file = zig_object.index, .index = sym_index }));
zig_object.symbols_free_list.append(wasm_file.base.comp.gpa, sym_index) catch {};
@@ -902,38 +854,39 @@ pub fn updateExports(
exported: Zcu.Exported,
export_indices: []const u32,
) !void {
- const mod = pt.zcu;
- const decl_index = switch (exported) {
- .decl_index => |i| i,
- .value => |val| {
- _ = val;
+ const zcu = pt.zcu;
+ const ip = &zcu.intern_pool;
+ const nav_index = switch (exported) {
+ .nav => |nav| nav,
+ .uav => |uav| {
+ _ = uav;
@panic("TODO: implement Wasm linker code for exporting a constant value");
},
};
- const decl = mod.declPtr(decl_index);
- const atom_index = try zig_object.getOrCreateAtomForDecl(wasm_file, pt, decl_index);
- const decl_info = zig_object.decls_map.getPtr(decl_index).?;
+ const nav = ip.getNav(nav_index);
+ const atom_index = try zig_object.getOrCreateAtomForNav(wasm_file, pt, nav_index);
+ const nav_info = zig_object.navs.getPtr(nav_index).?;
const atom = wasm_file.getAtom(atom_index);
const atom_sym = atom.symbolLoc().getSymbol(wasm_file).*;
- const gpa = mod.gpa;
- log.debug("Updating exports for decl '{}'", .{decl.name.fmt(&mod.intern_pool)});
+ const gpa = zcu.gpa;
+ log.debug("Updating exports for decl '{}'", .{nav.name.fmt(ip)});
for (export_indices) |export_idx| {
- const exp = mod.all_exports.items[export_idx];
- if (exp.opts.section.toSlice(&mod.intern_pool)) |section| {
- try mod.failed_exports.putNoClobber(gpa, export_idx, try Zcu.ErrorMsg.create(
+ const exp = zcu.all_exports.items[export_idx];
+ if (exp.opts.section.toSlice(ip)) |section| {
+ try zcu.failed_exports.putNoClobber(gpa, export_idx, try Zcu.ErrorMsg.create(
gpa,
- decl.navSrcLoc(mod),
+ zcu.navSrcLoc(nav_index),
"Unimplemented: ExportOptions.section '{s}'",
.{section},
));
continue;
}
- const export_string = exp.opts.name.toSlice(&mod.intern_pool);
- const sym_index = if (decl_info.@"export"(zig_object, export_string)) |idx| idx else index: {
+ const export_string = exp.opts.name.toSlice(ip);
+ const sym_index = if (nav_info.@"export"(zig_object, export_string)) |idx| idx else index: {
const sym_index = try zig_object.allocateSymbol(gpa);
- try decl_info.appendExport(gpa, sym_index);
+ try nav_info.appendExport(gpa, sym_index);
break :index sym_index;
};
@@ -954,9 +907,9 @@ pub fn updateExports(
},
.strong => {}, // symbols are strong by default
.link_once => {
- try mod.failed_exports.putNoClobber(gpa, export_idx, try Zcu.ErrorMsg.create(
+ try zcu.failed_exports.putNoClobber(gpa, export_idx, try Zcu.ErrorMsg.create(
gpa,
- decl.navSrcLoc(mod),
+ zcu.navSrcLoc(nav_index),
"Unimplemented: LinkOnce",
.{},
));
@@ -972,21 +925,21 @@ pub fn updateExports(
}
}
-pub fn freeDecl(zig_object: *ZigObject, wasm_file: *Wasm, decl_index: InternPool.DeclIndex) void {
+pub fn freeNav(zig_object: *ZigObject, wasm_file: *Wasm, nav_index: InternPool.Nav.Index) void {
const gpa = wasm_file.base.comp.gpa;
const mod = wasm_file.base.comp.module.?;
- const decl = mod.declPtr(decl_index);
- const decl_info = zig_object.decls_map.getPtr(decl_index).?;
- const atom_index = decl_info.atom;
+ const ip = &mod.intern_pool;
+ const nav_info = zig_object.navs.getPtr(nav_index).?;
+ const atom_index = nav_info.atom;
const atom = wasm_file.getAtomPtr(atom_index);
zig_object.symbols_free_list.append(gpa, atom.sym_index) catch {};
- for (decl_info.exports.items) |exp_sym_index| {
+ for (nav_info.exports.items) |exp_sym_index| {
const exp_sym = zig_object.symbol(exp_sym_index);
exp_sym.tag = .dead;
        zig_object.symbols_free_list.append(gpa, exp_sym_index) catch {};
}
- decl_info.exports.deinit(gpa);
- std.debug.assert(zig_object.decls_map.remove(decl_index));
+ nav_info.exports.deinit(gpa);
+ std.debug.assert(zig_object.navs.remove(nav_index));
const sym = &zig_object.symbols.items[atom.sym_index];
for (atom.locals.items) |local_atom_index| {
const local_atom = wasm_file.getAtom(local_atom_index);
@@ -1000,7 +953,8 @@ pub fn freeDecl(zig_object: *ZigObject, wasm_file: *Wasm, decl_index: InternPool
segment.name = &.{}; // Ensure no accidental double free
}
- if (decl.isExtern(mod)) {
+ const nav_val = mod.navValue(nav_index).toIntern();
+ if (ip.indexToKey(nav_val) == .@"extern") {
std.debug.assert(zig_object.imports.remove(atom.sym_index));
}
std.debug.assert(wasm_file.symbol_atom.remove(atom.symbolLoc()));
@@ -1014,17 +968,14 @@ pub fn freeDecl(zig_object: *ZigObject, wasm_file: *Wasm, decl_index: InternPool
if (sym.isGlobal()) {
std.debug.assert(zig_object.global_syms.remove(atom.sym_index));
}
- switch (decl.typeOf(mod).zigTypeTag(mod)) {
- .Fn => {
- zig_object.functions_free_list.append(gpa, sym.index) catch {};
- std.debug.assert(zig_object.atom_types.remove(atom_index));
- },
- else => {
- zig_object.segment_free_list.append(gpa, sym.index) catch {};
- const segment = &zig_object.segment_info.items[sym.index];
- gpa.free(segment.name);
- segment.name = &.{}; // Prevent accidental double free
- },
+ if (ip.isFunctionType(ip.typeOf(nav_val))) {
+ zig_object.functions_free_list.append(gpa, sym.index) catch {};
+ std.debug.assert(zig_object.atom_types.remove(atom_index));
+ } else {
+ zig_object.segment_free_list.append(gpa, sym.index) catch {};
+ const segment = &zig_object.segment_info.items[sym.index];
+ gpa.free(segment.name);
+ segment.name = &.{}; // Prevent accidental double free
}
}
@@ -1182,10 +1133,10 @@ fn allocateDebugAtoms(zig_object: *ZigObject) !void {
/// For the given `decl_index`, stores the corresponding type representing the function signature.
/// Asserts declaration has an associated `Atom`.
/// Returns the index into the list of types.
-pub fn storeDeclType(zig_object: *ZigObject, gpa: std.mem.Allocator, decl_index: InternPool.DeclIndex, func_type: std.wasm.Type) !u32 {
- const decl_info = zig_object.decls_map.get(decl_index).?;
+pub fn storeDeclType(zig_object: *ZigObject, gpa: std.mem.Allocator, nav_index: InternPool.Nav.Index, func_type: std.wasm.Type) !u32 {
+ const nav_info = zig_object.navs.get(nav_index).?;
const index = try zig_object.putOrGetFuncType(gpa, func_type);
- try zig_object.atom_types.put(gpa, decl_info.atom, index);
+ try zig_object.atom_types.put(gpa, nav_info.atom, index);
return index;
}
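// Sketch of the signature interning behind `storeDeclType` above: the wasm
// type section must not contain duplicate signatures, so a linear search (the
// real `putOrGetFuncType`) returns an existing index before appending a new
// entry, and the chosen index is then remembered per atom in `atom_types`.
// Only the type list itself is modeled here.
const std = @import("std");

const FuncType = struct {
    params: []const std.wasm.Valtype,
    returns: []const std.wasm.Valtype,

    fn eql(a: FuncType, b: FuncType) bool {
        return std.mem.eql(std.wasm.Valtype, a.params, b.params) and
            std.mem.eql(std.wasm.Valtype, a.returns, b.returns);
    }
};

fn putOrGetFuncType(
    gpa: std.mem.Allocator,
    types: *std.ArrayListUnmanaged(FuncType),
    func_type: FuncType,
) !u32 {
    for (types.items, 0..) |existing, i| {
        if (existing.eql(func_type)) return @intCast(i);
    }
    const index: u32 = @intCast(types.items.len);
    try types.append(gpa, func_type);
    return index;
}

test putOrGetFuncType {
    const gpa = std.testing.allocator;
    var types: std.ArrayListUnmanaged(FuncType) = .{};
    defer types.deinit(gpa);
    const sig: FuncType = .{ .params = &.{.i32}, .returns = &.{} };
    try std.testing.expectEqual(@as(u32, 0), try putOrGetFuncType(gpa, &types, sig));
    try std.testing.expectEqual(@as(u32, 0), try putOrGetFuncType(gpa, &types, sig));
}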