From 2dcfa723767d284cef5eb180be7c080583ddbe25 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 16 Oct 2024 18:48:43 -0700 Subject: link.Elf: untangle parseObject and parseArchive from link.Elf, so that they can be used earlier in the pipeline --- src/link/Elf.zig | 113 ++++++--------- src/link/Elf/Archive.zig | 56 +++++--- src/link/Elf/Atom.zig | 51 +++---- src/link/Elf/Object.zig | 323 ++++++++++++++++++++++++++----------------- src/link/Elf/eh_frame.zig | 37 +++-- src/link/Elf/gc.zig | 49 ++++--- src/link/Elf/relocatable.zig | 23 +-- 7 files changed, 363 insertions(+), 289 deletions(-) (limited to 'src') diff --git a/src/link/Elf.zig b/src/link/Elf.zig index 74d7a6e643..293573b112 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -1398,8 +1398,15 @@ fn parseObject(self: *Elf, obj: link.Input.Object) ParseError!void { defer tracy.end(); const gpa = self.base.comp.gpa; + const diags = &self.base.comp.link_diags; + const first_eflags = &self.first_eflags; + const target = self.base.comp.root_mod.resolved_target.result; + const debug_fmt_strip = self.base.comp.config.debug_format == .strip; + const default_sym_version = self.default_sym_version; + const file_handles = &self.file_handles; + const handle = obj.file; - const fh = try self.addFileHandle(handle); + const fh = try addFileHandle(gpa, file_handles, handle); const index: File.Index = @intCast(try self.files.addOne(gpa)); self.files.set(index, .{ .object = .{ @@ -1413,7 +1420,7 @@ fn parseObject(self: *Elf, obj: link.Input.Object) ParseError!void { try self.objects.append(gpa, index); const object = self.file(index).?.object; - try object.parse(self); + try object.parse(gpa, diags, obj.path, handle, first_eflags, target, debug_fmt_strip, default_sym_version); } pub fn openParseArchiveReportingFailure(self: *Elf, path: Path) void { @@ -1427,36 +1434,49 @@ pub fn openParseArchiveReportingFailure(self: *Elf, path: Path) void { } pub fn parseArchiveReportingFailure(self: *Elf, obj: link.Input.Object) void { + const gpa = self.base.comp.gpa; const diags = &self.base.comp.link_diags; - self.parseArchive(obj) catch |err| switch (err) { + const first_eflags = &self.first_eflags; + const target = self.base.comp.root_mod.resolved_target.result; + const debug_fmt_strip = self.base.comp.config.debug_format == .strip; + const default_sym_version = self.default_sym_version; + const file_handles = &self.file_handles; + const files = &self.files; + const objects = &self.objects; + + parseArchive(gpa, diags, file_handles, files, first_eflags, target, debug_fmt_strip, default_sym_version, objects, obj) catch |err| switch (err) { error.LinkFailure => return, // already reported else => |e| diags.addParseError(obj.path, "failed to parse archive: {s}", .{@errorName(e)}), }; } -fn parseArchive(self: *Elf, obj: link.Input.Object) ParseError!void { +fn parseArchive( + gpa: Allocator, + diags: *Diags, + file_handles: *std.ArrayListUnmanaged(File.Handle), + files: *std.MultiArrayList(File.Entry), + first_eflags: *?elf.Word, + target: std.Target, + debug_fmt_strip: bool, + default_sym_version: elf.Versym, + objects: *std.ArrayListUnmanaged(File.Index), + obj: link.Input.Object, +) ParseError!void { const tracy = trace(@src()); defer tracy.end(); - const gpa = self.base.comp.gpa; - const handle = obj.file; - const fh = try self.addFileHandle(handle); - - var archive: Archive = .{}; + const fh = try addFileHandle(gpa, file_handles, obj.file); + var archive = try Archive.parse(gpa, diags, file_handles, obj.path, fh); defer archive.deinit(gpa); - try 
archive.parse(self, obj.path, fh); - - const objects = try archive.objects.toOwnedSlice(gpa); - defer gpa.free(objects); - for (objects) |extracted| { - const index: File.Index = @intCast(try self.files.addOne(gpa)); - self.files.set(index, .{ .object = extracted }); - const object = &self.files.items(.data)[index].object; + for (archive.objects) |extracted| { + const index: File.Index = @intCast(try files.addOne(gpa)); + files.set(index, .{ .object = extracted }); + const object = &files.items(.data)[index].object; object.index = index; object.alive = obj.must_link; - try object.parse(self); - try self.objects.append(gpa, index); + try object.parse(gpa, diags, obj.path, obj.file, first_eflags, target, debug_fmt_strip, default_sym_version); + try objects.append(gpa, index); } } @@ -1565,46 +1585,6 @@ fn parseDso( } } -pub fn validateEFlags(self: *Elf, file_index: File.Index, e_flags: elf.Word) !void { - if (self.first_eflags == null) { - self.first_eflags = e_flags; - return; // there isn't anything to conflict with yet - } - const self_eflags: *elf.Word = &self.first_eflags.?; - - switch (self.getTarget().cpu.arch) { - .riscv64 => { - if (e_flags != self_eflags.*) { - const riscv_eflags: riscv.RiscvEflags = @bitCast(e_flags); - const self_riscv_eflags: *riscv.RiscvEflags = @ptrCast(self_eflags); - - self_riscv_eflags.rvc = self_riscv_eflags.rvc or riscv_eflags.rvc; - self_riscv_eflags.tso = self_riscv_eflags.tso or riscv_eflags.tso; - - var any_errors: bool = false; - if (self_riscv_eflags.fabi != riscv_eflags.fabi) { - any_errors = true; - try self.addFileError( - file_index, - "cannot link object files with different float-point ABIs", - .{}, - ); - } - if (self_riscv_eflags.rve != riscv_eflags.rve) { - any_errors = true; - try self.addFileError( - file_index, - "cannot link object files with different RVEs", - .{}, - ); - } - if (any_errors) return error.LinkFailure; - } - }, - else => {}, - } -} - /// When resolving symbols, we approach the problem similarly to `mold`. /// 1. Resolve symbols across all objects (including those preemptively extracted archives). /// 2. Resolve symbols across all shared objects. 
@@ -4704,16 +4684,16 @@ fn fileLookup(files: std.MultiArrayList(File.Entry), index: File.Index) ?File { }; } -pub fn addFileHandle(self: *Elf, handle: fs.File) !File.HandleIndex { - const gpa = self.base.comp.gpa; - const index: File.HandleIndex = @intCast(self.file_handles.items.len); - const fh = try self.file_handles.addOne(gpa); - fh.* = handle; - return index; +pub fn addFileHandle( + gpa: Allocator, + file_handles: *std.ArrayListUnmanaged(File.Handle), + handle: fs.File, +) Allocator.Error!File.HandleIndex { + try file_handles.append(gpa, handle); + return @intCast(file_handles.items.len - 1); } pub fn fileHandle(self: Elf, index: File.HandleIndex) File.Handle { - assert(index < self.file_handles.items.len); return self.file_handles.items[index]; } @@ -5588,4 +5568,3 @@ const Thunk = @import("Elf/Thunk.zig"); const Value = @import("../Value.zig"); const VerneedSection = synthetic_sections.VerneedSection; const ZigObject = @import("Elf/ZigObject.zig"); -const riscv = @import("riscv.zig"); diff --git a/src/link/Elf/Archive.zig b/src/link/Elf/Archive.zig index 50fffb0c19..5b9edc0c77 100644 --- a/src/link/Elf/Archive.zig +++ b/src/link/Elf/Archive.zig @@ -1,18 +1,29 @@ -objects: std.ArrayListUnmanaged(Object) = .empty, -strtab: std.ArrayListUnmanaged(u8) = .empty, - -pub fn deinit(self: *Archive, allocator: Allocator) void { - self.objects.deinit(allocator); - self.strtab.deinit(allocator); +objects: []const Object, +/// '\n'-delimited +strtab: []const u8, + +pub fn deinit(a: *Archive, gpa: Allocator) void { + gpa.free(a.objects); + gpa.free(a.strtab); + a.* = undefined; } -pub fn parse(self: *Archive, elf_file: *Elf, path: Path, handle_index: File.HandleIndex) !void { - const comp = elf_file.base.comp; - const gpa = comp.gpa; - const diags = &comp.link_diags; - const handle = elf_file.fileHandle(handle_index); +pub fn parse( + gpa: Allocator, + diags: *Diags, + file_handles: *const std.ArrayListUnmanaged(File.Handle), + path: Path, + handle_index: File.HandleIndex, +) !Archive { + const handle = file_handles.items[handle_index]; const size = (try handle.stat()).size; + var objects: std.ArrayListUnmanaged(Object) = .empty; + defer objects.deinit(gpa); + + var strtab: std.ArrayListUnmanaged(u8) = .empty; + defer strtab.deinit(gpa); + var pos: usize = elf.ARMAG.len; while (true) { if (pos >= size) break; @@ -37,8 +48,8 @@ pub fn parse(self: *Archive, elf_file: *Elf, path: Path, handle_index: File.Hand if (hdr.isSymtab() or hdr.isSymtab64()) continue; if (hdr.isStrtab()) { - try self.strtab.resize(gpa, obj_size); - const amt = try handle.preadAll(self.strtab.items, pos); + try strtab.resize(gpa, obj_size); + const amt = try handle.preadAll(strtab.items, pos); if (amt != obj_size) return error.InputOutput; continue; } @@ -47,7 +58,7 @@ pub fn parse(self: *Archive, elf_file: *Elf, path: Path, handle_index: File.Hand const name = if (hdr.name()) |name| name else if (try hdr.nameOffset()) |off| - self.getString(off) + stringTableLookup(strtab.items, off) else unreachable; @@ -70,14 +81,18 @@ pub fn parse(self: *Archive, elf_file: *Elf, path: Path, handle_index: File.Hand @as(Path, object.path), @as(Path, path), }); - try self.objects.append(gpa, object); + try objects.append(gpa, object); } + + return .{ + .objects = try objects.toOwnedSlice(gpa), + .strtab = try strtab.toOwnedSlice(gpa), + }; } -fn getString(self: Archive, off: u32) []const u8 { - assert(off < self.strtab.items.len); - const name = mem.sliceTo(@as([*:'\n']const u8, @ptrCast(self.strtab.items.ptr + off)), 0); - return name[0 .. 
name.len - 1]; +pub fn stringTableLookup(strtab: []const u8, off: u32) [:'\n']const u8 { + const slice = strtab[off..]; + return slice[0..mem.indexOfScalar(u8, slice, '\n').? :'\n']; } pub fn setArHdr(opts: struct { @@ -290,8 +305,9 @@ const fs = std.fs; const log = std.log.scoped(.link); const mem = std.mem; const Path = std.Build.Cache.Path; +const Allocator = std.mem.Allocator; -const Allocator = mem.Allocator; +const Diags = @import("../../link.zig").Diags; const Archive = @This(); const Elf = @import("../Elf.zig"); const File = @import("file.zig").File; diff --git a/src/link/Elf/Atom.zig b/src/link/Elf/Atom.zig index 4d8c23d2ff..6775c8c7ff 100644 --- a/src/link/Elf/Atom.zig +++ b/src/link/Elf/Atom.zig @@ -102,9 +102,13 @@ pub fn relocsShndx(self: Atom) ?u32 { return self.relocs_section_index; } -pub fn priority(self: Atom, elf_file: *Elf) u64 { - const index = self.file(elf_file).?.index(); - return (@as(u64, @intCast(index)) << 32) | @as(u64, @intCast(self.input_section_index)); +pub fn priority(atom: Atom, elf_file: *Elf) u64 { + const index = atom.file(elf_file).?.index(); + return priorityLookup(index, atom.input_section_index); +} + +pub fn priorityLookup(file_index: File.Index, input_section_index: u32) u64 { + return (@as(u64, @intCast(file_index)) << 32) | @as(u64, @intCast(input_section_index)); } /// Returns how much room there is to grow in virtual address space. @@ -255,19 +259,13 @@ pub fn writeRelocs(self: Atom, elf_file: *Elf, out_relocs: *std.ArrayList(elf.El } } -pub fn fdes(self: Atom, elf_file: *Elf) []Fde { - const extras = self.extra(elf_file); - return switch (self.file(elf_file).?) { - .shared_object => unreachable, - .linker_defined, .zig_object => &[0]Fde{}, - .object => |x| x.fdes.items[extras.fde_start..][0..extras.fde_count], - }; +pub fn fdes(atom: Atom, object: *Object) []Fde { + const extras = object.atomExtra(atom.extra_index); + return object.fdes.items[extras.fde_start..][0..extras.fde_count]; } -pub fn markFdesDead(self: Atom, elf_file: *Elf) void { - for (self.fdes(elf_file)) |*fde| { - fde.alive = false; - } +pub fn markFdesDead(self: Atom, object: *Object) void { + for (self.fdes(object)) |*fde| fde.alive = false; } pub fn addReloc(self: Atom, alloc: Allocator, reloc: elf.Elf64_Rela, zo: *ZigObject) !void { @@ -946,16 +944,21 @@ fn format2( atom.output_section_index, atom.alignment.toByteUnits() orelse 0, atom.size, atom.prev_atom_ref, atom.next_atom_ref, }); - if (atom.fdes(elf_file).len > 0) { - try writer.writeAll(" : fdes{ "); - const extras = atom.extra(elf_file); - for (atom.fdes(elf_file), extras.fde_start..) |fde, i| { - try writer.print("{d}", .{i}); - if (!fde.alive) try writer.writeAll("([*])"); - if (i - extras.fde_start < extras.fde_count - 1) try writer.writeAll(", "); - } - try writer.writeAll(" }"); - } + if (atom.file(elf_file)) |atom_file| switch (atom_file) { + .object => |object| { + if (atom.fdes(object).len > 0) { + try writer.writeAll(" : fdes{ "); + const extras = atom.extra(elf_file); + for (atom.fdes(object), extras.fde_start..) 
|fde, i| { + try writer.print("{d}", .{i}); + if (!fde.alive) try writer.writeAll("([*])"); + if (i - extras.fde_start < extras.fde_count - 1) try writer.writeAll(", "); + } + try writer.writeAll(" }"); + } + }, + else => {}, + }; if (!atom.alive) { try writer.writeAll(" : [*]"); } diff --git a/src/link/Elf/Object.zig b/src/link/Elf/Object.zig index 5aca15a205..d8ba812019 100644 --- a/src/link/Elf/Object.zig +++ b/src/link/Elf/Object.zig @@ -37,72 +37,87 @@ num_dynrelocs: u32 = 0, output_symtab_ctx: Elf.SymtabCtx = .{}, output_ar_state: Archive.ArState = .{}, -pub fn deinit(self: *Object, allocator: Allocator) void { - if (self.archive) |*ar| allocator.free(ar.path.sub_path); - allocator.free(self.path.sub_path); - self.shdrs.deinit(allocator); - self.symtab.deinit(allocator); - self.strtab.deinit(allocator); - self.symbols.deinit(allocator); - self.symbols_extra.deinit(allocator); - self.symbols_resolver.deinit(allocator); - self.atoms.deinit(allocator); - self.atoms_indexes.deinit(allocator); - self.atoms_extra.deinit(allocator); - self.comdat_groups.deinit(allocator); - self.comdat_group_data.deinit(allocator); - self.relocs.deinit(allocator); - self.fdes.deinit(allocator); - self.cies.deinit(allocator); - self.eh_frame_data.deinit(allocator); +pub fn deinit(self: *Object, gpa: Allocator) void { + if (self.archive) |*ar| gpa.free(ar.path.sub_path); + gpa.free(self.path.sub_path); + self.shdrs.deinit(gpa); + self.symtab.deinit(gpa); + self.strtab.deinit(gpa); + self.symbols.deinit(gpa); + self.symbols_extra.deinit(gpa); + self.symbols_resolver.deinit(gpa); + self.atoms.deinit(gpa); + self.atoms_indexes.deinit(gpa); + self.atoms_extra.deinit(gpa); + self.comdat_groups.deinit(gpa); + self.comdat_group_data.deinit(gpa); + self.relocs.deinit(gpa); + self.fdes.deinit(gpa); + self.cies.deinit(gpa); + self.eh_frame_data.deinit(gpa); for (self.input_merge_sections.items) |*isec| { - isec.deinit(allocator); + isec.deinit(gpa); } - self.input_merge_sections.deinit(allocator); - self.input_merge_sections_indexes.deinit(allocator); + self.input_merge_sections.deinit(gpa); + self.input_merge_sections_indexes.deinit(gpa); } -pub fn parse(self: *Object, elf_file: *Elf) !void { - const gpa = elf_file.base.comp.gpa; - const cpu_arch = elf_file.getTarget().cpu.arch; - const handle = elf_file.fileHandle(self.file_handle); - - try self.parseCommon(gpa, handle, elf_file); +pub fn parse( + self: *Object, + gpa: Allocator, + diags: *Diags, + /// For error reporting purposes only. + path: Path, + handle: fs.File, + first_eflags: *?elf.Word, + target: std.Target, + debug_fmt_strip: bool, + default_sym_version: elf.Versym, +) !void { + try self.parseCommon(gpa, diags, path, handle, first_eflags, target); // Append null input merge section try self.input_merge_sections.append(gpa, .{}); // Allocate atom index 0 to null atom try self.atoms.append(gpa, .{ .extra_index = try self.addAtomExtra(gpa, .{}) }); - try self.initAtoms(gpa, handle, elf_file); - try self.initSymbols(gpa, elf_file); + try self.initAtoms(gpa, diags, path, handle, debug_fmt_strip, target); + try self.initSymbols(gpa, default_sym_version); for (self.shdrs.items, 0..) 
|shdr, i| { const atom_ptr = self.atom(self.atoms_indexes.items[i]) orelse continue; if (!atom_ptr.alive) continue; - if ((cpu_arch == .x86_64 and shdr.sh_type == elf.SHT_X86_64_UNWIND) or - mem.eql(u8, atom_ptr.name(elf_file), ".eh_frame")) + if ((target.cpu.arch == .x86_64 and shdr.sh_type == elf.SHT_X86_64_UNWIND) or + mem.eql(u8, self.getString(atom_ptr.name_offset), ".eh_frame")) { - try self.parseEhFrame(gpa, handle, @as(u32, @intCast(i)), elf_file); + try self.parseEhFrame(gpa, handle, @intCast(i), target); } } } -fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_file: *Elf) !void { +fn parseCommon( + self: *Object, + gpa: Allocator, + diags: *Diags, + path: Path, + handle: fs.File, + first_eflags: *?elf.Word, + target: std.Target, +) !void { const offset = if (self.archive) |ar| ar.offset else 0; const file_size = (try handle.stat()).size; - const header_buffer = try Elf.preadAllAlloc(allocator, handle, offset, @sizeOf(elf.Elf64_Ehdr)); - defer allocator.free(header_buffer); + const header_buffer = try Elf.preadAllAlloc(gpa, handle, offset, @sizeOf(elf.Elf64_Ehdr)); + defer gpa.free(header_buffer); self.header = @as(*align(1) const elf.Elf64_Ehdr, @ptrCast(header_buffer)).*; - const em = elf_file.base.comp.root_mod.resolved_target.result.toElfMachine(); + const em = target.toElfMachine(); if (em != self.header.?.e_machine) { - return elf_file.failFile(self.index, "invalid ELF machine type: {s}", .{ + return diags.failParse(path, "invalid ELF machine type: {s}", .{ @tagName(self.header.?.e_machine), }); } - try elf_file.validateEFlags(self.index, self.header.?.e_flags); + try validateEFlags(diags, path, target, self.header.?.e_flags, first_eflags); if (self.header.?.e_shnum == 0) return; @@ -110,30 +125,30 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil const shnum = math.cast(usize, self.header.?.e_shnum) orelse return error.Overflow; const shsize = shnum * @sizeOf(elf.Elf64_Shdr); if (file_size < offset + shoff or file_size < offset + shoff + shsize) { - return elf_file.failFile(self.index, "corrupt header: section header table extends past the end of file", .{}); + return diags.failParse(path, "corrupt header: section header table extends past the end of file", .{}); } - const shdrs_buffer = try Elf.preadAllAlloc(allocator, handle, offset + shoff, shsize); - defer allocator.free(shdrs_buffer); + const shdrs_buffer = try Elf.preadAllAlloc(gpa, handle, offset + shoff, shsize); + defer gpa.free(shdrs_buffer); const shdrs = @as([*]align(1) const elf.Elf64_Shdr, @ptrCast(shdrs_buffer.ptr))[0..shnum]; - try self.shdrs.appendUnalignedSlice(allocator, shdrs); + try self.shdrs.appendUnalignedSlice(gpa, shdrs); for (self.shdrs.items) |shdr| { if (shdr.sh_type != elf.SHT_NOBITS) { if (file_size < offset + shdr.sh_offset or file_size < offset + shdr.sh_offset + shdr.sh_size) { - return elf_file.failFile(self.index, "corrupt section: extends past the end of file", .{}); + return diags.failParse(path, "corrupt section: extends past the end of file", .{}); } } } - const shstrtab = try self.preadShdrContentsAlloc(allocator, handle, self.header.?.e_shstrndx); - defer allocator.free(shstrtab); + const shstrtab = try self.preadShdrContentsAlloc(gpa, handle, self.header.?.e_shstrndx); + defer gpa.free(shstrtab); for (self.shdrs.items) |shdr| { if (shdr.sh_name >= shstrtab.len) { - return elf_file.failFile(self.index, "corrupt section name offset", .{}); + return diags.failParse(path, "corrupt section name offset", .{}); } } - try 
self.strtab.appendSlice(allocator, shstrtab); + try self.strtab.appendSlice(gpa, shstrtab); const symtab_index = for (self.shdrs.items, 0..) |shdr, i| switch (shdr.sh_type) { elf.SHT_SYMTAB => break @as(u32, @intCast(i)), @@ -144,19 +159,19 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil const shdr = self.shdrs.items[index]; self.first_global = shdr.sh_info; - const raw_symtab = try self.preadShdrContentsAlloc(allocator, handle, index); - defer allocator.free(raw_symtab); + const raw_symtab = try self.preadShdrContentsAlloc(gpa, handle, index); + defer gpa.free(raw_symtab); const nsyms = math.divExact(usize, raw_symtab.len, @sizeOf(elf.Elf64_Sym)) catch { - return elf_file.failFile(self.index, "symbol table not evenly divisible", .{}); + return diags.failParse(path, "symbol table not evenly divisible", .{}); }; const symtab = @as([*]align(1) const elf.Elf64_Sym, @ptrCast(raw_symtab.ptr))[0..nsyms]; const strtab_bias = @as(u32, @intCast(self.strtab.items.len)); - const strtab = try self.preadShdrContentsAlloc(allocator, handle, shdr.sh_link); - defer allocator.free(strtab); - try self.strtab.appendSlice(allocator, strtab); + const strtab = try self.preadShdrContentsAlloc(gpa, handle, shdr.sh_link); + defer gpa.free(strtab); + try self.strtab.appendSlice(gpa, strtab); - try self.symtab.ensureUnusedCapacity(allocator, symtab.len); + try self.symtab.ensureUnusedCapacity(gpa, symtab.len); for (symtab) |sym| { const out_sym = self.symtab.addOneAssumeCapacity(); out_sym.* = sym; @@ -168,15 +183,56 @@ fn parseCommon(self: *Object, allocator: Allocator, handle: std.fs.File, elf_fil } } -fn initAtoms(self: *Object, allocator: Allocator, handle: std.fs.File, elf_file: *Elf) !void { - const comp = elf_file.base.comp; - const debug_fmt_strip = comp.config.debug_format == .strip; - const target = comp.root_mod.resolved_target.result; +fn validateEFlags( + diags: *Diags, + path: Path, + target: std.Target, + e_flags: elf.Word, + first_eflags: *?elf.Word, +) error{LinkFailure}!void { + if (first_eflags.*) |*self_eflags| { + switch (target.cpu.arch) { + .riscv64 => { + if (e_flags != self_eflags.*) { + const riscv_eflags: riscv.RiscvEflags = @bitCast(e_flags); + const self_riscv_eflags: *riscv.RiscvEflags = @ptrCast(self_eflags); + + self_riscv_eflags.rvc = self_riscv_eflags.rvc or riscv_eflags.rvc; + self_riscv_eflags.tso = self_riscv_eflags.tso or riscv_eflags.tso; + + var any_errors: bool = false; + if (self_riscv_eflags.fabi != riscv_eflags.fabi) { + any_errors = true; + diags.addParseError(path, "cannot link object files with different float-point ABIs", .{}); + } + if (self_riscv_eflags.rve != riscv_eflags.rve) { + any_errors = true; + diags.addParseError(path, "cannot link object files with different RVEs", .{}); + } + if (any_errors) return error.LinkFailure; + } + }, + else => {}, + } + } else { + first_eflags.* = e_flags; + } +} + +fn initAtoms( + self: *Object, + gpa: Allocator, + diags: *Diags, + path: Path, + handle: fs.File, + debug_fmt_strip: bool, + target: std.Target, +) !void { const shdrs = self.shdrs.items; - try self.atoms.ensureTotalCapacityPrecise(allocator, shdrs.len); - try self.atoms_extra.ensureTotalCapacityPrecise(allocator, shdrs.len * @sizeOf(Atom.Extra)); - try self.atoms_indexes.ensureTotalCapacityPrecise(allocator, shdrs.len); - try self.atoms_indexes.resize(allocator, shdrs.len); + try self.atoms.ensureTotalCapacityPrecise(gpa, shdrs.len); + try self.atoms_extra.ensureTotalCapacityPrecise(gpa, shdrs.len * @sizeOf(Atom.Extra)); + try 
self.atoms_indexes.ensureTotalCapacityPrecise(gpa, shdrs.len); + try self.atoms_indexes.resize(gpa, shdrs.len); @memset(self.atoms_indexes.items, 0); for (shdrs, 0..) |shdr, i| { @@ -201,24 +257,24 @@ fn initAtoms(self: *Object, allocator: Allocator, handle: std.fs.File, elf_file: }; const shndx: u32 = @intCast(i); - const group_raw_data = try self.preadShdrContentsAlloc(allocator, handle, shndx); - defer allocator.free(group_raw_data); + const group_raw_data = try self.preadShdrContentsAlloc(gpa, handle, shndx); + defer gpa.free(group_raw_data); const group_nmembers = math.divExact(usize, group_raw_data.len, @sizeOf(u32)) catch { - return elf_file.failFile(self.index, "corrupt section group: not evenly divisible ", .{}); + return diags.failParse(path, "corrupt section group: not evenly divisible ", .{}); }; if (group_nmembers == 0) { - return elf_file.failFile(self.index, "corrupt section group: empty section", .{}); + return diags.failParse(path, "corrupt section group: empty section", .{}); } const group_members = @as([*]align(1) const u32, @ptrCast(group_raw_data.ptr))[0..group_nmembers]; if (group_members[0] != elf.GRP_COMDAT) { - return elf_file.failFile(self.index, "corrupt section group: unknown SHT_GROUP format", .{}); + return diags.failParse(path, "corrupt section group: unknown SHT_GROUP format", .{}); } const group_start: u32 = @intCast(self.comdat_group_data.items.len); - try self.comdat_group_data.appendUnalignedSlice(allocator, group_members[1..]); + try self.comdat_group_data.appendUnalignedSlice(gpa, group_members[1..]); - const comdat_group_index = try self.addComdatGroup(allocator); + const comdat_group_index = try self.addComdatGroup(gpa); const comdat_group = self.comdatGroup(comdat_group_index); comdat_group.* = .{ .signature_off = group_signature, @@ -242,8 +298,8 @@ fn initAtoms(self: *Object, allocator: Allocator, handle: std.fs.File, elf_file: const shndx: u32 = @intCast(i); if (self.skipShdr(shndx, debug_fmt_strip)) continue; const size, const alignment = if (shdr.sh_flags & elf.SHF_COMPRESSED != 0) blk: { - const data = try self.preadShdrContentsAlloc(allocator, handle, shndx); - defer allocator.free(data); + const data = try self.preadShdrContentsAlloc(gpa, handle, shndx); + defer gpa.free(data); const chdr = @as(*align(1) const elf.Elf64_Chdr, @ptrCast(data.ptr)).*; break :blk .{ chdr.ch_size, Alignment.fromNonzeroByteUnits(chdr.ch_addralign) }; } else .{ shdr.sh_size, Alignment.fromNonzeroByteUnits(shdr.sh_addralign) }; @@ -263,13 +319,13 @@ fn initAtoms(self: *Object, allocator: Allocator, handle: std.fs.File, elf_file: elf.SHT_REL, elf.SHT_RELA => { const atom_index = self.atoms_indexes.items[shdr.sh_info]; if (self.atom(atom_index)) |atom_ptr| { - const relocs = try self.preadRelocsAlloc(allocator, handle, @intCast(i)); - defer allocator.free(relocs); + const relocs = try self.preadRelocsAlloc(gpa, handle, @intCast(i)); + defer gpa.free(relocs); atom_ptr.relocs_section_index = @intCast(i); const rel_index: u32 = @intCast(self.relocs.items.len); const rel_count: u32 = @intCast(relocs.len); self.setAtomFields(atom_ptr, .{ .rel_index = rel_index, .rel_count = rel_count }); - try self.relocs.appendUnalignedSlice(allocator, relocs); + try self.relocs.appendUnalignedSlice(gpa, relocs); if (target.cpu.arch == .riscv64) { sortRelocs(self.relocs.items[rel_index..][0..rel_count]); } @@ -293,14 +349,18 @@ fn skipShdr(self: *Object, index: u32, debug_fmt_strip: bool) bool { return ignore; } -fn initSymbols(self: *Object, allocator: Allocator, elf_file: *Elf) !void { 
+fn initSymbols( + self: *Object, + gpa: Allocator, + default_sym_version: elf.Versym, +) !void { const first_global = self.first_global orelse self.symtab.items.len; const nglobals = self.symtab.items.len - first_global; - try self.symbols.ensureTotalCapacityPrecise(allocator, self.symtab.items.len); - try self.symbols_extra.ensureTotalCapacityPrecise(allocator, self.symtab.items.len * @sizeOf(Symbol.Extra)); - try self.symbols_resolver.ensureTotalCapacityPrecise(allocator, nglobals); - self.symbols_resolver.resize(allocator, nglobals) catch unreachable; + try self.symbols.ensureTotalCapacityPrecise(gpa, self.symtab.items.len); + try self.symbols_extra.ensureTotalCapacityPrecise(gpa, self.symtab.items.len * @sizeOf(Symbol.Extra)); + try self.symbols_resolver.ensureTotalCapacityPrecise(gpa, nglobals); + self.symbols_resolver.resize(gpa, nglobals) catch unreachable; @memset(self.symbols_resolver.items, 0); for (self.symtab.items, 0..) |sym, i| { @@ -310,7 +370,7 @@ fn initSymbols(self: *Object, allocator: Allocator, elf_file: *Elf) !void { sym_ptr.name_offset = sym.st_name; sym_ptr.esym_index = @intCast(i); sym_ptr.extra_index = self.addSymbolExtraAssumeCapacity(.{}); - sym_ptr.version_index = if (i >= first_global) elf_file.default_sym_version else .LOCAL; + sym_ptr.version_index = if (i >= first_global) default_sym_version else .LOCAL; sym_ptr.flags.weak = sym.st_bind() == elf.STB_WEAK; if (sym.st_shndx != elf.SHN_ABS and sym.st_shndx != elf.SHN_COMMON) { sym_ptr.ref = .{ .index = self.atoms_indexes.items[sym.st_shndx], .file = self.index }; @@ -318,24 +378,30 @@ fn initSymbols(self: *Object, allocator: Allocator, elf_file: *Elf) !void { } } -fn parseEhFrame(self: *Object, allocator: Allocator, handle: std.fs.File, shndx: u32, elf_file: *Elf) !void { +fn parseEhFrame( + self: *Object, + gpa: Allocator, + handle: fs.File, + shndx: u32, + target: std.Target, +) !void { const relocs_shndx = for (self.shdrs.items, 0..) 
|shdr, i| switch (shdr.sh_type) { elf.SHT_RELA => if (shdr.sh_info == shndx) break @as(u32, @intCast(i)), else => {}, } else null; - const raw = try self.preadShdrContentsAlloc(allocator, handle, shndx); - defer allocator.free(raw); - const data_start = @as(u32, @intCast(self.eh_frame_data.items.len)); - try self.eh_frame_data.appendSlice(allocator, raw); + const raw = try self.preadShdrContentsAlloc(gpa, handle, shndx); + defer gpa.free(raw); + const data_start: u32 = @intCast(self.eh_frame_data.items.len); + try self.eh_frame_data.appendSlice(gpa, raw); const relocs = if (relocs_shndx) |index| - try self.preadRelocsAlloc(allocator, handle, index) + try self.preadRelocsAlloc(gpa, handle, index) else &[0]elf.Elf64_Rela{}; - defer allocator.free(relocs); - const rel_start = @as(u32, @intCast(self.relocs.items.len)); - try self.relocs.appendUnalignedSlice(allocator, relocs); - if (elf_file.getTarget().cpu.arch == .riscv64) { + defer gpa.free(relocs); + const rel_start: u32 = @intCast(self.relocs.items.len); + try self.relocs.appendUnalignedSlice(gpa, relocs); + if (target.cpu.arch == .riscv64) { sortRelocs(self.relocs.items[rel_start..][0..relocs.len]); } const fdes_start = self.fdes.items.len; @@ -345,11 +411,11 @@ fn parseEhFrame(self: *Object, allocator: Allocator, handle: std.fs.File, shndx: while (try it.next()) |rec| { const rel_range = filterRelocs(self.relocs.items[rel_start..][0..relocs.len], rec.offset, rec.size + 4); switch (rec.tag) { - .cie => try self.cies.append(allocator, .{ + .cie => try self.cies.append(gpa, .{ .offset = data_start + rec.offset, .size = rec.size, .rel_index = rel_start + @as(u32, @intCast(rel_range.start)), - .rel_num = @as(u32, @intCast(rel_range.len)), + .rel_num = @intCast(rel_range.len), .input_section_index = shndx, .file_index = self.index, }), @@ -361,12 +427,12 @@ fn parseEhFrame(self: *Object, allocator: Allocator, handle: std.fs.File, shndx: // this can happen for object files built with -r flag by the linker. continue; } - try self.fdes.append(allocator, .{ + try self.fdes.append(gpa, .{ .offset = data_start + rec.offset, .size = rec.size, .cie_index = undefined, .rel_index = rel_start + @as(u32, @intCast(rel_range.start)), - .rel_num = @as(u32, @intCast(rel_range.len)), + .rel_num = @intCast(rel_range.len), .input_section_index = shndx, .file_index = self.index, }); @@ -376,7 +442,7 @@ fn parseEhFrame(self: *Object, allocator: Allocator, handle: std.fs.File, shndx: // Tie each FDE to its CIE for (self.fdes.items[fdes_start..]) |*fde| { - const cie_ptr = fde.offset + 4 - fde.ciePointer(elf_file); + const cie_ptr = fde.offset + 4 - fde.ciePointer(self); const cie_index = for (self.cies.items[cies_start..], cies_start..) 
|cie, cie_index| { if (cie.offset == cie_ptr) break @as(u32, @intCast(cie_index)); } else { @@ -392,26 +458,26 @@ fn parseEhFrame(self: *Object, allocator: Allocator, handle: std.fs.File, shndx: // Tie each FDE record to its matching atom const SortFdes = struct { - pub fn lessThan(ctx: *Elf, lhs: Fde, rhs: Fde) bool { + pub fn lessThan(ctx: *Object, lhs: Fde, rhs: Fde) bool { const lhs_atom = lhs.atom(ctx); const rhs_atom = rhs.atom(ctx); - return lhs_atom.priority(ctx) < rhs_atom.priority(ctx); + return Atom.priorityLookup(ctx.index, lhs_atom.input_section_index) < Atom.priorityLookup(ctx.index, rhs_atom.input_section_index); } }; - mem.sort(Fde, self.fdes.items[fdes_start..], elf_file, SortFdes.lessThan); + mem.sort(Fde, self.fdes.items[fdes_start..], self, SortFdes.lessThan); // Create a back-link from atom to FDEs - var i: u32 = @as(u32, @intCast(fdes_start)); + var i: u32 = @intCast(fdes_start); while (i < self.fdes.items.len) { const fde = self.fdes.items[i]; - const atom_ptr = fde.atom(elf_file); + const atom_ptr = fde.atom(self); const start = i; i += 1; while (i < self.fdes.items.len) : (i += 1) { const next_fde = self.fdes.items[i]; - if (atom_ptr.atom_index != next_fde.atom(elf_file).atom_index) break; + if (atom_ptr.atom_index != next_fde.atom(self).atom_index) break; } - atom_ptr.addExtra(.{ .fde_start = start, .fde_count = i - start }, elf_file); + self.setAtomFields(atom_ptr, .{ .fde_start = start, .fde_count = i - start }); } } @@ -904,7 +970,7 @@ pub fn markComdatGroupsDead(self: *Object, elf_file: *Elf) void { const atom_index = self.atoms_indexes.items[shndx]; if (self.atom(atom_index)) |atom_ptr| { atom_ptr.alive = false; - atom_ptr.markFdesDead(elf_file); + atom_ptr.markFdesDead(self); } } } @@ -970,10 +1036,13 @@ pub fn addAtomsToRelaSections(self: *Object, elf_file: *Elf) !void { } } -pub fn parseAr(self: *Object, elf_file: *Elf) !void { +pub fn parseAr(self: *Object, path: Path, elf_file: *Elf) !void { const gpa = elf_file.base.comp.gpa; + const diags = &elf_file.base.comp.link_diags; const handle = elf_file.fileHandle(self.file_handle); - try self.parseCommon(gpa, handle, elf_file); + const first_eflags = &elf_file.first_eflags; + const target = elf_file.base.comp.root_mod.resolved_target.result; + try self.parseCommon(gpa, diags, path, handle, first_eflags, target); } pub fn updateArSymtab(self: Object, ar_symtab: *Archive.ArSymtab, elf_file: *Elf) !void { @@ -1000,7 +1069,7 @@ pub fn updateArSize(self: *Object, elf_file: *Elf) !void { pub fn writeAr(self: Object, elf_file: *Elf, writer: anytype) !void { const size = std.math.cast(usize, self.output_ar_state.size) orelse return error.Overflow; const offset: u64 = if (self.archive) |ar| ar.offset else 0; - const name = std.fs.path.basename(self.path.sub_path); + const name = fs.path.basename(self.path.sub_path); const hdr = Archive.setArHdr(.{ .name = if (name.len <= Archive.max_member_name_len) .{ .name = name } @@ -1136,8 +1205,8 @@ pub fn resolveSymbol(self: Object, index: Symbol.Index, elf_file: *Elf) Elf.Ref return elf_file.resolver.get(resolv).?; } -fn addSymbol(self: *Object, allocator: Allocator) !Symbol.Index { - try self.symbols.ensureUnusedCapacity(allocator, 1); +fn addSymbol(self: *Object, gpa: Allocator) !Symbol.Index { + try self.symbols.ensureUnusedCapacity(gpa, 1); return self.addSymbolAssumeCapacity(); } @@ -1147,9 +1216,9 @@ fn addSymbolAssumeCapacity(self: *Object) Symbol.Index { return index; } -pub fn addSymbolExtra(self: *Object, allocator: Allocator, extra: Symbol.Extra) !u32 { +pub fn 
addSymbolExtra(self: *Object, gpa: Allocator, extra: Symbol.Extra) !u32 { const fields = @typeInfo(Symbol.Extra).@"struct".fields; - try self.symbols_extra.ensureUnusedCapacity(allocator, fields.len); + try self.symbols_extra.ensureUnusedCapacity(gpa, fields.len); return self.addSymbolExtraAssumeCapacity(extra); } @@ -1198,27 +1267,27 @@ pub fn getString(self: Object, off: u32) [:0]const u8 { return mem.sliceTo(@as([*:0]const u8, @ptrCast(self.strtab.items.ptr + off)), 0); } -fn addString(self: *Object, allocator: Allocator, str: []const u8) !u32 { +fn addString(self: *Object, gpa: Allocator, str: []const u8) !u32 { const off: u32 = @intCast(self.strtab.items.len); - try self.strtab.ensureUnusedCapacity(allocator, str.len + 1); + try self.strtab.ensureUnusedCapacity(gpa, str.len + 1); self.strtab.appendSliceAssumeCapacity(str); self.strtab.appendAssumeCapacity(0); return off; } /// Caller owns the memory. -fn preadShdrContentsAlloc(self: Object, allocator: Allocator, handle: std.fs.File, index: u32) ![]u8 { +fn preadShdrContentsAlloc(self: Object, gpa: Allocator, handle: fs.File, index: u32) ![]u8 { assert(index < self.shdrs.items.len); const offset = if (self.archive) |ar| ar.offset else 0; const shdr = self.shdrs.items[index]; const sh_offset = math.cast(u64, shdr.sh_offset) orelse return error.Overflow; const sh_size = math.cast(u64, shdr.sh_size) orelse return error.Overflow; - return Elf.preadAllAlloc(allocator, handle, offset + sh_offset, sh_size); + return Elf.preadAllAlloc(gpa, handle, offset + sh_offset, sh_size); } /// Caller owns the memory. -fn preadRelocsAlloc(self: Object, allocator: Allocator, handle: std.fs.File, shndx: u32) ![]align(1) const elf.Elf64_Rela { - const raw = try self.preadShdrContentsAlloc(allocator, handle, shndx); +fn preadRelocsAlloc(self: Object, gpa: Allocator, handle: fs.File, shndx: u32) ![]align(1) const elf.Elf64_Rela { + const raw = try self.preadShdrContentsAlloc(gpa, handle, shndx); const num = @divExact(raw.len, @sizeOf(elf.Elf64_Rela)); return @as([*]align(1) const elf.Elf64_Rela, @ptrCast(raw.ptr))[0..num]; } @@ -1230,9 +1299,9 @@ const AddAtomArgs = struct { alignment: Alignment, }; -fn addAtom(self: *Object, allocator: Allocator, args: AddAtomArgs) !Atom.Index { - try self.atoms.ensureUnusedCapacity(allocator, 1); - try self.atoms_extra.ensureUnusedCapacity(allocator, @sizeOf(Atom.Extra)); +fn addAtom(self: *Object, gpa: Allocator, args: AddAtomArgs) !Atom.Index { + try self.atoms.ensureUnusedCapacity(gpa, 1); + try self.atoms_extra.ensureUnusedCapacity(gpa, @sizeOf(Atom.Extra)); return self.addAtomAssumeCapacity(args); } @@ -1257,9 +1326,9 @@ pub fn atom(self: *Object, atom_index: Atom.Index) ?*Atom { return &self.atoms.items[atom_index]; } -pub fn addAtomExtra(self: *Object, allocator: Allocator, extra: Atom.Extra) !u32 { +pub fn addAtomExtra(self: *Object, gpa: Allocator, extra: Atom.Extra) !u32 { const fields = @typeInfo(Atom.Extra).@"struct".fields; - try self.atoms_extra.ensureUnusedCapacity(allocator, fields.len); + try self.atoms_extra.ensureUnusedCapacity(gpa, fields.len); return self.addAtomExtraAssumeCapacity(extra); } @@ -1308,9 +1377,9 @@ fn setAtomFields(o: *Object, atom_ptr: *Atom, opts: Atom.Extra.AsOptionals) void o.setAtomExtra(atom_ptr.extra_index, extras); } -fn addInputMergeSection(self: *Object, allocator: Allocator) !Merge.InputSection.Index { +fn addInputMergeSection(self: *Object, gpa: Allocator) !Merge.InputSection.Index { const index: Merge.InputSection.Index = @intCast(self.input_merge_sections.items.len); - const 
msec = try self.input_merge_sections.addOne(allocator); + const msec = try self.input_merge_sections.addOne(gpa); msec.* = .{}; return index; } @@ -1320,9 +1389,9 @@ fn inputMergeSection(self: *Object, index: Merge.InputSection.Index) ?*Merge.Inp return &self.input_merge_sections.items[index]; } -fn addComdatGroup(self: *Object, allocator: Allocator) !Elf.ComdatGroup.Index { +fn addComdatGroup(self: *Object, gpa: Allocator) !Elf.ComdatGroup.Index { const index = @as(Elf.ComdatGroup.Index, @intCast(self.comdat_groups.items.len)); - _ = try self.comdat_groups.addOne(allocator); + _ = try self.comdat_groups.addOne(gpa); return index; } @@ -1516,8 +1585,9 @@ const log = std.log.scoped(.link); const math = std.math; const mem = std.mem; const Path = std.Build.Cache.Path; -const Allocator = mem.Allocator; +const Allocator = std.mem.Allocator; +const Diags = @import("../../link.zig").Diags; const Archive = @import("Archive.zig"); const Atom = @import("Atom.zig"); const AtomList = @import("AtomList.zig"); @@ -1528,3 +1598,4 @@ const File = @import("file.zig").File; const Merge = @import("Merge.zig"); const Symbol = @import("Symbol.zig"); const Alignment = Atom.Alignment; +const riscv = @import("../riscv.zig"); diff --git a/src/link/Elf/eh_frame.zig b/src/link/Elf/eh_frame.zig index 81913cb33c..1af1236526 100644 --- a/src/link/Elf/eh_frame.zig +++ b/src/link/Elf/eh_frame.zig @@ -19,18 +19,16 @@ pub const Fde = struct { return base + fde.out_offset; } - pub fn data(fde: Fde, elf_file: *Elf) []u8 { - const object = elf_file.file(fde.file_index).?.object; + pub fn data(fde: Fde, object: *Object) []u8 { return object.eh_frame_data.items[fde.offset..][0..fde.calcSize()]; } - pub fn cie(fde: Fde, elf_file: *Elf) Cie { - const object = elf_file.file(fde.file_index).?.object; + pub fn cie(fde: Fde, object: *Object) Cie { return object.cies.items[fde.cie_index]; } - pub fn ciePointer(fde: Fde, elf_file: *Elf) u32 { - const fde_data = fde.data(elf_file); + pub fn ciePointer(fde: Fde, object: *Object) u32 { + const fde_data = fde.data(object); return std.mem.readInt(u32, fde_data[4..8], .little); } @@ -38,16 +36,14 @@ pub const Fde = struct { return fde.size + 4; } - pub fn atom(fde: Fde, elf_file: *Elf) *Atom { - const object = elf_file.file(fde.file_index).?.object; - const rel = fde.relocs(elf_file)[0]; + pub fn atom(fde: Fde, object: *Object) *Atom { + const rel = fde.relocs(object)[0]; const sym = object.symtab.items[rel.r_sym()]; const atom_index = object.atoms_indexes.items[sym.st_shndx]; return object.atom(atom_index).?; } - pub fn relocs(fde: Fde, elf_file: *Elf) []align(1) const elf.Elf64_Rela { - const object = elf_file.file(fde.file_index).?.object; + pub fn relocs(fde: Fde, object: *Object) []const elf.Elf64_Rela { return object.relocs.items[fde.rel_index..][0..fde.rel_num]; } @@ -87,7 +83,8 @@ pub const Fde = struct { const fde = ctx.fde; const elf_file = ctx.elf_file; const base_addr = fde.address(elf_file); - const atom_name = fde.atom(elf_file).name(elf_file); + const object = elf_file.file(fde.file_index).?.object; + const atom_name = fde.atom(object).name(elf_file); try writer.print("@{x} : size({x}) : cie({d}) : {s}", .{ base_addr + fde.out_offset, fde.calcSize(), @@ -306,7 +303,7 @@ pub fn calcEhFrameRelocs(elf_file: *Elf) usize { } for (object.fdes.items) |fde| { if (!fde.alive) continue; - count += fde.relocs(elf_file).len; + count += fde.relocs(object).len; } } return count; @@ -369,16 +366,16 @@ pub fn writeEhFrame(elf_file: *Elf, writer: anytype) !void { for (object.fdes.items) |fde| 
{ if (!fde.alive) continue; - const contents = fde.data(elf_file); + const contents = fde.data(object); std.mem.writeInt( i32, contents[4..8], - @truncate(@as(i64, @intCast(fde.out_offset + 4)) - @as(i64, @intCast(fde.cie(elf_file).out_offset))), + @truncate(@as(i64, @intCast(fde.out_offset + 4)) - @as(i64, @intCast(fde.cie(object).out_offset))), .little, ); - for (fde.relocs(elf_file)) |rel| { + for (fde.relocs(object)) |rel| { const ref = object.resolveSymbol(rel.r_sym(), elf_file); const sym = elf_file.symbol(ref).?; resolveReloc(fde, sym, rel, elf_file, contents) catch |err| switch (err) { @@ -412,12 +409,12 @@ pub fn writeEhFrameRelocatable(elf_file: *Elf, writer: anytype) !void { for (object.fdes.items) |fde| { if (!fde.alive) continue; - const contents = fde.data(elf_file); + const contents = fde.data(object); std.mem.writeInt( i32, contents[4..8], - @truncate(@as(i64, @intCast(fde.out_offset + 4)) - @as(i64, @intCast(fde.cie(elf_file).out_offset))), + @truncate(@as(i64, @intCast(fde.out_offset + 4)) - @as(i64, @intCast(fde.cie(object).out_offset))), .little, ); @@ -490,7 +487,7 @@ pub fn writeEhFrameRelocs(elf_file: *Elf, writer: anytype) !void { for (object.fdes.items) |fde| { if (!fde.alive) continue; - for (fde.relocs(elf_file)) |rel| { + for (fde.relocs(object)) |rel| { const ref = object.resolveSymbol(rel.r_sym(), elf_file); const sym = elf_file.symbol(ref).?; const r_offset = fde.address(elf_file) + rel.r_offset - fde.offset; @@ -548,7 +545,7 @@ pub fn writeEhFrameHdr(elf_file: *Elf, writer: anytype) !void { for (object.fdes.items) |fde| { if (!fde.alive) continue; - const relocs = fde.relocs(elf_file); + const relocs = fde.relocs(object); assert(relocs.len > 0); // Should this be an error? Things are completely broken anyhow if this trips... 
const rel = relocs[0]; const ref = object.resolveSymbol(rel.r_sym(), elf_file); diff --git a/src/link/Elf/gc.zig b/src/link/Elf/gc.zig index e0680d5db6..ff6c0bb7ce 100644 --- a/src/link/Elf/gc.zig +++ b/src/link/Elf/gc.zig @@ -103,15 +103,20 @@ fn markLive(atom: *Atom, elf_file: *Elf) void { assert(atom.visited); const file = atom.file(elf_file).?; - for (atom.fdes(elf_file)) |fde| { - for (fde.relocs(elf_file)[1..]) |rel| { - const ref = file.resolveSymbol(rel.r_sym(), elf_file); - const target_sym = elf_file.symbol(ref) orelse continue; - const target_atom = target_sym.atom(elf_file) orelse continue; - target_atom.alive = true; - gc_track_live_log.debug("{}marking live atom({d})", .{ track_live_level, target_atom.atom_index }); - if (markAtom(target_atom)) markLive(target_atom, elf_file); - } + switch (file) { + .object => |object| { + for (atom.fdes(object)) |fde| { + for (fde.relocs(object)[1..]) |rel| { + const ref = file.resolveSymbol(rel.r_sym(), elf_file); + const target_sym = elf_file.symbol(ref) orelse continue; + const target_atom = target_sym.atom(elf_file) orelse continue; + target_atom.alive = true; + gc_track_live_log.debug("{}marking live atom({d})", .{ track_live_level, target_atom.atom_index }); + if (markAtom(target_atom)) markLive(target_atom, elf_file); + } + } + }, + else => {}, } for (atom.relocs(elf_file)) |rel| { @@ -135,23 +140,25 @@ fn mark(roots: std.ArrayList(*Atom), elf_file: *Elf) void { } } -fn prune(elf_file: *Elf) void { - const pruneInFile = struct { - fn pruneInFile(file: File, ef: *Elf) void { - for (file.atoms()) |atom_index| { - const atom = file.atom(atom_index) orelse continue; - if (atom.alive and !atom.visited) { - atom.alive = false; - atom.markFdesDead(ef); - } +fn pruneInFile(file: File) void { + for (file.atoms()) |atom_index| { + const atom = file.atom(atom_index) orelse continue; + if (atom.alive and !atom.visited) { + atom.alive = false; + switch (file) { + .object => |object| atom.markFdesDead(object), + else => {}, } } - }.pruneInFile; + } +} + +fn prune(elf_file: *Elf) void { if (elf_file.zigObjectPtr()) |zo| { - pruneInFile(zo.asFile(), elf_file); + pruneInFile(zo.asFile()); } for (elf_file.objects.items) |index| { - pruneInFile(elf_file.file(index).?, elf_file); + pruneInFile(elf_file.file(index).?); } } diff --git a/src/link/Elf/relocatable.zig b/src/link/Elf/relocatable.zig index 82f62356e9..5e4d4aa7b7 100644 --- a/src/link/Elf/relocatable.zig +++ b/src/link/Elf/relocatable.zig @@ -233,8 +233,10 @@ fn parseArchiveStaticLibReportingFailure(elf_file: *Elf, path: Path) void { fn parseObjectStaticLib(elf_file: *Elf, path: Path) Elf.ParseError!void { const gpa = elf_file.base.comp.gpa; + const file_handles = &elf_file.file_handles; + const handle = try path.root_dir.handle.openFile(path.sub_path, .{}); - const fh = try elf_file.addFileHandle(handle); + const fh = try Elf.addFileHandle(gpa, file_handles, handle); const index: File.Index = @intCast(try elf_file.files.addOne(gpa)); elf_file.files.set(index, .{ .object = .{ @@ -248,27 +250,26 @@ fn parseObjectStaticLib(elf_file: *Elf, path: Path) Elf.ParseError!void { try elf_file.objects.append(gpa, index); const object = elf_file.file(index).?.object; - try object.parseAr(elf_file); + try object.parseAr(path, elf_file); } fn parseArchiveStaticLib(elf_file: *Elf, path: Path) Elf.ParseError!void { const gpa = elf_file.base.comp.gpa; + const diags = &elf_file.base.comp.link_diags; + const file_handles = &elf_file.file_handles; + const handle = try path.root_dir.handle.openFile(path.sub_path, 
.{}); - const fh = try elf_file.addFileHandle(handle); + const fh = try Elf.addFileHandle(gpa, file_handles, handle); - var archive = Archive{}; + var archive = try Archive.parse(gpa, diags, file_handles, path, fh); defer archive.deinit(gpa); - try archive.parse(elf_file, path, fh); - - const objects = try archive.objects.toOwnedSlice(gpa); - defer gpa.free(objects); - for (objects) |extracted| { - const index = @as(File.Index, @intCast(try elf_file.files.addOne(gpa))); + for (archive.objects) |extracted| { + const index: File.Index = @intCast(try elf_file.files.addOne(gpa)); elf_file.files.set(index, .{ .object = extracted }); const object = &elf_file.files.items(.data)[index].object; object.index = index; - try object.parseAr(elf_file); + try object.parseAr(path, elf_file); try elf_file.objects.append(gpa, index); } } -- cgit v1.2.3
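The rationale in the subject line — untangling parseObject/parseArchive so they can run earlier in the pipeline — amounts to passing the parser its dependencies (allocator, diagnostics, target, file handles) explicitly instead of reaching through a fully constructed *Elf. Below is a minimal, self-contained sketch of that dependency-injection pattern, for illustration only: Diags, parseObject, and the ELF-magic check are hypothetical stand-ins, not the linker's real API.

const std = @import("std");

// Hypothetical stand-in for the linker's diagnostics sink.
const Diags = struct {
    pub fn addParseError(self: *Diags, path: []const u8, comptime fmt: []const u8, args: anytype) void {
        _ = self;
        std.log.err("{s}: " ++ fmt, .{path} ++ args);
    }
};

// In this style, a parser receives only what it actually uses, so it can be
// called before a complete linker object exists.
fn parseObject(
    gpa: std.mem.Allocator,
    diags: *Diags,
    path: []const u8,
    bytes: []const u8,
) error{LinkFailure}!void {
    _ = gpa; // a real parser would allocate section/symbol tables here
    if (bytes.len < 4 or !std.mem.eql(u8, bytes[0..4], "\x7fELF")) {
        diags.addParseError(path, "not an ELF object", .{});
        return error.LinkFailure;
    }
    // ... real parsing would continue here ...
}

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    var diags: Diags = .{};
    try parseObject(gpa_state.allocator(), &diags, "example.o", "\x7fELF" ++ ("\x00" ** 12));
}

With the dependencies spelled out in the signature, any stage that holds an allocator and a diagnostics sink can invoke the parse step, which is what lets it move earlier in the pipeline.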