Diffstat (limited to 'src')
-rw-r--r--  src/Compilation.zig                 4
-rw-r--r--  src/Package/Fetch.zig              16
-rw-r--r--  src/Package/Fetch/git.zig          22
-rw-r--r--  src/Zcu.zig                         8
-rw-r--r--  src/Zcu/PerThread.zig               6
-rw-r--r--  src/fmt.zig                        16
-rw-r--r--  src/link.zig                       20
-rw-r--r--  src/link/Dwarf.zig                 53
-rw-r--r--  src/link/Elf.zig                    5
-rw-r--r--  src/link/Elf/Object.zig            65
-rw-r--r--  src/link/Elf/SharedObject.zig      37
-rw-r--r--  src/link/Elf/file.zig              35
-rw-r--r--  src/link/Elf2.zig                  46
-rw-r--r--  src/link/MachO.zig                 23
-rw-r--r--  src/link/MachO/CodeSignature.zig    8
-rw-r--r--  src/link/MachO/fat.zig             12
-rw-r--r--  src/link/MachO/file.zig             3
-rw-r--r--  src/link/MachO/hasher.zig          22
-rw-r--r--  src/link/MachO/uuid.zig            19
-rw-r--r--  src/link/MappedFile.zig            25
-rw-r--r--  src/link/Wasm.zig                   5
-rw-r--r--  src/link/tapi.zig                  10
22 files changed, 239 insertions(+), 221 deletions(-)
diff --git a/src/Compilation.zig b/src/Compilation.zig
index df64dee19f..582c9dff3c 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -5325,7 +5325,7 @@ fn docsCopyModule(
comp: *Compilation,
module: *Package.Module,
name: []const u8,
- tar_file_writer: *fs.File.Writer,
+ tar_file_writer: *Io.File.Writer,
) !void {
const io = comp.io;
const root = module.root;
@@ -5361,7 +5361,7 @@ fn docsCopyModule(
};
defer file.close(io);
const stat = try file.stat();
- var file_reader: fs.File.Reader = .initSize(file.adaptToNewApi(), io, &buffer, stat.size);
+ var file_reader: Io.File.Reader = .initSize(file.adaptToNewApi(), io, &buffer, stat.size);
archiver.writeFileTimestamp(entry.path, &file_reader, stat.mtime) catch |err| {
return comp.lockAndSetMiscFailure(.docs_copy, "unable to archive {f}{s}: {t}", .{
diff --git a/src/Package/Fetch.zig b/src/Package/Fetch.zig
index 58f970abe5..988282097b 100644
--- a/src/Package/Fetch.zig
+++ b/src/Package/Fetch.zig
@@ -882,7 +882,7 @@ fn fail(f: *Fetch, msg_tok: std.zig.Ast.TokenIndex, msg_str: u32) RunError {
}
const Resource = union(enum) {
- file: fs.File.Reader,
+ file: Io.File.Reader,
http_request: HttpRequest,
git: Git,
dir: Io.Dir,
@@ -1653,7 +1653,7 @@ fn computeHash(f: *Fetch, pkg_path: Cache.Path, filter: Filter) RunError!Compute
fn dumpHashInfo(all_files: []const *const HashedFile) !void {
var stdout_buffer: [1024]u8 = undefined;
- var stdout_writer: fs.File.Writer = .initStreaming(.stdout(), &stdout_buffer);
+ var stdout_writer: Io.File.Writer = .initStreaming(.stdout(), &stdout_buffer);
const w = &stdout_writer.interface;
for (all_files) |hashed_file| {
try w.print("{t}: {x}: {s}\n", .{ hashed_file.kind, &hashed_file.hash, hashed_file.normalized_path });
@@ -1712,11 +1712,11 @@ fn deleteFileFallible(dir: Io.Dir, deleted_file: *DeletedFile) DeletedFile.Error
try dir.deleteFile(deleted_file.fs_path);
}
-fn setExecutable(file: fs.File) !void {
+fn setExecutable(file: Io.File) !void {
if (!std.fs.has_executable_bit) return;
const S = std.posix.S;
- const mode = fs.File.default_mode | S.IXUSR | S.IXGRP | S.IXOTH;
+ const mode = Io.File.default_mode | S.IXUSR | S.IXGRP | S.IXOTH;
try file.chmod(mode);
}
@@ -1738,10 +1738,10 @@ const HashedFile = struct {
size: u64,
const Error =
- fs.File.OpenError ||
- fs.File.ReadError ||
- fs.File.StatError ||
- fs.File.ChmodError ||
+ Io.File.OpenError ||
+ Io.File.ReadError ||
+ Io.File.StatError ||
+ Io.File.ChmodError ||
Io.Dir.ReadLinkError;
const Kind = enum { file, link };
diff --git a/src/Package/Fetch/git.zig b/src/Package/Fetch/git.zig
index 864865bd19..0fca3a0ee3 100644
--- a/src/Package/Fetch/git.zig
+++ b/src/Package/Fetch/git.zig
@@ -198,8 +198,8 @@ pub const Repository = struct {
repo: *Repository,
allocator: Allocator,
format: Oid.Format,
- pack_file: *std.fs.File.Reader,
- index_file: *std.fs.File.Reader,
+ pack_file: *Io.File.Reader,
+ index_file: *Io.File.Reader,
) !void {
repo.* = .{ .odb = undefined };
try repo.odb.init(allocator, format, pack_file, index_file);
@@ -372,9 +372,9 @@ pub const Repository = struct {
/// [pack-format](https://git-scm.com/docs/pack-format).
const Odb = struct {
format: Oid.Format,
- pack_file: *std.fs.File.Reader,
+ pack_file: *Io.File.Reader,
index_header: IndexHeader,
- index_file: *std.fs.File.Reader,
+ index_file: *Io.File.Reader,
cache: ObjectCache = .{},
allocator: Allocator,
@@ -383,8 +383,8 @@ const Odb = struct {
odb: *Odb,
allocator: Allocator,
format: Oid.Format,
- pack_file: *std.fs.File.Reader,
- index_file: *std.fs.File.Reader,
+ pack_file: *Io.File.Reader,
+ index_file: *Io.File.Reader,
) !void {
try pack_file.seekTo(0);
try index_file.seekTo(0);
@@ -1272,8 +1272,8 @@ const IndexEntry = struct {
pub fn indexPack(
allocator: Allocator,
format: Oid.Format,
- pack: *std.fs.File.Reader,
- index_writer: *std.fs.File.Writer,
+ pack: *Io.File.Reader,
+ index_writer: *Io.File.Writer,
) !void {
try pack.seekTo(0);
@@ -1372,7 +1372,7 @@ pub fn indexPack(
fn indexPackFirstPass(
allocator: Allocator,
format: Oid.Format,
- pack: *std.fs.File.Reader,
+ pack: *Io.File.Reader,
index_entries: *std.AutoHashMapUnmanaged(Oid, IndexEntry),
pending_deltas: *std.ArrayList(IndexEntry),
) !Oid {
@@ -1425,7 +1425,7 @@ fn indexPackFirstPass(
fn indexPackHashDelta(
allocator: Allocator,
format: Oid.Format,
- pack: *std.fs.File.Reader,
+ pack: *Io.File.Reader,
delta: IndexEntry,
index_entries: std.AutoHashMapUnmanaged(Oid, IndexEntry),
cache: *ObjectCache,
@@ -1477,7 +1477,7 @@ fn indexPackHashDelta(
fn resolveDeltaChain(
allocator: Allocator,
format: Oid.Format,
- pack: *std.fs.File.Reader,
+ pack: *Io.File.Reader,
base_object: Object,
delta_offsets: []const u64,
cache: *ObjectCache,
diff --git a/src/Zcu.zig b/src/Zcu.zig
index 58d884afe3..cd4a8c7783 100644
--- a/src/Zcu.zig
+++ b/src/Zcu.zig
@@ -1200,7 +1200,7 @@ pub const EmbedFile = struct {
/// `.none` means the file was not loaded, so `stat` is undefined.
val: InternPool.Index,
/// If this is `null` and `val` is `.none`, the file has never been loaded.
- err: ?(std.fs.File.OpenError || std.fs.File.StatError || std.fs.File.ReadError || error{UnexpectedEof}),
+ err: ?(Io.File.OpenError || Io.File.StatError || Io.File.ReadError || error{UnexpectedEof}),
stat: Cache.File.Stat,
pub const Index = enum(u32) {
@@ -2927,7 +2927,7 @@ comptime {
}
}
-pub fn loadZirCache(gpa: Allocator, io: Io, cache_file: std.fs.File) !Zir {
+pub fn loadZirCache(gpa: Allocator, io: Io, cache_file: Io.File) !Zir {
var buffer: [2000]u8 = undefined;
var file_reader = cache_file.reader(io, &buffer);
return result: {
@@ -2986,7 +2986,7 @@ pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_br: *Io.Reader
return zir;
}
-pub fn saveZirCache(gpa: Allocator, cache_file: std.fs.File, stat: std.fs.File.Stat, zir: Zir) (std.fs.File.WriteError || Allocator.Error)!void {
+pub fn saveZirCache(gpa: Allocator, cache_file: Io.File, stat: Io.File.Stat, zir: Zir) (Io.File.WriteError || Allocator.Error)!void {
const safety_buffer = if (data_has_safety_tag)
try gpa.alloc([8]u8, zir.instructions.len)
else
@@ -3026,7 +3026,7 @@ pub fn saveZirCache(gpa: Allocator, cache_file: std.fs.File, stat: std.fs.File.S
};
}
-pub fn saveZoirCache(cache_file: std.fs.File, stat: std.fs.File.Stat, zoir: Zoir) std.fs.File.WriteError!void {
+pub fn saveZoirCache(cache_file: Io.File, stat: Io.File.Stat, zoir: Zoir) Io.File.WriteError!void {
const header: Zoir.Header = .{
.nodes_len = @intCast(zoir.nodes.len),
.extra_len = @intCast(zoir.extra.len),
diff --git a/src/Zcu/PerThread.zig b/src/Zcu/PerThread.zig
index d2ca004058..55d6a3861f 100644
--- a/src/Zcu/PerThread.zig
+++ b/src/Zcu/PerThread.zig
@@ -118,7 +118,7 @@ pub fn updateFile(
const zir_dir = cache_directory.handle;
// Determine whether we need to reload the file from disk and redo parsing and AstGen.
- var lock: std.fs.File.Lock = switch (file.status) {
+ var lock: Io.File.Lock = switch (file.status) {
.never_loaded, .retryable_failure => lock: {
// First, load the cached ZIR code, if any.
log.debug("AstGen checking cache: {f} (local={}, digest={s})", .{
@@ -346,8 +346,8 @@ pub fn updateFile(
fn loadZirZoirCache(
zcu: *Zcu,
- cache_file: std.fs.File,
- stat: std.fs.File.Stat,
+ cache_file: Io.File,
+ stat: Io.File.Stat,
file: *Zcu.File,
comptime mode: Ast.Mode,
) !enum { success, invalid, truncated, stale } {
diff --git a/src/fmt.zig b/src/fmt.zig
index 907c7885ad..7bdc24054e 100644
--- a/src/fmt.zig
+++ b/src/fmt.zig
@@ -37,9 +37,9 @@ const Fmt = struct {
arena: Allocator,
io: Io,
out_buffer: std.Io.Writer.Allocating,
- stdout_writer: *fs.File.Writer,
+ stdout_writer: *Io.File.Writer,
- const SeenMap = std.AutoHashMap(fs.File.INode, void);
+ const SeenMap = std.AutoHashMap(Io.File.INode, void);
};
pub fn run(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8) !void {
@@ -59,7 +59,7 @@ pub fn run(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8) !
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
- try fs.File.stdout().writeAll(usage_fmt);
+ try Io.File.stdout().writeAll(usage_fmt);
return process.cleanExit();
} else if (mem.eql(u8, arg, "--color")) {
if (i + 1 >= args.len) {
@@ -99,9 +99,9 @@ pub fn run(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8) !
fatal("cannot use --stdin with positional arguments", .{});
}
- const stdin: fs.File = .stdin();
+ const stdin: Io.File = .stdin();
var stdio_buffer: [1024]u8 = undefined;
- var file_reader: fs.File.Reader = stdin.reader(io, &stdio_buffer);
+ var file_reader: Io.File.Reader = stdin.reader(io, &stdio_buffer);
const source_code = std.zig.readSourceFileToEndAlloc(gpa, &file_reader) catch |err| {
fatal("unable to read stdin: {}", .{err});
};
@@ -154,7 +154,7 @@ pub fn run(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8) !
process.exit(code);
}
- return fs.File.stdout().writeAll(formatted);
+ return Io.File.stdout().writeAll(formatted);
}
if (input_files.items.len == 0) {
@@ -162,7 +162,7 @@ pub fn run(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8) !
}
var stdout_buffer: [4096]u8 = undefined;
- var stdout_writer = fs.File.stdout().writer(&stdout_buffer);
+ var stdout_writer = Io.File.stdout().writer(&stdout_buffer);
var fmt: Fmt = .{
.gpa = gpa,
@@ -272,7 +272,7 @@ fn fmtPathFile(
return error.IsDir;
var read_buffer: [1024]u8 = undefined;
- var file_reader: fs.File.Reader = source_file.reader(io, &read_buffer);
+ var file_reader: Io.File.Reader = source_file.reader(io, &read_buffer);
file_reader.size = stat.size;
const gpa = fmt.gpa;
diff --git a/src/link.zig b/src/link.zig
index ef095987c9..d5daf6fca7 100644
--- a/src/link.zig
+++ b/src/link.zig
@@ -393,7 +393,7 @@ pub const File = struct {
comp: *Compilation,
emit: Path,
- file: ?fs.File,
+ file: ?Io.File,
/// When using the LLVM backend, the emitted object is written to a file with this name. This
/// object file then becomes a normal link input to LLD or a self-hosted linker.
///
@@ -1110,7 +1110,7 @@ pub const File = struct {
};
}
- fn loadGnuLdScript(base: *File, path: Path, parent_query: UnresolvedInput.Query, file: fs.File) anyerror!void {
+ fn loadGnuLdScript(base: *File, path: Path, parent_query: UnresolvedInput.Query, file: Io.File) anyerror!void {
const comp = base.comp;
const diags = &comp.link_diags;
const gpa = comp.gpa;
@@ -1238,7 +1238,7 @@ pub const File = struct {
pub fn determineMode(
output_mode: std.builtin.OutputMode,
link_mode: std.builtin.LinkMode,
- ) fs.File.Mode {
+ ) Io.File.Mode {
// On common systems with a 0o022 umask, 0o777 will still result in a file created
// with 0o755 permissions, but it works appropriately if the system is configured
// more leniently. As another data point, C's fopen seems to open files with the
@@ -1247,10 +1247,10 @@ pub const File = struct {
switch (output_mode) {
.Lib => return switch (link_mode) {
.dynamic => executable_mode,
- .static => fs.File.default_mode,
+ .static => Io.File.default_mode,
},
.Exe => return executable_mode,
- .Obj => return fs.File.default_mode,
+ .Obj => return Io.File.default_mode,
}
}
@@ -1660,19 +1660,19 @@ pub const Input = union(enum) {
pub const Object = struct {
path: Path,
- file: fs.File,
+ file: Io.File,
must_link: bool,
hidden: bool,
};
pub const Res = struct {
path: Path,
- file: fs.File,
+ file: Io.File,
};
pub const Dso = struct {
path: Path,
- file: fs.File,
+ file: Io.File,
needed: bool,
weak: bool,
reexport: bool,
@@ -1694,7 +1694,7 @@ pub const Input = union(enum) {
}
/// Returns `null` in the case of `dso_exact`.
- pub fn pathAndFile(input: Input) ?struct { Path, fs.File } {
+ pub fn pathAndFile(input: Input) ?struct { Path, Io.File } {
return switch (input) {
.object, .archive => |obj| .{ obj.path, obj.file },
inline .res, .dso => |x| .{ x.path, x.file },
@@ -2075,7 +2075,7 @@ fn resolveLibInput(
fn finishResolveLibInput(
resolved_inputs: *std.ArrayList(Input),
path: Path,
- file: std.fs.File,
+ file: Io.File,
link_mode: std.builtin.LinkMode,
query: UnresolvedInput.Query,
) ResolveLibInputResult {
diff --git a/src/link/Dwarf.zig b/src/link/Dwarf.zig
index 95f4ca8bbd..cfb02fba38 100644
--- a/src/link/Dwarf.zig
+++ b/src/link/Dwarf.zig
@@ -1,3 +1,24 @@
+const Dwarf = @This();
+
+const std = @import("std");
+const Io = std.Io;
+const Allocator = std.mem.Allocator;
+const DW = std.dwarf;
+const Zir = std.zig.Zir;
+const assert = std.debug.assert;
+const log = std.log.scoped(.dwarf);
+const Writer = std.Io.Writer;
+
+const InternPool = @import("../InternPool.zig");
+const Module = @import("../Package.zig").Module;
+const Type = @import("../Type.zig");
+const Value = @import("../Value.zig");
+const Zcu = @import("../Zcu.zig");
+const codegen = @import("../codegen.zig");
+const dev = @import("../dev.zig");
+const link = @import("../link.zig");
+const target_info = @import("../target.zig");
+
gpa: Allocator,
bin_file: *link.File,
format: DW.Format,
@@ -29,16 +50,16 @@ pub const UpdateError = error{
UnexpectedEndOfFile,
} ||
codegen.GenerateSymbolError ||
- std.fs.File.OpenError ||
- std.fs.File.SetEndPosError ||
- std.fs.File.CopyRangeError ||
- std.fs.File.PReadError ||
- std.fs.File.PWriteError;
+ Io.File.OpenError ||
+ Io.File.SetEndPosError ||
+ Io.File.CopyRangeError ||
+ Io.File.PReadError ||
+ Io.File.PWriteError;
pub const FlushError = UpdateError;
pub const RelocError =
- std.fs.File.PWriteError;
+ Io.File.PWriteError;
pub const AddressSize = enum(u8) {
@"32" = 4,
@@ -6350,7 +6371,7 @@ const AbbrevCode = enum {
});
};
-fn getFile(dwarf: *Dwarf) ?std.fs.File {
+fn getFile(dwarf: *Dwarf) ?Io.File {
if (dwarf.bin_file.cast(.macho)) |macho_file| if (macho_file.d_sym) |*d_sym| return d_sym.file;
return dwarf.bin_file.file;
}
@@ -6429,21 +6450,3 @@ const force_incremental = false;
inline fn incremental(dwarf: Dwarf) bool {
return force_incremental or dwarf.bin_file.comp.config.incremental;
}
-
-const Allocator = std.mem.Allocator;
-const DW = std.dwarf;
-const Dwarf = @This();
-const InternPool = @import("../InternPool.zig");
-const Module = @import("../Package.zig").Module;
-const Type = @import("../Type.zig");
-const Value = @import("../Value.zig");
-const Zcu = @import("../Zcu.zig");
-const Zir = std.zig.Zir;
-const assert = std.debug.assert;
-const codegen = @import("../codegen.zig");
-const dev = @import("../dev.zig");
-const link = @import("../link.zig");
-const log = std.log.scoped(.dwarf);
-const std = @import("std");
-const target_info = @import("../target.zig");
-const Writer = std.Io.Writer;
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index ae7d631f09..584a50c7f2 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -3651,7 +3651,7 @@ fn fileLookup(files: std.MultiArrayList(File.Entry), index: File.Index, zig_obje
pub fn addFileHandle(
gpa: Allocator,
file_handles: *std.ArrayList(File.Handle),
- handle: fs.File,
+ handle: Io.File,
) Allocator.Error!File.HandleIndex {
try file_handles.append(gpa, handle);
return @intCast(file_handles.items.len - 1);
@@ -4068,7 +4068,7 @@ fn fmtDumpState(self: *Elf, writer: *std.Io.Writer) std.Io.Writer.Error!void {
}
/// Caller owns the memory.
-pub fn preadAllAlloc(allocator: Allocator, handle: fs.File, offset: u64, size: u64) ![]u8 {
+pub fn preadAllAlloc(allocator: Allocator, handle: Io.File, offset: u64, size: u64) ![]u8 {
const buffer = try allocator.alloc(u8, math.cast(usize, size) orelse return error.Overflow);
errdefer allocator.free(buffer);
const amt = try handle.preadAll(buffer, offset);
@@ -4460,6 +4460,7 @@ pub fn cast(elf_file: *Elf, comptime T: type, x: anytype) error{LinkFailure}!T {
}
const std = @import("std");
+const Io = std.Io;
const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
diff --git a/src/link/Elf/Object.zig b/src/link/Elf/Object.zig
index d51a82b266..c0dde4176a 100644
--- a/src/link/Elf/Object.zig
+++ b/src/link/Elf/Object.zig
@@ -1,3 +1,30 @@
+const Object = @This();
+
+const std = @import("std");
+const Io = std.Io;
+const assert = std.debug.assert;
+const eh_frame = @import("eh_frame.zig");
+const elf = std.elf;
+const fs = std.fs;
+const log = std.log.scoped(.link);
+const math = std.math;
+const mem = std.mem;
+const Path = std.Build.Cache.Path;
+const Allocator = std.mem.Allocator;
+
+const Diags = @import("../../link.zig").Diags;
+const Archive = @import("Archive.zig");
+const Atom = @import("Atom.zig");
+const AtomList = @import("AtomList.zig");
+const Cie = eh_frame.Cie;
+const Elf = @import("../Elf.zig");
+const Fde = eh_frame.Fde;
+const File = @import("file.zig").File;
+const Merge = @import("Merge.zig");
+const Symbol = @import("Symbol.zig");
+const Alignment = Atom.Alignment;
+const riscv = @import("../riscv.zig");
+
archive: ?InArchive = null,
/// Archive files cannot contain subdirectories, so only the basename is needed
/// for output. However, the full path is kept for error reporting.
@@ -68,7 +95,7 @@ pub fn parse(
diags: *Diags,
/// For error reporting purposes only.
path: Path,
- handle: fs.File,
+ handle: Io.File,
target: *const std.Target,
debug_fmt_strip: bool,
default_sym_version: elf.Versym,
@@ -97,7 +124,7 @@ pub fn parseCommon(
gpa: Allocator,
diags: *Diags,
path: Path,
- handle: fs.File,
+ handle: Io.File,
target: *const std.Target,
) !void {
const offset = if (self.archive) |ar| ar.offset else 0;
@@ -264,7 +291,7 @@ fn initAtoms(
gpa: Allocator,
diags: *Diags,
path: Path,
- handle: fs.File,
+ handle: Io.File,
debug_fmt_strip: bool,
target: *const std.Target,
) !void {
@@ -421,7 +448,7 @@ fn initSymbols(
fn parseEhFrame(
self: *Object,
gpa: Allocator,
- handle: fs.File,
+ handle: Io.File,
shndx: u32,
target: *const std.Target,
) !void {
@@ -1310,7 +1337,7 @@ fn addString(self: *Object, gpa: Allocator, str: []const u8) !u32 {
}
/// Caller owns the memory.
-fn preadShdrContentsAlloc(self: Object, gpa: Allocator, handle: fs.File, index: u32) ![]u8 {
+fn preadShdrContentsAlloc(self: Object, gpa: Allocator, handle: Io.File, index: u32) ![]u8 {
assert(index < self.shdrs.items.len);
const offset = if (self.archive) |ar| ar.offset else 0;
const shdr = self.shdrs.items[index];
@@ -1320,7 +1347,7 @@ fn preadShdrContentsAlloc(self: Object, gpa: Allocator, handle: fs.File, index:
}
/// Caller owns the memory.
-fn preadRelocsAlloc(self: Object, gpa: Allocator, handle: fs.File, shndx: u32) ![]align(1) const elf.Elf64_Rela {
+fn preadRelocsAlloc(self: Object, gpa: Allocator, handle: Io.File, shndx: u32) ![]align(1) const elf.Elf64_Rela {
const raw = try self.preadShdrContentsAlloc(gpa, handle, shndx);
const num = @divExact(raw.len, @sizeOf(elf.Elf64_Rela));
return @as([*]align(1) const elf.Elf64_Rela, @ptrCast(raw.ptr))[0..num];
@@ -1552,29 +1579,3 @@ const InArchive = struct {
offset: u64,
size: u32,
};
-
-const Object = @This();
-
-const std = @import("std");
-const assert = std.debug.assert;
-const eh_frame = @import("eh_frame.zig");
-const elf = std.elf;
-const fs = std.fs;
-const log = std.log.scoped(.link);
-const math = std.math;
-const mem = std.mem;
-const Path = std.Build.Cache.Path;
-const Allocator = std.mem.Allocator;
-
-const Diags = @import("../../link.zig").Diags;
-const Archive = @import("Archive.zig");
-const Atom = @import("Atom.zig");
-const AtomList = @import("AtomList.zig");
-const Cie = eh_frame.Cie;
-const Elf = @import("../Elf.zig");
-const Fde = eh_frame.Fde;
-const File = @import("file.zig").File;
-const Merge = @import("Merge.zig");
-const Symbol = @import("Symbol.zig");
-const Alignment = Atom.Alignment;
-const riscv = @import("../riscv.zig");
diff --git a/src/link/Elf/SharedObject.zig b/src/link/Elf/SharedObject.zig
index 1e17aa34a8..3720fe53d6 100644
--- a/src/link/Elf/SharedObject.zig
+++ b/src/link/Elf/SharedObject.zig
@@ -1,3 +1,20 @@
+const SharedObject = @This();
+
+const std = @import("std");
+const Io = std.Io;
+const assert = std.debug.assert;
+const elf = std.elf;
+const log = std.log.scoped(.elf);
+const mem = std.mem;
+const Path = std.Build.Cache.Path;
+const Stat = std.Build.Cache.File.Stat;
+const Allocator = mem.Allocator;
+
+const Elf = @import("../Elf.zig");
+const File = @import("file.zig").File;
+const Symbol = @import("Symbol.zig");
+const Diags = @import("../../link.zig").Diags;
+
path: Path,
index: File.Index,
@@ -94,7 +111,7 @@ pub fn parseHeader(
gpa: Allocator,
diags: *Diags,
file_path: Path,
- fs_file: std.fs.File,
+ fs_file: Io.File,
stat: Stat,
target: *const std.Target,
) !Header {
@@ -192,7 +209,7 @@ pub fn parse(
gpa: Allocator,
/// Moves resources from header. Caller may unconditionally deinit.
header: *Header,
- fs_file: std.fs.File,
+ fs_file: Io.File,
) !Parsed {
const symtab = if (header.dynsym_sect_index) |index| st: {
const shdr = header.sections[index];
@@ -534,19 +551,3 @@ const Format = struct {
}
}
};
-
-const SharedObject = @This();
-
-const std = @import("std");
-const assert = std.debug.assert;
-const elf = std.elf;
-const log = std.log.scoped(.elf);
-const mem = std.mem;
-const Path = std.Build.Cache.Path;
-const Stat = std.Build.Cache.File.Stat;
-const Allocator = mem.Allocator;
-
-const Elf = @import("../Elf.zig");
-const File = @import("file.zig").File;
-const Symbol = @import("Symbol.zig");
-const Diags = @import("../../link.zig").Diags;
diff --git a/src/link/Elf/file.zig b/src/link/Elf/file.zig
index 50f5159d18..52d3c6e6f0 100644
--- a/src/link/Elf/file.zig
+++ b/src/link/Elf/file.zig
@@ -1,3 +1,20 @@
+const std = @import("std");
+const Io = std.Io;
+const elf = std.elf;
+const log = std.log.scoped(.link);
+const Path = std.Build.Cache.Path;
+const Allocator = std.mem.Allocator;
+
+const Archive = @import("Archive.zig");
+const Atom = @import("Atom.zig");
+const Cie = @import("eh_frame.zig").Cie;
+const Elf = @import("../Elf.zig");
+const LinkerDefined = @import("LinkerDefined.zig");
+const Object = @import("Object.zig");
+const SharedObject = @import("SharedObject.zig");
+const Symbol = @import("Symbol.zig");
+const ZigObject = @import("ZigObject.zig");
+
pub const File = union(enum) {
zig_object: *ZigObject,
linker_defined: *LinkerDefined,
@@ -279,22 +296,6 @@ pub const File = union(enum) {
shared_object: SharedObject,
};
- pub const Handle = std.fs.File;
+ pub const Handle = Io.File;
pub const HandleIndex = Index;
};
-
-const std = @import("std");
-const elf = std.elf;
-const log = std.log.scoped(.link);
-const Path = std.Build.Cache.Path;
-const Allocator = std.mem.Allocator;
-
-const Archive = @import("Archive.zig");
-const Atom = @import("Atom.zig");
-const Cie = @import("eh_frame.zig").Cie;
-const Elf = @import("../Elf.zig");
-const LinkerDefined = @import("LinkerDefined.zig");
-const Object = @import("Object.zig");
-const SharedObject = @import("SharedObject.zig");
-const Symbol = @import("Symbol.zig");
-const ZigObject = @import("ZigObject.zig");
diff --git a/src/link/Elf2.zig b/src/link/Elf2.zig
index 7d12ccedb2..e35444bc02 100644
--- a/src/link/Elf2.zig
+++ b/src/link/Elf2.zig
@@ -1,3 +1,23 @@
+const Elf = @This();
+
+const builtin = @import("builtin");
+const native_endian = builtin.cpu.arch.endian();
+
+const std = @import("std");
+const Io = std.Io;
+const assert = std.debug.assert;
+const log = std.log.scoped(.link);
+
+const codegen = @import("../codegen.zig");
+const Compilation = @import("../Compilation.zig");
+const InternPool = @import("../InternPool.zig");
+const link = @import("../link.zig");
+const MappedFile = @import("MappedFile.zig");
+const target_util = @import("../target.zig");
+const Type = @import("../Type.zig");
+const Value = @import("../Value.zig");
+const Zcu = @import("../Zcu.zig");
+
base: link.File,
options: link.File.OpenOptions,
mf: MappedFile,
@@ -1973,8 +1993,8 @@ pub fn lazySymbol(elf: *Elf, lazy: link.File.LazySymbol) !Symbol.Index {
return lazy_gop.value_ptr.*;
}
-pub fn loadInput(elf: *Elf, input: link.Input) (std.fs.File.Reader.SizeError ||
- std.Io.File.Reader.Error || MappedFile.Error || error{ EndOfStream, BadMagic, LinkFailure })!void {
+pub fn loadInput(elf: *Elf, input: link.Input) (Io.File.Reader.SizeError ||
+ Io.File.Reader.Error || MappedFile.Error || error{ EndOfStream, BadMagic, LinkFailure })!void {
const io = elf.base.comp.io;
var buf: [4096]u8 = undefined;
switch (input) {
@@ -2007,7 +2027,7 @@ pub fn loadInput(elf: *Elf, input: link.Input) (std.fs.File.Reader.SizeError ||
.dso_exact => |dso_exact| try elf.loadDsoExact(dso_exact.name),
}
}
-fn loadArchive(elf: *Elf, path: std.Build.Cache.Path, fr: *std.Io.File.Reader) !void {
+fn loadArchive(elf: *Elf, path: std.Build.Cache.Path, fr: *Io.File.Reader) !void {
const comp = elf.base.comp;
const gpa = comp.gpa;
const diags = &comp.link_diags;
@@ -2067,7 +2087,7 @@ fn loadObject(
elf: *Elf,
path: std.Build.Cache.Path,
member: ?[]const u8,
- fr: *std.Io.File.Reader,
+ fr: *Io.File.Reader,
fl: MappedFile.Node.FileLocation,
) !void {
const comp = elf.base.comp;
@@ -2310,7 +2330,7 @@ fn loadObject(
},
}
}
-fn loadDso(elf: *Elf, path: std.Build.Cache.Path, fr: *std.Io.File.Reader) !void {
+fn loadDso(elf: *Elf, path: std.Build.Cache.Path, fr: *Io.File.Reader) !void {
const comp = elf.base.comp;
const diags = &comp.link_diags;
const r = &fr.interface;
@@ -3822,19 +3842,3 @@ pub fn printNode(
try w.writeByte('\n');
}
}
-
-const assert = std.debug.assert;
-const builtin = @import("builtin");
-const codegen = @import("../codegen.zig");
-const Compilation = @import("../Compilation.zig");
-const Elf = @This();
-const InternPool = @import("../InternPool.zig");
-const link = @import("../link.zig");
-const log = std.log.scoped(.link);
-const MappedFile = @import("MappedFile.zig");
-const native_endian = builtin.cpu.arch.endian();
-const std = @import("std");
-const target_util = @import("../target.zig");
-const Type = @import("../Type.zig");
-const Value = @import("../Value.zig");
-const Zcu = @import("../Zcu.zig");
diff --git a/src/link/MachO.zig b/src/link/MachO.zig
index 471465cea1..72a49c0c9e 100644
--- a/src/link/MachO.zig
+++ b/src/link/MachO.zig
@@ -890,7 +890,7 @@ pub fn classifyInputFile(self: *MachO, input: link.Input) !void {
_ = try self.addTbd(.fromLinkInput(input), true, fh);
}
-fn parseFatFile(self: *MachO, file: std.fs.File, path: Path) !?fat.Arch {
+fn parseFatFile(self: *MachO, file: Io.File, path: Path) !?fat.Arch {
const diags = &self.base.comp.link_diags;
const fat_h = fat.readFatHeader(file) catch return null;
if (fat_h.magic != macho.FAT_MAGIC and fat_h.magic != macho.FAT_MAGIC_64) return null;
@@ -903,7 +903,7 @@ fn parseFatFile(self: *MachO, file: std.fs.File, path: Path) !?fat.Arch {
return diags.failParse(path, "missing arch in universal file: expected {s}", .{@tagName(cpu_arch)});
}
-pub fn readMachHeader(file: std.fs.File, offset: usize) !macho.mach_header_64 {
+pub fn readMachHeader(file: Io.File, offset: usize) !macho.mach_header_64 {
var buffer: [@sizeOf(macho.mach_header_64)]u8 = undefined;
const nread = try file.preadAll(&buffer, offset);
if (nread != buffer.len) return error.InputOutput;
@@ -911,7 +911,7 @@ pub fn readMachHeader(file: std.fs.File, offset: usize) !macho.mach_header_64 {
return hdr;
}
-pub fn readArMagic(file: std.fs.File, offset: usize, buffer: *[Archive.SARMAG]u8) ![]const u8 {
+pub fn readArMagic(file: Io.File, offset: usize, buffer: *[Archive.SARMAG]u8) ![]const u8 {
const nread = try file.preadAll(buffer, offset);
if (nread != buffer.len) return error.InputOutput;
return buffer[0..Archive.SARMAG];
@@ -3768,7 +3768,7 @@ pub fn getInternalObject(self: *MachO) ?*InternalObject {
return self.getFile(index).?.internal;
}
-pub fn addFileHandle(self: *MachO, file: fs.File) !File.HandleIndex {
+pub fn addFileHandle(self: *MachO, file: Io.File) !File.HandleIndex {
const gpa = self.base.comp.gpa;
const index: File.HandleIndex = @intCast(self.file_handles.items.len);
const fh = try self.file_handles.addOne(gpa);
@@ -5373,10 +5373,11 @@ const max_distance = (1 << (jump_bits - 1));
const max_allowed_distance = max_distance - 0x500_000;
const MachO = @This();
-
-const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
+
+const std = @import("std");
+const Io = std.Io;
const assert = std.debug.assert;
const fs = std.fs;
const log = std.log.scoped(.link);
@@ -5386,6 +5387,11 @@ const math = std.math;
const mem = std.mem;
const meta = std.meta;
const Writer = std.Io.Writer;
+const AtomicBool = std.atomic.Value(bool);
+const Cache = std.Build.Cache;
+const Hash = std.hash.Wyhash;
+const Md5 = std.crypto.hash.Md5;
+const Allocator = std.mem.Allocator;
const aarch64 = codegen.aarch64.encoding;
const bind = @import("MachO/dyld_info/bind.zig");
@@ -5403,11 +5409,8 @@ const trace = @import("../tracy.zig").trace;
const synthetic = @import("MachO/synthetic.zig");
const Alignment = Atom.Alignment;
-const Allocator = mem.Allocator;
const Archive = @import("MachO/Archive.zig");
-const AtomicBool = std.atomic.Value(bool);
const Bind = bind.Bind;
-const Cache = std.Build.Cache;
const CodeSignature = @import("MachO/CodeSignature.zig");
const Compilation = @import("../Compilation.zig");
const DataInCode = synthetic.DataInCode;
@@ -5417,14 +5420,12 @@ const ExportTrie = @import("MachO/dyld_info/Trie.zig");
const Path = Cache.Path;
const File = @import("MachO/file.zig").File;
const GotSection = synthetic.GotSection;
-const Hash = std.hash.Wyhash;
const Indsymtab = synthetic.Indsymtab;
const InternalObject = @import("MachO/InternalObject.zig");
const ObjcStubsSection = synthetic.ObjcStubsSection;
const Object = @import("MachO/Object.zig");
const LazyBind = bind.LazyBind;
const LaSymbolPtrSection = synthetic.LaSymbolPtrSection;
-const Md5 = std.crypto.hash.Md5;
const Zcu = @import("../Zcu.zig");
const InternPool = @import("../InternPool.zig");
const Rebase = @import("MachO/dyld_info/Rebase.zig");
diff --git a/src/link/MachO/CodeSignature.zig b/src/link/MachO/CodeSignature.zig
index 5bded3b9e3..5f9a9ecac9 100644
--- a/src/link/MachO/CodeSignature.zig
+++ b/src/link/MachO/CodeSignature.zig
@@ -1,17 +1,19 @@
const CodeSignature = @This();
const std = @import("std");
+const Io = std.Io;
const assert = std.debug.assert;
const fs = std.fs;
const log = std.log.scoped(.link);
const macho = std.macho;
const mem = std.mem;
const testing = std.testing;
+const Sha256 = std.crypto.hash.sha2.Sha256;
+const Allocator = std.mem.Allocator;
+
const trace = @import("../../tracy.zig").trace;
-const Allocator = mem.Allocator;
const Hasher = @import("hasher.zig").ParallelHasher;
const MachO = @import("../MachO.zig");
-const Sha256 = std.crypto.hash.sha2.Sha256;
const hash_size = Sha256.digest_length;
@@ -250,7 +252,7 @@ pub fn addEntitlements(self: *CodeSignature, allocator: Allocator, path: []const
}
pub const WriteOpts = struct {
- file: fs.File,
+ file: Io.File,
exec_seg_base: u64,
exec_seg_limit: u64,
file_size: u32,
diff --git a/src/link/MachO/fat.zig b/src/link/MachO/fat.zig
index 7772f7a4de..fd9a302531 100644
--- a/src/link/MachO/fat.zig
+++ b/src/link/MachO/fat.zig
@@ -1,18 +1,20 @@
+const builtin = @import("builtin");
+const native_endian = builtin.target.cpu.arch.endian();
+
const std = @import("std");
+const Io = std.Io;
const assert = std.debug.assert;
-const builtin = @import("builtin");
const log = std.log.scoped(.macho);
const macho = std.macho;
const mem = std.mem;
-const native_endian = builtin.target.cpu.arch.endian();
const MachO = @import("../MachO.zig");
-pub fn readFatHeader(file: std.fs.File) !macho.fat_header {
+pub fn readFatHeader(file: Io.File) !macho.fat_header {
return readFatHeaderGeneric(macho.fat_header, file, 0);
}
-fn readFatHeaderGeneric(comptime Hdr: type, file: std.fs.File, offset: usize) !Hdr {
+fn readFatHeaderGeneric(comptime Hdr: type, file: Io.File, offset: usize) !Hdr {
var buffer: [@sizeOf(Hdr)]u8 = undefined;
const nread = try file.preadAll(&buffer, offset);
if (nread != buffer.len) return error.InputOutput;
@@ -27,7 +29,7 @@ pub const Arch = struct {
size: u32,
};
-pub fn parseArchs(file: std.fs.File, fat_header: macho.fat_header, out: *[2]Arch) ![]const Arch {
+pub fn parseArchs(file: Io.File, fat_header: macho.fat_header, out: *[2]Arch) ![]const Arch {
var count: usize = 0;
var fat_arch_index: u32 = 0;
while (fat_arch_index < fat_header.nfat_arch and count < out.len) : (fat_arch_index += 1) {
diff --git a/src/link/MachO/file.zig b/src/link/MachO/file.zig
index 05b43de181..cd687a4941 100644
--- a/src/link/MachO/file.zig
+++ b/src/link/MachO/file.zig
@@ -355,11 +355,12 @@ pub const File = union(enum) {
dylib: Dylib,
};
- pub const Handle = std.fs.File;
+ pub const Handle = Io.File;
pub const HandleIndex = Index;
};
const std = @import("std");
+const Io = std.Io;
const assert = std.debug.assert;
const log = std.log.scoped(.link);
const macho = std.macho;
diff --git a/src/link/MachO/hasher.zig b/src/link/MachO/hasher.zig
index 78cd847c40..8cf53071c8 100644
--- a/src/link/MachO/hasher.zig
+++ b/src/link/MachO/hasher.zig
@@ -1,3 +1,9 @@
+const std = @import("std");
+const Io = std.Io;
+const Allocator = std.mem.Allocator;
+
+const trace = @import("../../tracy.zig").trace;
+
pub fn ParallelHasher(comptime Hasher: type) type {
const hash_size = Hasher.digest_length;
@@ -5,7 +11,7 @@ pub fn ParallelHasher(comptime Hasher: type) type {
allocator: Allocator,
io: std.Io,
- pub fn hash(self: Self, file: fs.File, out: [][hash_size]u8, opts: struct {
+ pub fn hash(self: Self, file: Io.File, out: [][hash_size]u8, opts: struct {
chunk_size: u64 = 0x4000,
max_file_size: ?u64 = null,
}) !void {
@@ -23,7 +29,7 @@ pub fn ParallelHasher(comptime Hasher: type) type {
const buffer = try self.allocator.alloc(u8, chunk_size * out.len);
defer self.allocator.free(buffer);
- const results = try self.allocator.alloc(fs.File.PReadError!usize, out.len);
+ const results = try self.allocator.alloc(Io.File.PReadError!usize, out.len);
defer self.allocator.free(results);
{
@@ -51,11 +57,11 @@ pub fn ParallelHasher(comptime Hasher: type) type {
}
fn worker(
- file: fs.File,
+ file: Io.File,
fstart: usize,
buffer: []u8,
out: *[hash_size]u8,
- err: *fs.File.PReadError!usize,
+ err: *Io.File.PReadError!usize,
) void {
const tracy = trace(@src());
defer tracy.end();
@@ -66,11 +72,3 @@ pub fn ParallelHasher(comptime Hasher: type) type {
const Self = @This();
};
}
-
-const assert = std.debug.assert;
-const fs = std.fs;
-const mem = std.mem;
-const std = @import("std");
-const trace = @import("../../tracy.zig").trace;
-
-const Allocator = mem.Allocator;
diff --git a/src/link/MachO/uuid.zig b/src/link/MachO/uuid.zig
index d08ac0c5b8..4d8eac7523 100644
--- a/src/link/MachO/uuid.zig
+++ b/src/link/MachO/uuid.zig
@@ -1,10 +1,18 @@
+const std = @import("std");
+const Io = std.Io;
+const Md5 = std.crypto.hash.Md5;
+
+const trace = @import("../../tracy.zig").trace;
+const Compilation = @import("../../Compilation.zig");
+const Hasher = @import("hasher.zig").ParallelHasher;
+
/// Calculates Md5 hash of each chunk in parallel and then hashes all Md5 hashes to produce
/// the final digest.
/// While this is NOT a correct MD5 hash of the contents, this methodology is used by LLVM/LLD
/// and we will use it too as it seems accepted by Apple OSes.
/// TODO LLD also hashes the output filename to disambiguate between same builds with different
/// output files. Should we also do that?
-pub fn calcUuid(comp: *const Compilation, file: fs.File, file_size: u64, out: *[Md5.digest_length]u8) !void {
+pub fn calcUuid(comp: *const Compilation, file: Io.File, file_size: u64, out: *[Md5.digest_length]u8) !void {
const tracy = trace(@src());
defer tracy.end();
@@ -37,12 +45,3 @@ inline fn conform(out: *[Md5.digest_length]u8) void {
out[6] = (out[6] & 0x0F) | (3 << 4);
out[8] = (out[8] & 0x3F) | 0x80;
}
-
-const fs = std.fs;
-const mem = std.mem;
-const std = @import("std");
-const trace = @import("../../tracy.zig").trace;
-
-const Compilation = @import("../../Compilation.zig");
-const Md5 = std.crypto.hash.Md5;
-const Hasher = @import("hasher.zig").ParallelHasher;
diff --git a/src/link/MappedFile.zig b/src/link/MappedFile.zig
index 975b94578b..7d4134ccaf 100644
--- a/src/link/MappedFile.zig
+++ b/src/link/MappedFile.zig
@@ -1,3 +1,15 @@
+const MappedFile = @This();
+
+const builtin = @import("builtin");
+const is_linux = builtin.os.tag == .linux;
+const is_windows = builtin.os.tag == .windows;
+
+const std = @import("std");
+const Io = std.Io;
+const assert = std.debug.assert;
+const linux = std.os.linux;
+const windows = std.os.windows;
+
file: std.Io.File,
flags: packed struct {
block_size: std.mem.Alignment,
@@ -16,7 +28,7 @@ writers: std.SinglyLinkedList,
pub const growth_factor = 4;
-pub const Error = std.posix.MMapError || std.posix.MRemapError || std.fs.File.SetEndPosError || error{
+pub const Error = std.posix.MMapError || std.posix.MRemapError || Io.File.SetEndPosError || error{
NotFile,
SystemResources,
IsDir,
@@ -618,7 +630,7 @@ fn resizeNode(mf: *MappedFile, gpa: std.mem.Allocator, ni: Node.Index, requested
// Resize the entire file
if (ni == Node.Index.root) {
try mf.ensureCapacityForSetLocation(gpa);
- try std.fs.File.adaptFromNewApi(mf.file).setEndPos(new_size);
+ try Io.File.adaptFromNewApi(mf.file).setEndPos(new_size);
try mf.ensureTotalCapacity(@intCast(new_size));
ni.setLocationAssumeCapacity(mf, old_offset, new_size);
return;
@@ -1059,12 +1071,3 @@ fn verifyNode(mf: *MappedFile, parent_ni: Node.Index) void {
ni = node.next;
}
}
-
-const assert = std.debug.assert;
-const builtin = @import("builtin");
-const is_linux = builtin.os.tag == .linux;
-const is_windows = builtin.os.tag == .windows;
-const linux = std.os.linux;
-const MappedFile = @This();
-const std = @import("std");
-const windows = std.os.windows;
diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig
index 160e6cdcc6..7ab1e0eb4b 100644
--- a/src/link/Wasm.zig
+++ b/src/link/Wasm.zig
@@ -20,6 +20,7 @@ const native_endian = builtin.cpu.arch.endian();
const build_options = @import("build_options");
const std = @import("std");
+const Io = std.Io;
const Allocator = std.mem.Allocator;
const Cache = std.Build.Cache;
const Path = Cache.Path;
@@ -3001,9 +3002,9 @@ pub fn createEmpty(
.read = true,
.mode = if (fs.has_executable_bit)
if (target.os.tag == .wasi and output_mode == .Exe)
- fs.File.default_mode | 0b001_000_000
+ Io.File.default_mode | 0b001_000_000
else
- fs.File.default_mode
+ Io.File.default_mode
else
0,
});
diff --git a/src/link/tapi.zig b/src/link/tapi.zig
index 4c1471a6b4..fff25b7544 100644
--- a/src/link/tapi.zig
+++ b/src/link/tapi.zig
@@ -1,10 +1,10 @@
const std = @import("std");
-const fs = std.fs;
+const Io = std.Io;
const mem = std.mem;
const log = std.log.scoped(.tapi);
-const yaml = @import("tapi/yaml.zig");
+const Allocator = std.mem.Allocator;
-const Allocator = mem.Allocator;
+const yaml = @import("tapi/yaml.zig");
const Yaml = yaml.Yaml;
const VersionField = union(enum) {
@@ -130,7 +130,7 @@ pub const Tbd = union(enum) {
pub const TapiError = error{
NotLibStub,
InputOutput,
-} || yaml.YamlError || std.fs.File.PReadError;
+} || yaml.YamlError || Io.File.PReadError;
pub const LibStub = struct {
/// Underlying memory for stub's contents.
@@ -139,7 +139,7 @@ pub const LibStub = struct {
/// Typed contents of the tbd file.
inner: []Tbd,
- pub fn loadFromFile(allocator: Allocator, file: fs.File) TapiError!LibStub {
+ pub fn loadFromFile(allocator: Allocator, file: Io.File) TapiError!LibStub {
const filesize = blk: {
const stat = file.stat() catch break :blk std.math.maxInt(u32);
break :blk @min(stat.size, std.math.maxInt(u32));