author    Andrew Kelley <andrew@ziglang.org>  2025-12-08 14:17:52 -0800
committer Andrew Kelley <andrew@ziglang.org>  2025-12-23 22:15:08 -0800
commit    9f4d40b1f9bffc4137055b8a07f042ecfa398124 (patch)
tree      be2a20f7e4f0d09a955c654e6627d147836954da /src
parent    264d714321d3e5f1f189af393e1fb24d101a7e91 (diff)
update all stat() to stat(io)
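
A minimal sketch of the new call shape (the `fileSize` helper is illustrative
only and not part of this commit; the `Io`, `Io.File`, and `stat.size` names
are the same ones used throughout the diff below):

    const std = @import("std");
    const Io = std.Io;

    // File.stat now takes the Io instance explicitly instead of reaching
    // for the filesystem on its own.
    fn fileSize(io: Io, file: Io.File) !u64 {
        const stat = try file.stat(io);
        return stat.size;
    }
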
Diffstat (limited to 'src')
-rw-r--r--  src/Compilation.zig            |  2
-rw-r--r--  src/Zcu.zig                    |  2
-rw-r--r--  src/Zcu/PerThread.zig          |  4
-rw-r--r--  src/fmt.zig                    |  6
-rw-r--r--  src/link.zig                   |  6
-rw-r--r--  src/link/Elf.zig               |  5
-rw-r--r--  src/link/Elf/Archive.zig       | 45
-rw-r--r--  src/link/Elf/Object.zig        |  7
-rw-r--r--  src/link/MachO.zig             |  3
-rw-r--r--  src/link/MachO/Archive.zig     |  3
-rw-r--r--  src/link/MachO/Object.zig      |  4
-rw-r--r--  src/link/MachO/relocatable.zig |  2
-rw-r--r--  src/link/tapi.zig              |  4
13 files changed, 52 insertions(+), 41 deletions(-)
diff --git a/src/Compilation.zig b/src/Compilation.zig
index ef01fbb07c..3a48705880 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -5367,7 +5367,7 @@ fn docsCopyModule(
});
};
defer file.close(io);
- const stat = try file.stat();
+ const stat = try file.stat(io);
var file_reader: Io.File.Reader = .initSize(file, io, &buffer, stat.size);
archiver.writeFileTimestamp(entry.path, &file_reader, stat.mtime) catch |err| {
diff --git a/src/Zcu.zig b/src/Zcu.zig
index d2634a8962..1f2fe89236 100644
--- a/src/Zcu.zig
+++ b/src/Zcu.zig
@@ -1080,7 +1080,7 @@ pub const File = struct {
};
defer f.close(io);
- const stat = f.stat() catch |err| switch (err) {
+ const stat = f.stat(io) catch |err| switch (err) {
error.Streaming => {
// Since `file.stat` is populated, this was previously a file stream; since it is
// now not a file stream, it must have changed.
diff --git a/src/Zcu/PerThread.zig b/src/Zcu/PerThread.zig
index 9a75b2096e..74196705c3 100644
--- a/src/Zcu/PerThread.zig
+++ b/src/Zcu/PerThread.zig
@@ -98,7 +98,7 @@ pub fn updateFile(
};
defer source_file.close(io);
- const stat = try source_file.stat();
+ const stat = try source_file.stat(io);
const want_local_cache = switch (file.path.root) {
.none, .local_cache => true,
@@ -2470,7 +2470,7 @@ fn updateEmbedFileInner(
};
defer file.close(io);
- const stat: Cache.File.Stat = .fromFs(try file.stat());
+ const stat: Cache.File.Stat = .fromFs(try file.stat(io));
if (ef.val != .none) {
const old_stat = ef.stat;
diff --git a/src/fmt.zig b/src/fmt.zig
index dc67619d54..1a1e5298e2 100644
--- a/src/fmt.zig
+++ b/src/fmt.zig
@@ -188,7 +188,7 @@ pub fn run(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8) !
error.IsDir => dir: {
var dir = try Io.Dir.cwd().openDir(io, file_path, .{});
defer dir.close(io);
- break :dir try dir.stat();
+ break :dir try dir.stat(io);
},
else => |e| return e,
};
@@ -227,7 +227,7 @@ fn fmtPathDir(
var dir = try parent_dir.openDir(io, parent_sub_path, .{ .iterate = true });
defer dir.close(io);
- const stat = try dir.stat();
+ const stat = try dir.stat(io);
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
var dir_it = dir.iterate();
@@ -266,7 +266,7 @@ fn fmtPathFile(
var file_closed = false;
errdefer if (!file_closed) source_file.close(io);
- const stat = try source_file.stat();
+ const stat = try source_file.stat(io);
if (stat.kind == .directory)
return error.IsDir;
diff --git a/src/link.zig b/src/link.zig
index 073ec632c6..06d18ec2d5 100644
--- a/src/link.zig
+++ b/src/link.zig
@@ -1112,10 +1112,10 @@ pub const File = struct {
fn loadGnuLdScript(base: *File, path: Path, parent_query: UnresolvedInput.Query, file: Io.File) anyerror!void {
const comp = base.comp;
+ const io = comp.io;
const diags = &comp.link_diags;
const gpa = comp.gpa;
- const io = comp.io;
- const stat = try file.stat();
+ const stat = try file.stat(io);
const size = std.math.cast(u32, stat.size) orelse return error.FileTooBig;
const buf = try gpa.alloc(u8, size);
defer gpa.free(buf);
@@ -2180,7 +2180,7 @@ fn resolvePathInputLib(
// Appears to be an ELF or archive file.
return finishResolveLibInput(resolved_inputs, test_path, file, link_mode, pq.query);
}
- const stat = file.stat() catch |err|
+ const stat = file.stat(io) catch |err|
fatal("failed to stat {f}: {s}", .{ test_path, @errorName(err) });
const size = std.math.cast(u32, stat.size) orelse
fatal("{f}: linker script too big", .{test_path});
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index 53812a37ec..13a624a295 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -742,7 +742,7 @@ pub fn loadInput(self: *Elf, input: link.Input) !void {
.dso_exact => @panic("TODO"),
.object => |obj| try parseObject(self, obj),
.archive => |obj| try parseArchive(gpa, diags, &self.file_handles, &self.files, target, debug_fmt_strip, default_sym_version, &self.objects, obj, is_static_lib),
- .dso => |dso| try parseDso(gpa, diags, dso, &self.shared_objects, &self.files, target),
+ .dso => |dso| try parseDso(gpa, io, diags, dso, &self.shared_objects, &self.files, target),
}
}
@@ -1136,6 +1136,7 @@ fn parseArchive(
fn parseDso(
gpa: Allocator,
+ io: Io,
diags: *Diags,
dso: link.Input.Dso,
shared_objects: *std.StringArrayHashMapUnmanaged(File.Index),
@@ -1147,7 +1148,7 @@ fn parseDso(
const handle = dso.file;
- const stat = Stat.fromFs(try handle.stat());
+ const stat = Stat.fromFs(try handle.stat(io));
var header = try SharedObject.parseHeader(gpa, diags, dso.path, handle, stat, target);
defer header.deinit(gpa);
diff --git a/src/link/Elf/Archive.zig b/src/link/Elf/Archive.zig
index a9961bf8f9..0f786c1a47 100644
--- a/src/link/Elf/Archive.zig
+++ b/src/link/Elf/Archive.zig
@@ -1,3 +1,21 @@
+const Archive = @This();
+
+const std = @import("std");
+const Io = std.Io;
+const assert = std.debug.assert;
+const elf = std.elf;
+const fs = std.fs;
+const log = std.log.scoped(.link);
+const mem = std.mem;
+const Path = std.Build.Cache.Path;
+const Allocator = std.mem.Allocator;
+
+const Diags = @import("../../link.zig").Diags;
+const Elf = @import("../Elf.zig");
+const File = @import("file.zig").File;
+const Object = @import("Object.zig");
+const StringTable = @import("../StringTable.zig");
+
objects: []const Object,
/// '\n'-delimited
strtab: []const u8,
@@ -10,6 +28,7 @@ pub fn deinit(a: *Archive, gpa: Allocator) void {
pub fn parse(
gpa: Allocator,
+ io: Io,
diags: *Diags,
file_handles: *const std.ArrayList(File.Handle),
path: Path,
@@ -25,7 +44,7 @@ pub fn parse(
pos += magic_buffer.len;
}
- const size = (try handle.stat()).size;
+ const size = (try handle.stat(io)).size;
var objects: std.ArrayList(Object) = .empty;
defer objects.deinit(gpa);
@@ -120,7 +139,7 @@ pub fn setArHdr(opts: struct {
@memset(mem.asBytes(&hdr), 0x20);
{
- var writer: std.Io.Writer = .fixed(&hdr.ar_name);
+ var writer: Io.Writer = .fixed(&hdr.ar_name);
switch (opts.name) {
.symtab => writer.print("{s}", .{elf.SYM64NAME}) catch unreachable,
.strtab => writer.print("//", .{}) catch unreachable,
@@ -133,7 +152,7 @@ pub fn setArHdr(opts: struct {
hdr.ar_gid[0] = '0';
hdr.ar_mode[0] = '0';
{
- var writer: std.Io.Writer = .fixed(&hdr.ar_size);
+ var writer: Io.Writer = .fixed(&hdr.ar_size);
writer.print("{d}", .{opts.size}) catch unreachable;
}
hdr.ar_fmag = elf.ARFMAG.*;
@@ -206,7 +225,7 @@ pub const ArSymtab = struct {
ar: ArSymtab,
elf_file: *Elf,
- fn default(f: Format, writer: *std.Io.Writer) std.Io.Writer.Error!void {
+ fn default(f: Format, writer: *Io.Writer) Io.Writer.Error!void {
const ar = f.ar;
const elf_file = f.elf_file;
for (ar.symtab.items, 0..) |entry, i| {
@@ -261,7 +280,7 @@ pub const ArStrtab = struct {
try writer.writeAll(ar.buffer.items);
}
- pub fn format(ar: ArStrtab, writer: *std.Io.Writer) std.Io.Writer.Error!void {
+ pub fn format(ar: ArStrtab, writer: *Io.Writer) Io.Writer.Error!void {
try writer.print("{f}", .{std.ascii.hexEscape(ar.buffer.items, .lower)});
}
};
@@ -277,19 +296,3 @@ pub const ArState = struct {
/// Total size of the contributing object (excludes ar_hdr).
size: u64 = 0,
};
-
-const std = @import("std");
-const assert = std.debug.assert;
-const elf = std.elf;
-const fs = std.fs;
-const log = std.log.scoped(.link);
-const mem = std.mem;
-const Path = std.Build.Cache.Path;
-const Allocator = std.mem.Allocator;
-
-const Diags = @import("../../link.zig").Diags;
-const Archive = @This();
-const Elf = @import("../Elf.zig");
-const File = @import("file.zig").File;
-const Object = @import("Object.zig");
-const StringTable = @import("../StringTable.zig");
diff --git a/src/link/Elf/Object.zig b/src/link/Elf/Object.zig
index c0dde4176a..7dacfb3a62 100644
--- a/src/link/Elf/Object.zig
+++ b/src/link/Elf/Object.zig
@@ -122,13 +122,14 @@ pub fn parse(
pub fn parseCommon(
self: *Object,
gpa: Allocator,
+ io: Io,
diags: *Diags,
path: Path,
handle: Io.File,
target: *const std.Target,
) !void {
const offset = if (self.archive) |ar| ar.offset else 0;
- const file_size = (try handle.stat()).size;
+ const file_size = (try handle.stat(io)).size;
const header_buffer = try Elf.preadAllAlloc(gpa, handle, offset, @sizeOf(elf.Elf64_Ehdr));
defer gpa.free(header_buffer);
@@ -1122,9 +1123,11 @@ pub fn updateArSymtab(self: Object, ar_symtab: *Archive.ArSymtab, elf_file: *Elf
}
pub fn updateArSize(self: *Object, elf_file: *Elf) !void {
+ const comp = elf_file.base.comp;
+ const io = comp.io;
self.output_ar_state.size = if (self.archive) |ar| ar.size else size: {
const handle = elf_file.fileHandle(self.file_handle);
- break :size (try handle.stat()).size;
+ break :size (try handle.stat(io)).size;
};
}
diff --git a/src/link/MachO.zig b/src/link/MachO.zig
index 0f6127e10e..78e035e2ad 100644
--- a/src/link/MachO.zig
+++ b/src/link/MachO.zig
@@ -925,6 +925,7 @@ fn addObject(self: *MachO, path: Path, handle_index: File.HandleIndex, offset: u
const comp = self.base.comp;
const gpa = comp.gpa;
+ const io = comp.io;
const abs_path = try std.fs.path.resolvePosix(gpa, &.{
comp.dirs.cwd,
@@ -934,7 +935,7 @@ fn addObject(self: *MachO, path: Path, handle_index: File.HandleIndex, offset: u
errdefer gpa.free(abs_path);
const file = self.getFileHandle(handle_index);
- const stat = try file.stat();
+ const stat = try file.stat(io);
const mtime = stat.mtime.toSeconds();
const index: File.Index = @intCast(try self.files.addOne(gpa));
self.files.set(index, .{ .object = .{
diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig
index d1962412c4..122a408533 100644
--- a/src/link/MachO/Archive.zig
+++ b/src/link/MachO/Archive.zig
@@ -6,6 +6,7 @@ pub fn deinit(self: *Archive, allocator: Allocator) void {
pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File.HandleIndex, fat_arch: ?fat.Arch) !void {
const comp = macho_file.base.comp;
+ const io = comp.io;
const gpa = comp.gpa;
const diags = &comp.link_diags;
@@ -14,7 +15,7 @@ pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File
const handle = macho_file.getFileHandle(handle_index);
const offset = if (fat_arch) |ar| ar.offset else 0;
- const end_pos = if (fat_arch) |ar| offset + ar.size else (try handle.stat()).size;
+ const end_pos = if (fat_arch) |ar| offset + ar.size else (try handle.stat(io)).size;
var pos: usize = offset + SARMAG;
while (true) {
diff --git a/src/link/MachO/Object.zig b/src/link/MachO/Object.zig
index 5fc77fe763..1a1799f551 100644
--- a/src/link/MachO/Object.zig
+++ b/src/link/MachO/Object.zig
@@ -1689,9 +1689,11 @@ pub fn updateArSymtab(self: Object, ar_symtab: *Archive.ArSymtab, macho_file: *M
}
pub fn updateArSize(self: *Object, macho_file: *MachO) !void {
+ const comp = macho_file.base.comp;
+ const io = comp.io;
self.output_ar_state.size = if (self.in_archive) |ar| ar.size else size: {
const file = macho_file.getFileHandle(self.file_handle);
- break :size (try file.stat()).size;
+ break :size (try file.stat(io)).size;
};
}
diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig
index 0f42442640..e9f78a8ef2 100644
--- a/src/link/MachO/relocatable.zig
+++ b/src/link/MachO/relocatable.zig
@@ -22,7 +22,7 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Pat
const path = positionals.items[0].path().?;
const in_file = path.root_dir.handle.openFile(io, path.sub_path, .{}) catch |err|
return diags.fail("failed to open {f}: {s}", .{ path, @errorName(err) });
- const stat = in_file.stat() catch |err|
+ const stat = in_file.stat(io) catch |err|
return diags.fail("failed to stat {f}: {s}", .{ path, @errorName(err) });
const amt = in_file.copyRangeAll(0, macho_file.base.file.?, 0, stat.size) catch |err|
return diags.fail("failed to copy range of file {f}: {s}", .{ path, @errorName(err) });
diff --git a/src/link/tapi.zig b/src/link/tapi.zig
index fff25b7544..046f43eae2 100644
--- a/src/link/tapi.zig
+++ b/src/link/tapi.zig
@@ -139,9 +139,9 @@ pub const LibStub = struct {
/// Typed contents of the tbd file.
inner: []Tbd,
- pub fn loadFromFile(allocator: Allocator, file: Io.File) TapiError!LibStub {
+ pub fn loadFromFile(allocator: Allocator, io: Io, file: Io.File) TapiError!LibStub {
const filesize = blk: {
- const stat = file.stat() catch break :blk std.math.maxInt(u32);
+ const stat = file.stat(io) catch break :blk std.math.maxInt(u32);
break :blk @min(stat.size, std.math.maxInt(u32));
};
const source = try allocator.alloc(u8, filesize);