author     Rue <78876133+IOKG04@users.noreply.github.com>  2025-07-28 14:54:52 +0200
committer  GitHub <noreply@github.com>  2025-07-28 14:54:52 +0200
commit     5381e7891dcdd7b6a9e74250cdcce221fe464cdc (patch)
tree       4c74744ed84120dccae6dc9811ce945911108a17 /src/Package
parent     84ae54fbe64a15301317716e7f901d81585332d5 (diff)
parent     dea3ed7f59347e87a1b8fa237202873988084ae8 (diff)
Merge branch 'ziglang:master' into some-documentation-updates-0
Diffstat (limited to 'src/Package')
-rw-r--r--  src/Package/Fetch.zig     | 36
-rw-r--r--  src/Package/Fetch/git.zig | 65
-rw-r--r--  src/Package/Module.zig    |  2
3 files changed, 83 insertions(+), 20 deletions(-)
diff --git a/src/Package/Fetch.zig b/src/Package/Fetch.zig
index a97b60a17c..6ad0030c17 100644
--- a/src/Package/Fetch.zig
+++ b/src/Package/Fetch.zig
@@ -1197,12 +1197,18 @@ fn unpackResource(
};
switch (file_type) {
- .tar => return try unpackTarball(f, tmp_directory.handle, resource.reader()),
+ .tar => {
+ var adapter_buffer: [1024]u8 = undefined;
+ var adapter = resource.reader().adaptToNewApi(&adapter_buffer);
+ return unpackTarball(f, tmp_directory.handle, &adapter.new_interface);
+ },
.@"tar.gz" => {
const reader = resource.reader();
var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, reader);
var dcp = std.compress.gzip.decompressor(br.reader());
- return try unpackTarball(f, tmp_directory.handle, dcp.reader());
+ var adapter_buffer: [1024]u8 = undefined;
+ var adapter = dcp.reader().adaptToNewApi(&adapter_buffer);
+ return try unpackTarball(f, tmp_directory.handle, &adapter.new_interface);
},
.@"tar.xz" => {
const gpa = f.arena.child_allocator;
@@ -1215,17 +1221,19 @@ fn unpackResource(
));
};
defer dcp.deinit();
- return try unpackTarball(f, tmp_directory.handle, dcp.reader());
+ var adapter_buffer: [1024]u8 = undefined;
+ var adapter = dcp.reader().adaptToNewApi(&adapter_buffer);
+ return try unpackTarball(f, tmp_directory.handle, &adapter.new_interface);
},
.@"tar.zst" => {
- const window_size = std.compress.zstd.DecompressorOptions.default_window_buffer_len;
+ const window_size = std.compress.zstd.default_window_len;
const window_buffer = try f.arena.allocator().create([window_size]u8);
- const reader = resource.reader();
- var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, reader);
- var dcp = std.compress.zstd.decompressor(br.reader(), .{
- .window_buffer = window_buffer,
+ var adapter_buffer: [std.crypto.tls.max_ciphertext_record_len]u8 = undefined;
+ var adapter = resource.reader().adaptToNewApi(&adapter_buffer);
+ var decompress: std.compress.zstd.Decompress = .init(&adapter.new_interface, window_buffer, .{
+ .verify_checksum = false,
});
- return try unpackTarball(f, tmp_directory.handle, dcp.reader());
+ return try unpackTarball(f, tmp_directory.handle, &decompress.reader);
},
.git_pack => return unpackGitPack(f, tmp_directory.handle, &resource.git) catch |err| switch (err) {
error.FetchFailed => return error.FetchFailed,
@@ -1239,7 +1247,7 @@ fn unpackResource(
}
}
-fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackResult {
+fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: *std.Io.Reader) RunError!UnpackResult {
const eb = &f.error_bundle;
const arena = f.arena.allocator();
@@ -1250,10 +1258,10 @@ fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackRes
.strip_components = 0,
.mode_mode = .ignore,
.exclude_empty_directories = true,
- }) catch |err| return f.fail(f.location_tok, try eb.printString(
- "unable to unpack tarball to temporary directory: {s}",
- .{@errorName(err)},
- ));
+ }) catch |err| return f.fail(
+ f.location_tok,
+ try eb.printString("unable to unpack tarball to temporary directory: {t}", .{err}),
+ );
var res: UnpackResult = .{ .root_dir = diagnostics.root_dir };
if (diagnostics.errors.items.len > 0) {
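
Note on the Fetch.zig changes: unpackTarball now takes the new non-generic
*std.Io.Reader instead of an anytype reader, and each call site bridges its
legacy GenericReader through adaptToNewApi with a caller-supplied buffer (the
zstd path also switches to the new std.compress.zstd.Decompress, which reads
from a std.Io.Reader directly). A minimal standalone sketch of the same
bridging pattern, assuming a Zig master toolchain where File.deprecatedReader,
GenericReader.adaptToNewApi, and Reader.takeByte exist as used in this diff;
the file name and the 1024-byte buffer are illustrative:

const std = @import("std");

// Consumes a stream through the new std.Io.Reader interface only.
// takeByte returns error.EndOfStream once the stream is exhausted,
// mirroring how unpackTarball drains its reader.
fn countBytes(reader: *std.Io.Reader) !usize {
    var total: usize = 0;
    while (true) {
        _ = reader.takeByte() catch |err| switch (err) {
            error.EndOfStream => return total,
            else => |e| return e,
        };
        total += 1;
    }
}

pub fn main() !void {
    var file = try std.fs.cwd().openFile("build.zig", .{});
    defer file.close();

    // Same bridge as in unpackResource: a fixed buffer plus adaptToNewApi
    // wrap a deprecated GenericReader in an adapter whose .new_interface
    // field is the *std.Io.Reader the new API wants.
    var adapter_buffer: [1024]u8 = undefined;
    var adapter = file.deprecatedReader().adaptToNewApi(&adapter_buffer);
    std.debug.print("{d} bytes\n", .{try countBytes(&adapter.new_interface)});
}

The buffer handed to adaptToNewApi must outlive the adapter, which is why each
switch arm in unpackResource declares its own adapter_buffer on the stack.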
diff --git a/src/Package/Fetch/git.zig b/src/Package/Fetch/git.zig
index 4d2dae904f..a8446d48a8 100644
--- a/src/Package/Fetch/git.zig
+++ b/src/Package/Fetch/git.zig
@@ -1281,7 +1281,7 @@ pub fn indexPack(allocator: Allocator, format: Oid.Format, pack: std.fs.File, in
}
@memset(fan_out_table[fan_out_index..], count);
- var index_hashed_writer = std.compress.hashedWriter(index_writer, Oid.Hasher.init(format));
+ var index_hashed_writer = hashedWriter(index_writer, Oid.Hasher.init(format));
const writer = index_hashed_writer.writer();
try writer.writeAll(IndexHeader.signature);
try writer.writeInt(u32, IndexHeader.supported_version, .big);
@@ -1331,7 +1331,7 @@ fn indexPackFirstPass(
) !Oid {
var pack_buffered_reader = std.io.bufferedReader(pack.deprecatedReader());
var pack_counting_reader = std.io.countingReader(pack_buffered_reader.reader());
- var pack_hashed_reader = std.compress.hashedReader(pack_counting_reader.reader(), Oid.Hasher.init(format));
+ var pack_hashed_reader = hashedReader(pack_counting_reader.reader(), Oid.Hasher.init(format));
const pack_reader = pack_hashed_reader.reader();
const pack_header = try PackHeader.read(pack_reader);
@@ -1339,13 +1339,13 @@ fn indexPackFirstPass(
var current_entry: u32 = 0;
while (current_entry < pack_header.total_objects) : (current_entry += 1) {
const entry_offset = pack_counting_reader.bytes_read;
- var entry_crc32_reader = std.compress.hashedReader(pack_reader, std.hash.Crc32.init());
+ var entry_crc32_reader = hashedReader(pack_reader, std.hash.Crc32.init());
const entry_header = try EntryHeader.read(format, entry_crc32_reader.reader());
switch (entry_header) {
.commit, .tree, .blob, .tag => |object| {
var entry_decompress_stream = std.compress.zlib.decompressor(entry_crc32_reader.reader());
var entry_counting_reader = std.io.countingReader(entry_decompress_stream.reader());
- var entry_hashed_writer = std.compress.hashedWriter(std.io.null_writer, Oid.Hasher.init(format));
+ var entry_hashed_writer = hashedWriter(std.io.null_writer, Oid.Hasher.init(format));
const entry_writer = entry_hashed_writer.writer();
// The object header is not included in the pack data but is
// part of the object's ID
@@ -1432,7 +1432,7 @@ fn indexPackHashDelta(
const base_data = try resolveDeltaChain(allocator, format, pack, base_object, delta_offsets.items, cache);
var entry_hasher: Oid.Hasher = .init(format);
- var entry_hashed_writer = std.compress.hashedWriter(std.io.null_writer, &entry_hasher);
+ var entry_hashed_writer = hashedWriter(std.io.null_writer, &entry_hasher);
try entry_hashed_writer.writer().print("{s} {}\x00", .{ @tagName(base_object.type), base_data.len });
entry_hasher.update(base_data);
return entry_hasher.finalResult();
@@ -1703,3 +1703,58 @@ pub fn main() !void {
std.debug.print("Diagnostic: {}\n", .{err});
}
}
+
+/// Deprecated
+fn hashedReader(reader: anytype, hasher: anytype) HashedReader(@TypeOf(reader), @TypeOf(hasher)) {
+ return .{ .child_reader = reader, .hasher = hasher };
+}
+
+/// Deprecated
+fn HashedReader(ReaderType: type, HasherType: type) type {
+ return struct {
+ child_reader: ReaderType,
+ hasher: HasherType,
+
+ pub const Error = ReaderType.Error;
+ pub const Reader = std.io.GenericReader(*@This(), Error, read);
+
+ pub fn read(self: *@This(), buf: []u8) Error!usize {
+ const amt = try self.child_reader.read(buf);
+ self.hasher.update(buf[0..amt]);
+ return amt;
+ }
+
+ pub fn reader(self: *@This()) Reader {
+ return .{ .context = self };
+ }
+ };
+}
+
+/// Deprecated
+pub fn HashedWriter(WriterType: type, HasherType: type) type {
+ return struct {
+ child_writer: WriterType,
+ hasher: HasherType,
+
+ pub const Error = WriterType.Error;
+ pub const Writer = std.io.GenericWriter(*@This(), Error, write);
+
+ pub fn write(self: *@This(), buf: []const u8) Error!usize {
+ const amt = try self.child_writer.write(buf);
+ self.hasher.update(buf[0..amt]);
+ return amt;
+ }
+
+ pub fn writer(self: *@This()) Writer {
+ return .{ .context = self };
+ }
+ };
+}
+
+/// Deprecated
+pub fn hashedWriter(
+ writer: anytype,
+ hasher: anytype,
+) HashedWriter(@TypeOf(writer), @TypeOf(hasher)) {
+ return .{ .child_writer = writer, .hasher = hasher };
+}
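
Note on the git.zig changes: the hashedReader/hashedWriter helpers are being
removed from std.compress upstream, so this commit inlines private copies
(marked /// Deprecated) and repoints the pack-indexing code at them; behavior
is unchanged. A hedged standalone sketch of how the wrapper is used,
duplicating the HashedWriter shape from above so it compiles on its own;
std.io.fixedBufferStream and std.hash.Crc32 are the only std APIs assumed:

const std = @import("std");

fn hashedWriter(writer: anytype, hasher: anytype) HashedWriter(@TypeOf(writer), @TypeOf(hasher)) {
    return .{ .child_writer = writer, .hasher = hasher };
}

// Every byte that passes through the wrapper also feeds the hasher, so a
// checksum accumulates as a side effect of writing.
fn HashedWriter(WriterType: type, HasherType: type) type {
    return struct {
        child_writer: WriterType,
        hasher: HasherType,

        pub const Error = WriterType.Error;
        pub const Writer = std.io.GenericWriter(*@This(), Error, write);

        pub fn write(self: *@This(), buf: []const u8) Error!usize {
            const amt = try self.child_writer.write(buf);
            self.hasher.update(buf[0..amt]);
            return amt;
        }

        pub fn writer(self: *@This()) Writer {
            return .{ .context = self };
        }
    };
}

pub fn main() !void {
    var buf: [64]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);

    // Wrap the buffer's writer so a CRC-32 is computed over everything
    // written, the same way indexPack checksums its index as it writes it.
    var hw = hashedWriter(fbs.writer(), std.hash.Crc32.init());
    try hw.writer().writeAll("hello");
    std.debug.print("crc32 = 0x{x}\n", .{hw.hasher.final()});
}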
diff --git a/src/Package/Module.zig b/src/Package/Module.zig
index d829b397ba..1c941f51f4 100644
--- a/src/Package/Module.zig
+++ b/src/Package/Module.zig
@@ -250,7 +250,7 @@ pub fn create(arena: Allocator, options: CreateOptions) !*Package.Module {
};
const stack_check = b: {
- if (!target_util.supportsStackProbing(target)) {
+ if (!target_util.supportsStackProbing(target, zig_backend)) {
if (options.inherited.stack_check == true)
return error.StackCheckUnsupportedByTarget;
break :b false;
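
Note on the Module.zig change: target_util.supportsStackProbing now also
receives the active codegen backend, so stack-probing support can be decided
per backend rather than per target alone. A hypothetical sketch of the shape
such a predicate might take; the backend check and the OS/arch logic below are
assumptions for illustration, not the actual target_util implementation:

const std = @import("std");

// Illustrative only: assume non-LLVM (self-hosted) backends do not emit
// stack probes yet, and otherwise fall back to a target-based answer.
fn supportsStackProbing(target: std.Target, zig_backend: std.builtin.CompilerBackend) bool {
    if (zig_backend != .stage2_llvm) return false;
    return switch (target.os.tag) {
        .windows, .uefi => false, // sketch: probing handled differently here
        else => target.cpu.arch == .x86 or target.cpu.arch == .x86_64,
    };
}

pub fn main() void {
    const native = @import("builtin").target;
    std.debug.print("stack probing: {}\n", .{supportsStackProbing(native, .stage2_llvm)});
}

With the extra parameter, requesting stack_check on a backend that cannot
honor it now fails up front with error.StackCheckUnsupportedByTarget, as the
hunk above shows.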