author     Rue <78876133+IOKG04@users.noreply.github.com>  2025-07-28 14:54:52 +0200
committer  GitHub <noreply@github.com>  2025-07-28 14:54:52 +0200
commit     5381e7891dcdd7b6a9e74250cdcce221fe464cdc (patch)
tree       4c74744ed84120dccae6dc9811ce945911108a17 /src/Package/Fetch.zig
parent     84ae54fbe64a15301317716e7f901d81585332d5 (diff)
parent     dea3ed7f59347e87a1b8fa237202873988084ae8 (diff)
Merge branch 'ziglang:master' into some-documentation-updates-0
Diffstat (limited to 'src/Package/Fetch.zig')
-rw-r--r--  src/Package/Fetch.zig  36
1 file changed, 22 insertions(+), 14 deletions(-)
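
Note: the diff below migrates unpackTarball and its call sites from the old anytype-based readers to the new *std.Io.Reader interface; each legacy reader is wrapped with adaptToNewApi() and its new_interface field is what gets passed along. A minimal standalone sketch of that adapter pattern follows, assuming a fixedBufferStream as a stand-in source and readSliceShort as the read call (both illustrative assumptions, not taken from this commit):

const std = @import("std");

pub fn main() !void {
    // Old-API reader (std.io.GenericReader) over an in-memory buffer;
    // stands in for resource.reader() / dcp.reader() in the diff.
    var fbs = std.io.fixedBufferStream("hello from the old reader API");

    // Scratch buffer backing the adapter's std.Io.Reader interface,
    // mirroring the adapter_buffer locals added in Fetch.zig.
    var adapter_buffer: [64]u8 = undefined;
    var adapter = fbs.reader().adaptToNewApi(&adapter_buffer);
    const reader: *std.Io.Reader = &adapter.new_interface;

    // Consume data through the new interface, as unpackTarball now does.
    var out: [64]u8 = undefined;
    const n = try reader.readSliceShort(&out);
    std.debug.print("{s}\n", .{out[0..n]});
}
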
diff --git a/src/Package/Fetch.zig b/src/Package/Fetch.zig
index a97b60a17c..6ad0030c17 100644
--- a/src/Package/Fetch.zig
+++ b/src/Package/Fetch.zig
@@ -1197,12 +1197,18 @@ fn unpackResource(
     };
 
     switch (file_type) {
-        .tar => return try unpackTarball(f, tmp_directory.handle, resource.reader()),
+        .tar => {
+            var adapter_buffer: [1024]u8 = undefined;
+            var adapter = resource.reader().adaptToNewApi(&adapter_buffer);
+            return unpackTarball(f, tmp_directory.handle, &adapter.new_interface);
+        },
         .@"tar.gz" => {
             const reader = resource.reader();
             var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, reader);
             var dcp = std.compress.gzip.decompressor(br.reader());
-            return try unpackTarball(f, tmp_directory.handle, dcp.reader());
+            var adapter_buffer: [1024]u8 = undefined;
+            var adapter = dcp.reader().adaptToNewApi(&adapter_buffer);
+            return try unpackTarball(f, tmp_directory.handle, &adapter.new_interface);
         },
         .@"tar.xz" => {
             const gpa = f.arena.child_allocator;
@@ -1215,17 +1221,19 @@ fn unpackResource(
                 ));
             };
             defer dcp.deinit();
-            return try unpackTarball(f, tmp_directory.handle, dcp.reader());
+            var adapter_buffer: [1024]u8 = undefined;
+            var adapter = dcp.reader().adaptToNewApi(&adapter_buffer);
+            return try unpackTarball(f, tmp_directory.handle, &adapter.new_interface);
         },
         .@"tar.zst" => {
-            const window_size = std.compress.zstd.DecompressorOptions.default_window_buffer_len;
+            const window_size = std.compress.zstd.default_window_len;
             const window_buffer = try f.arena.allocator().create([window_size]u8);
-            const reader = resource.reader();
-            var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, reader);
-            var dcp = std.compress.zstd.decompressor(br.reader(), .{
-                .window_buffer = window_buffer,
+            var adapter_buffer: [std.crypto.tls.max_ciphertext_record_len]u8 = undefined;
+            var adapter = resource.reader().adaptToNewApi(&adapter_buffer);
+            var decompress: std.compress.zstd.Decompress = .init(&adapter.new_interface, window_buffer, .{
+                .verify_checksum = false,
             });
-            return try unpackTarball(f, tmp_directory.handle, dcp.reader());
+            return try unpackTarball(f, tmp_directory.handle, &decompress.reader);
         },
         .git_pack => return unpackGitPack(f, tmp_directory.handle, &resource.git) catch |err| switch (err) {
             error.FetchFailed => return error.FetchFailed,
@@ -1239,7 +1247,7 @@ fn unpackResource(
     }
 }
 
-fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackResult {
+fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: *std.Io.Reader) RunError!UnpackResult {
     const eb = &f.error_bundle;
     const arena = f.arena.allocator();
 
@@ -1250,10 +1258,10 @@ fn unpackTarball(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackRes
         .strip_components = 0,
         .mode_mode = .ignore,
         .exclude_empty_directories = true,
-    }) catch |err| return f.fail(f.location_tok, try eb.printString(
-        "unable to unpack tarball to temporary directory: {s}",
-        .{@errorName(err)},
-    ));
+    }) catch |err| return f.fail(
+        f.location_tok,
+        try eb.printString("unable to unpack tarball to temporary directory: {t}", .{err}),
+    );
 
     var res: UnpackResult = .{ .root_dir = diagnostics.root_dir };
     if (diagnostics.errors.items.len > 0) {