From f104cfa1eb154ad51876270e10e8786b863d05f1 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 9 Jan 2023 22:37:17 -0700 Subject: compiler: add package manager skeleton see #943 --- src/Package.zig | 183 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ src/main.zig | 13 ++++ 2 files changed, 196 insertions(+) (limited to 'src') diff --git a/src/Package.zig b/src/Package.zig index df894280a9..20df256459 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -5,9 +5,11 @@ const fs = std.fs; const mem = std.mem; const Allocator = mem.Allocator; const assert = std.debug.assert; +const Hash = std.crypto.hash.sha2.Sha256; const Compilation = @import("Compilation.zig"); const Module = @import("Module.zig"); +const ThreadPool = @import("ThreadPool.zig"); pub const Table = std.StringHashMapUnmanaged(*Package); @@ -124,3 +126,184 @@ pub fn addAndAdopt(parent: *Package, gpa: Allocator, name: []const u8, child: *P child.parent = parent; return parent.add(gpa, name, child); } + +pub fn fetchAndAddDependencies( + pkg: *Package, + thread_pool: *ThreadPool, + http_client: *std.http.Client, + directory: Compilation.Directory, + global_cache_directory: Compilation.Directory, + local_cache_directory: Compilation.Directory, +) !void { + const max_bytes = 10 * 1024 * 1024; + const gpa = thread_pool.allocator; + const build_zig_ini = directory.handle.readFileAlloc(gpa, "build.zig.ini", max_bytes) catch |err| switch (err) { + error.FileNotFound => { + // Handle the same as no dependencies. + return; + }, + else => |e| return e, + }; + defer gpa.free(build_zig_ini); + + const ini: std.Ini = .{ .bytes = build_zig_ini }; + var any_error = false; + var it = ini.iterateSection("\n[dependency]\n"); + while (it.next()) |dep| { + var line_it = mem.split(u8, dep, "\n"); + var opt_id: ?[]const u8 = null; + var opt_url: ?[]const u8 = null; + var expected_hash: ?[Hash.digest_length]u8 = null; + while (line_it.next()) |kv| { + const eq_pos = mem.indexOfScalar(u8, kv, '=') orelse continue; + const key = kv[0..eq_pos]; + const value = kv[eq_pos + 1 ..]; + if (mem.eql(u8, key, "id")) { + opt_id = value; + } else if (mem.eql(u8, key, "url")) { + opt_url = value; + } else if (mem.eql(u8, key, "hash")) { + @panic("TODO parse hex digits of value into expected_hash"); + //expected_hash = value; + } else { + const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(key.ptr) - @ptrToInt(ini.bytes.ptr)); + std.log.warn("{s}/{s}:{d}:{d} unrecognized key: '{s}'", .{ + directory.path orelse ".", + "build.zig.ini", + loc.line, + loc.column, + key, + }); + } + } + + const id = opt_id orelse { + const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr)); + std.log.err("{s}/{s}:{d}:{d} missing key: 'id'", .{ + directory.path orelse ".", + "build.zig.ini", + loc.line, + loc.column, + }); + any_error = true; + continue; + }; + + const url = opt_url orelse { + const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr)); + std.log.err("{s}/{s}:{d}:{d} missing key: 'id'", .{ + directory.path orelse ".", + "build.zig.ini", + loc.line, + loc.column, + }); + any_error = true; + continue; + }; + + const sub_pkg = try fetchAndUnpack(http_client, global_cache_directory, url, expected_hash); + + try sub_pkg.fetchAndAddDependencies( + thread_pool, + http_client, + sub_pkg.root_src_directory, + global_cache_directory, + local_cache_directory, + ); + + try addAndAdopt(pkg, gpa, id, sub_pkg); + } + + if (any_error) return error.InvalidBuildZigIniFile; +} + +fn fetchAndUnpack( + 
http_client: *std.http.Client, + global_cache_directory: Compilation.Directory, + url: []const u8, + expected_hash: ?[Hash.digest_length]u8, +) !*Package { + const gpa = http_client.allocator; + + // TODO check if the expected_hash is already present in the global package cache, and + // thereby avoid both fetching and unpacking. + + const uri = try std.Uri.parse(url); + + var tmp_directory: Compilation.Directory = d: { + const s = fs.path.sep_str; + const rand_int = std.crypto.random.int(u64); + + const tmp_dir_sub_path = try std.fmt.allocPrint(gpa, "tmp" ++ s ++ "{x}", .{rand_int}); + + const path = try global_cache_directory.join(gpa, &.{tmp_dir_sub_path}); + errdefer gpa.free(path); + + const handle = try global_cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{}); + errdefer handle.close(); + + break :d .{ + .path = path, + .handle = handle, + }; + }; + defer tmp_directory.closeAndFree(gpa); + + var req = try http_client.request(uri, .{}, .{}); + defer req.deinit(); + + if (mem.endsWith(u8, uri.path, ".tar.gz")) { + // I observed the gzip stream to read 1 byte at a time, so I am using a + // buffered reader on the front of it. + var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, req.reader()); + + var gzip_stream = try std.compress.gzip.gzipStream(gpa, br.reader()); + defer gzip_stream.deinit(); + + try std.tar.pipeToFileSystem(tmp_directory.handle, gzip_stream.reader(), .{}); + } else { + // TODO: show the build.zig.ini file and line number + std.log.err("{s}: unknown package extension for path '{s}'", .{ url, uri.path }); + return error.UnknownPackageExtension; + } + + // TODO: delete files not included in the package prior to computing the package hash. + // for example, if the ini file has directives to include/not include certain files, + // apply those rules directly to the filesystem right here. This ensures that files + // not protected by the hash are not present on the file system. 
+ + const actual_hash = try computePackageHash(tmp_directory); + + if (expected_hash) |h| { + if (!mem.eql(u8, &h, &actual_hash)) { + // TODO: show the build.zig.ini file and line number + std.log.err("{s}: hash mismatch: expected: {s}, actual: {s}", .{ + url, h, actual_hash, + }); + return error.PackageHashMismatch; + } + } + + if (true) @panic("TODO move the tmp dir into place"); + + if (expected_hash == null) { + // TODO: show the build.zig.ini file and line number + std.log.err("{s}: missing hash:\nhash={s}", .{ + url, actual_hash, + }); + return error.PackageDependencyMissingHash; + } + + @panic("TODO create package and set root_src_directory"); + //return create(gpa, root_src + //gpa: Allocator, + ///// Null indicates the current working directory + //root_src_dir_path: ?[]const u8, + ///// Relative to root_src_dir_path + //root_src_path: []const u8, +} + +fn computePackageHash(pkg_directory: Compilation.Directory) ![Hash.digest_length]u8 { + _ = pkg_directory; + @panic("TODO computePackageHash"); +} diff --git a/src/main.zig b/src/main.zig index 007adb78ac..8741b4441c 100644 --- a/src/main.zig +++ b/src/main.zig @@ -4082,6 +4082,19 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi var thread_pool: ThreadPool = undefined; try thread_pool.init(gpa); defer thread_pool.deinit(); + + var http_client: std.http.Client = .{ .allocator = gpa }; + defer http_client.deinit(); + try http_client.rescanRootCertificates(); + + try main_pkg.fetchAndAddDependencies( + &thread_pool, + &http_client, + build_directory, + global_cache_directory, + local_cache_directory, + ); + const comp = Compilation.create(gpa, .{ .zig_lib_directory = zig_lib_directory, .local_cache_directory = local_cache_directory, -- cgit v1.2.3 From e0401498e928a539677f0f9eed843a0453bc8c33 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 10 Jan 2023 00:38:36 -0700 Subject: package manager: compute hash, move tmp dir into global cache --- src/Package.zig | 244 ++++++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 195 insertions(+), 49 deletions(-) (limited to 'src') diff --git a/src/Package.zig b/src/Package.zig index 20df256459..321743412a 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -10,6 +10,7 @@ const Hash = std.crypto.hash.sha2.Sha256; const Compilation = @import("Compilation.zig"); const Module = @import("Module.zig"); const ThreadPool = @import("ThreadPool.zig"); +const WaitGroup = @import("WaitGroup.zig"); pub const Table = std.StringHashMapUnmanaged(*Package); @@ -201,7 +202,13 @@ pub fn fetchAndAddDependencies( continue; }; - const sub_pkg = try fetchAndUnpack(http_client, global_cache_directory, url, expected_hash); + const sub_pkg = try fetchAndUnpack( + thread_pool, + http_client, + global_cache_directory, + url, + expected_hash, + ); try sub_pkg.fetchAndAddDependencies( thread_pool, @@ -218,6 +225,7 @@ pub fn fetchAndAddDependencies( } fn fetchAndUnpack( + thread_pool: *ThreadPool, http_client: *std.http.Client, global_cache_directory: Compilation.Directory, url: []const u8, @@ -225,71 +233,99 @@ fn fetchAndUnpack( ) !*Package { const gpa = http_client.allocator; - // TODO check if the expected_hash is already present in the global package cache, and - // thereby avoid both fetching and unpacking. + // Check if the expected_hash is already present in the global package + // cache, and thereby avoid both fetching and unpacking. 
+ const s = fs.path.sep_str; + if (expected_hash) |h| { + const pkg_dir_sub_path = "p" ++ s ++ hexDigest(h); + _ = pkg_dir_sub_path; + @panic("TODO check the p dir for the package"); + } const uri = try std.Uri.parse(url); - var tmp_directory: Compilation.Directory = d: { - const s = fs.path.sep_str; - const rand_int = std.crypto.random.int(u64); + const rand_int = std.crypto.random.int(u64); + const tmp_dir_sub_path = "tmp" ++ s ++ hex64(rand_int); - const tmp_dir_sub_path = try std.fmt.allocPrint(gpa, "tmp" ++ s ++ "{x}", .{rand_int}); + const actual_hash = a: { + var tmp_directory: Compilation.Directory = d: { + const path = try global_cache_directory.join(gpa, &.{tmp_dir_sub_path}); + errdefer gpa.free(path); - const path = try global_cache_directory.join(gpa, &.{tmp_dir_sub_path}); - errdefer gpa.free(path); + const iterable_dir = try global_cache_directory.handle.makeOpenPathIterable(tmp_dir_sub_path, .{}); + errdefer iterable_dir.close(); - const handle = try global_cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{}); - errdefer handle.close(); - - break :d .{ - .path = path, - .handle = handle, + break :d .{ + .path = path, + .handle = iterable_dir.dir, + }; }; - }; - defer tmp_directory.closeAndFree(gpa); + defer tmp_directory.closeAndFree(gpa); - var req = try http_client.request(uri, .{}, .{}); - defer req.deinit(); + var req = try http_client.request(uri, .{}, .{}); + defer req.deinit(); - if (mem.endsWith(u8, uri.path, ".tar.gz")) { - // I observed the gzip stream to read 1 byte at a time, so I am using a - // buffered reader on the front of it. - var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, req.reader()); + if (mem.endsWith(u8, uri.path, ".tar.gz")) { + // I observed the gzip stream to read 1 byte at a time, so I am using a + // buffered reader on the front of it. + var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, req.reader()); - var gzip_stream = try std.compress.gzip.gzipStream(gpa, br.reader()); - defer gzip_stream.deinit(); + var gzip_stream = try std.compress.gzip.gzipStream(gpa, br.reader()); + defer gzip_stream.deinit(); - try std.tar.pipeToFileSystem(tmp_directory.handle, gzip_stream.reader(), .{}); - } else { - // TODO: show the build.zig.ini file and line number - std.log.err("{s}: unknown package extension for path '{s}'", .{ url, uri.path }); - return error.UnknownPackageExtension; - } + try std.tar.pipeToFileSystem(tmp_directory.handle, gzip_stream.reader(), .{ + .strip_components = 1, + }); + } else { + // TODO: show the build.zig.ini file and line number + std.log.err("{s}: unknown package extension for path '{s}'", .{ url, uri.path }); + return error.UnknownPackageExtension; + } - // TODO: delete files not included in the package prior to computing the package hash. - // for example, if the ini file has directives to include/not include certain files, - // apply those rules directly to the filesystem right here. This ensures that files - // not protected by the hash are not present on the file system. + // TODO: delete files not included in the package prior to computing the package hash. + // for example, if the ini file has directives to include/not include certain files, + // apply those rules directly to the filesystem right here. This ensures that files + // not protected by the hash are not present on the file system. 
- const actual_hash = try computePackageHash(tmp_directory); + const actual_hash = try computePackageHash(thread_pool, .{ .dir = tmp_directory.handle }); - if (expected_hash) |h| { - if (!mem.eql(u8, &h, &actual_hash)) { - // TODO: show the build.zig.ini file and line number - std.log.err("{s}: hash mismatch: expected: {s}, actual: {s}", .{ - url, h, actual_hash, - }); - return error.PackageHashMismatch; + if (expected_hash) |h| { + if (!mem.eql(u8, &h, &actual_hash)) { + // TODO: show the build.zig.ini file and line number + std.log.err("{s}: hash mismatch: expected: {s}, actual: {s}", .{ + url, h, actual_hash, + }); + return error.PackageHashMismatch; + } } - } - if (true) @panic("TODO move the tmp dir into place"); + break :a actual_hash; + }; + + { + // Rename the temporary directory into the global package cache. + const pkg_dir_sub_path = "p" ++ s ++ hexDigest(actual_hash); + var handled_missing_dir = false; + while (true) { + global_cache_directory.handle.rename(tmp_dir_sub_path, pkg_dir_sub_path) catch |err| switch (err) { + error.FileNotFound => { + if (handled_missing_dir) return err; + global_cache_directory.handle.makeDir("p") catch |mkd_err| switch (mkd_err) { + error.PathAlreadyExists => handled_missing_dir = true, + else => |e| return e, + }; + continue; + }, + else => |e| return e, + }; + break; + } + } if (expected_hash == null) { // TODO: show the build.zig.ini file and line number std.log.err("{s}: missing hash:\nhash={s}", .{ - url, actual_hash, + url, std.fmt.fmtSliceHexLower(&actual_hash), }); return error.PackageDependencyMissingHash; } @@ -303,7 +339,117 @@ fn fetchAndUnpack( //root_src_path: []const u8, } -fn computePackageHash(pkg_directory: Compilation.Directory) ![Hash.digest_length]u8 { - _ = pkg_directory; - @panic("TODO computePackageHash"); +const HashedFile = struct { + path: []const u8, + hash: [Hash.digest_length]u8, + failure: Error!void, + + const Error = fs.File.OpenError || fs.File.ReadError; + + fn lessThan(context: void, lhs: *const HashedFile, rhs: *const HashedFile) bool { + _ = context; + return mem.lessThan(u8, lhs.path, rhs.path); + } +}; + +fn computePackageHash( + thread_pool: *ThreadPool, + pkg_dir: fs.IterableDir, +) ![Hash.digest_length]u8 { + const gpa = thread_pool.allocator; + + // We'll use an arena allocator for the path name strings since they all + // need to be in memory for sorting. + var arena_instance = std.heap.ArenaAllocator.init(gpa); + defer arena_instance.deinit(); + const arena = arena_instance.allocator(); + + // Collect all files, recursively, then sort. + var all_files = std.ArrayList(*HashedFile).init(gpa); + defer all_files.deinit(); + + var walker = try pkg_dir.walk(gpa); + defer walker.deinit(); + + { + // The final hash will be a hash of each file hashed independently. This + // allows hashing in parallel. 
+ var wait_group: WaitGroup = .{}; + defer wait_group.wait(); + + while (try walker.next()) |entry| { + switch (entry.kind) { + .Directory => continue, + .File => {}, + else => return error.IllegalFileTypeInPackage, + } + const hashed_file = try arena.create(HashedFile); + hashed_file.* = .{ + .path = try arena.dupe(u8, entry.path), + .hash = undefined, // to be populated by the worker + .failure = undefined, // to be populated by the worker + }; + + wait_group.start(); + try thread_pool.spawn(workerHashFile, .{ pkg_dir.dir, hashed_file, &wait_group }); + } + } + + std.sort.sort(*HashedFile, all_files.items, {}, HashedFile.lessThan); + + var hasher = Hash.init(.{}); + var any_failures = false; + for (all_files.items) |hashed_file| { + hashed_file.failure catch |err| { + any_failures = true; + std.log.err("unable to hash '{s}': {s}", .{ hashed_file.path, @errorName(err) }); + }; + hasher.update(&hashed_file.hash); + } + if (any_failures) return error.PackageHashUnavailable; + return hasher.finalResult(); +} + +fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile, wg: *WaitGroup) void { + defer wg.finish(); + hashed_file.failure = hashFileFallible(dir, hashed_file); +} + +fn hashFileFallible(dir: fs.Dir, hashed_file: *HashedFile) HashedFile.Error!void { + var buf: [8000]u8 = undefined; + var file = try dir.openFile(hashed_file.path, .{}); + var hasher = Hash.init(.{}); + while (true) { + const bytes_read = try file.read(&buf); + if (bytes_read == 0) break; + hasher.update(buf[0..bytes_read]); + } + hasher.final(&hashed_file.hash); +} + +const hex_charset = "0123456789abcdef"; + +fn hex64(x: u64) [16]u8 { + var result: [16]u8 = undefined; + var i: usize = 0; + while (i < 8) : (i += 1) { + const byte = @truncate(u8, x >> @intCast(u6, 8 * i)); + result[i * 2 + 0] = hex_charset[byte >> 4]; + result[i * 2 + 1] = hex_charset[byte & 15]; + } + return result; +} + +test hex64 { + const s = "[" ++ hex64(0x12345678_abcdef00) ++ "]"; + try std.testing.expectEqualStrings("[00efcdab78563412]", s); +} + +fn hexDigest(digest: [Hash.digest_length]u8) [Hash.digest_length * 2]u8 { + var result: [Hash.digest_length * 2]u8 = undefined; + for (digest) |byte, i| { + result[i * 2 + 0] = hex_charset[byte >> 4]; + result[i * 2 + 1] = hex_charset[byte & 15]; + } + return result; } -- cgit v1.2.3 From 876ab99f5c462e93296ef0b2f642ac2243acb31c Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 10 Jan 2023 11:15:09 -0700 Subject: disable package manager code when bootstrapping This makes building from source go faster and avoids tripping over unimplemented things in the C backend. 
--- build.zig | 1 + src/main.zig | 24 +++++++++++++----------- stage1/config.zig.in | 1 + 3 files changed, 15 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/build.zig b/build.zig index f37b44aa96..71741a0136 100644 --- a/build.zig +++ b/build.zig @@ -185,6 +185,7 @@ pub fn build(b: *Builder) !void { exe_options.addOption(bool, "llvm_has_arc", llvm_has_arc); exe_options.addOption(bool, "force_gpa", force_gpa); exe_options.addOption(bool, "only_c", only_c); + exe_options.addOption(bool, "omit_pkg_fetching_code", false); if (link_libc) { exe.linkLibC(); diff --git a/src/main.zig b/src/main.zig index 8741b4441c..d0052b518b 100644 --- a/src/main.zig +++ b/src/main.zig @@ -4083,17 +4083,19 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi try thread_pool.init(gpa); defer thread_pool.deinit(); - var http_client: std.http.Client = .{ .allocator = gpa }; - defer http_client.deinit(); - try http_client.rescanRootCertificates(); - - try main_pkg.fetchAndAddDependencies( - &thread_pool, - &http_client, - build_directory, - global_cache_directory, - local_cache_directory, - ); + if (!build_options.omit_pkg_fetching_code) { + var http_client: std.http.Client = .{ .allocator = gpa }; + defer http_client.deinit(); + try http_client.rescanRootCertificates(); + + try main_pkg.fetchAndAddDependencies( + &thread_pool, + &http_client, + build_directory, + global_cache_directory, + local_cache_directory, + ); + } const comp = Compilation.create(gpa, .{ .zig_lib_directory = zig_lib_directory, diff --git a/stage1/config.zig.in b/stage1/config.zig.in index 68d09f159b..ab55defd70 100644 --- a/stage1/config.zig.in +++ b/stage1/config.zig.in @@ -12,3 +12,4 @@ pub const have_stage1 = false; pub const skip_non_native = false; pub const only_c = false; pub const force_gpa = false; +pub const omit_pkg_fetching_code = true; -- cgit v1.2.3 From a0f2e6a29f4d5c084a248d24b25fae9f30707001 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 10 Jan 2023 16:26:27 -0700 Subject: Package: complete the package-fetching logic --- src/Package.zig | 145 ++++++++++++++++++++++++++++++++++++++++---------------- src/main.zig | 7 ++- 2 files changed, 109 insertions(+), 43 deletions(-) (limited to 'src') diff --git a/src/Package.zig b/src/Package.zig index 321743412a..c3a7f645a6 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -6,6 +6,7 @@ const mem = std.mem; const Allocator = mem.Allocator; const assert = std.debug.assert; const Hash = std.crypto.hash.sha2.Sha256; +const log = std.log.scoped(.package); const Compilation = @import("Compilation.zig"); const Module = @import("Module.zig"); @@ -128,6 +129,9 @@ pub fn addAndAdopt(parent: *Package, gpa: Allocator, name: []const u8, child: *P return parent.add(gpa, name, child); } +pub const build_zig_basename = "build.zig"; +pub const ini_basename = build_zig_basename ++ ".ini"; + pub fn fetchAndAddDependencies( pkg: *Package, thread_pool: *ThreadPool, @@ -138,7 +142,7 @@ pub fn fetchAndAddDependencies( ) !void { const max_bytes = 10 * 1024 * 1024; const gpa = thread_pool.allocator; - const build_zig_ini = directory.handle.readFileAlloc(gpa, "build.zig.ini", max_bytes) catch |err| switch (err) { + const build_zig_ini = directory.handle.readFileAlloc(gpa, ini_basename, max_bytes) catch |err| switch (err) { error.FileNotFound => { // Handle the same as no dependencies. 
return; @@ -154,7 +158,7 @@ pub fn fetchAndAddDependencies( var line_it = mem.split(u8, dep, "\n"); var opt_id: ?[]const u8 = null; var opt_url: ?[]const u8 = null; - var expected_hash: ?[Hash.digest_length]u8 = null; + var expected_hash: ?[]const u8 = null; while (line_it.next()) |kv| { const eq_pos = mem.indexOfScalar(u8, kv, '=') orelse continue; const key = kv[0..eq_pos]; @@ -164,8 +168,7 @@ pub fn fetchAndAddDependencies( } else if (mem.eql(u8, key, "url")) { opt_url = value; } else if (mem.eql(u8, key, "hash")) { - @panic("TODO parse hex digits of value into expected_hash"); - //expected_hash = value; + expected_hash = value; } else { const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(key.ptr) - @ptrToInt(ini.bytes.ptr)); std.log.warn("{s}/{s}:{d}:{d} unrecognized key: '{s}'", .{ @@ -208,6 +211,8 @@ pub fn fetchAndAddDependencies( global_cache_directory, url, expected_hash, + ini, + directory, ); try sub_pkg.fetchAndAddDependencies( @@ -229,17 +234,48 @@ fn fetchAndUnpack( http_client: *std.http.Client, global_cache_directory: Compilation.Directory, url: []const u8, - expected_hash: ?[Hash.digest_length]u8, + expected_hash: ?[]const u8, + ini: std.Ini, + comp_directory: Compilation.Directory, ) !*Package { const gpa = http_client.allocator; + const s = fs.path.sep_str; // Check if the expected_hash is already present in the global package // cache, and thereby avoid both fetching and unpacking. - const s = fs.path.sep_str; - if (expected_hash) |h| { - const pkg_dir_sub_path = "p" ++ s ++ hexDigest(h); - _ = pkg_dir_sub_path; - @panic("TODO check the p dir for the package"); + if (expected_hash) |h| cached: { + if (h.len != 2 * Hash.digest_length) { + return reportError( + ini, + comp_directory, + h.ptr, + "wrong hash size. expected: {d}, found: {d}", + .{ Hash.digest_length, h.len }, + ); + } + const hex_digest = h[0 .. 2 * Hash.digest_length]; + const pkg_dir_sub_path = "p" ++ s ++ hex_digest; + var pkg_dir = global_cache_directory.handle.openDir(pkg_dir_sub_path, .{}) catch |err| switch (err) { + error.FileNotFound => break :cached, + else => |e| return e, + }; + errdefer pkg_dir.close(); + + const ptr = try gpa.create(Package); + errdefer gpa.destroy(ptr); + + const owned_src_path = try gpa.dupe(u8, build_zig_basename); + errdefer gpa.free(owned_src_path); + + ptr.* = .{ + .root_src_directory = .{ + .path = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path}), + .handle = pkg_dir, + }, + .root_src_directory_owned = true, + .root_src_path = owned_src_path, + }; + return ptr; } const uri = try std.Uri.parse(url); @@ -277,9 +313,13 @@ fn fetchAndUnpack( .strip_components = 1, }); } else { - // TODO: show the build.zig.ini file and line number - std.log.err("{s}: unknown package extension for path '{s}'", .{ url, uri.path }); - return error.UnknownPackageExtension; + return reportError( + ini, + comp_directory, + uri.path.ptr, + "unknown file extension for path '{s}'", + .{uri.path}, + ); } // TODO: delete files not included in the package prior to computing the package hash. @@ -287,24 +327,13 @@ fn fetchAndUnpack( // apply those rules directly to the filesystem right here. This ensures that files // not protected by the hash are not present on the file system. 
- const actual_hash = try computePackageHash(thread_pool, .{ .dir = tmp_directory.handle }); - - if (expected_hash) |h| { - if (!mem.eql(u8, &h, &actual_hash)) { - // TODO: show the build.zig.ini file and line number - std.log.err("{s}: hash mismatch: expected: {s}, actual: {s}", .{ - url, h, actual_hash, - }); - return error.PackageHashMismatch; - } - } - - break :a actual_hash; + break :a try computePackageHash(thread_pool, .{ .dir = tmp_directory.handle }); }; + const pkg_dir_sub_path = "p" ++ s ++ hexDigest(actual_hash); + { // Rename the temporary directory into the global package cache. - const pkg_dir_sub_path = "p" ++ s ++ hexDigest(actual_hash); var handled_missing_dir = false; while (true) { global_cache_directory.handle.rename(tmp_dir_sub_path, pkg_dir_sub_path) catch |err| switch (err) { @@ -316,27 +345,60 @@ fn fetchAndUnpack( }; continue; }, + error.PathAlreadyExists => { + // Package has been already downloaded and may already be in use on the system. + global_cache_directory.handle.deleteTree(tmp_dir_sub_path) catch |del_err| { + std.log.warn("unable to delete temp directory: {s}", .{@errorName(del_err)}); + }; + }, else => |e| return e, }; break; } } - if (expected_hash == null) { - // TODO: show the build.zig.ini file and line number - std.log.err("{s}: missing hash:\nhash={s}", .{ - url, std.fmt.fmtSliceHexLower(&actual_hash), - }); - return error.PackageDependencyMissingHash; + if (expected_hash) |h| { + const actual_hex = hexDigest(actual_hash); + if (!mem.eql(u8, h, &actual_hex)) { + return reportError( + ini, + comp_directory, + h.ptr, + "hash mismatch: expected: {s}, found: {s}", + .{ h, actual_hex }, + ); + } + } else { + return reportError( + ini, + comp_directory, + url.ptr, + "url field is missing corresponding hash field: hash={s}", + .{std.fmt.fmtSliceHexLower(&actual_hash)}, + ); } - @panic("TODO create package and set root_src_directory"); - //return create(gpa, root_src - //gpa: Allocator, - ///// Null indicates the current working directory - //root_src_dir_path: ?[]const u8, - ///// Relative to root_src_dir_path - //root_src_path: []const u8, + return createWithDir(gpa, global_cache_directory, pkg_dir_sub_path, build_zig_basename); +} + +fn reportError( + ini: std.Ini, + comp_directory: Compilation.Directory, + src_ptr: [*]const u8, + comptime fmt_string: []const u8, + fmt_args: anytype, +) error{PackageFetchFailed} { + const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(src_ptr) - @ptrToInt(ini.bytes.ptr)); + if (comp_directory.path) |p| { + std.debug.print("{s}{c}{s}:{d}:{d}: error: " ++ fmt_string ++ "\n", .{ + p, fs.path.sep, ini_basename, loc.line + 1, loc.column + 1, + } ++ fmt_args); + } else { + std.debug.print("{s}:{d}:{d}: error: " ++ fmt_string ++ "\n", .{ + ini_basename, loc.line + 1, loc.column + 1, + } ++ fmt_args); + } + return error.PackageFetchFailed; } const HashedFile = struct { @@ -389,9 +451,10 @@ fn computePackageHash( .hash = undefined, // to be populated by the worker .failure = undefined, // to be populated by the worker }; - wait_group.start(); try thread_pool.spawn(workerHashFile, .{ pkg_dir.dir, hashed_file, &wait_group }); + + try all_files.append(hashed_file); } } diff --git a/src/main.zig b/src/main.zig index d0052b518b..739f8093e1 100644 --- a/src/main.zig +++ b/src/main.zig @@ -4088,13 +4088,16 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi defer http_client.deinit(); try http_client.rescanRootCertificates(); - try main_pkg.fetchAndAddDependencies( + 
main_pkg.fetchAndAddDependencies( &thread_pool, &http_client, build_directory, global_cache_directory, local_cache_directory, - ); + ) catch |err| switch (err) { + error.PackageFetchFailed => process.exit(1), + else => |e| return e, + }; } const comp = Compilation.create(gpa, .{ -- cgit v1.2.3 From cfcf9771c1bde357ad64d81cda9d61ba72d80b15 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 10 Jan 2023 20:21:58 -0700 Subject: zig build: support dependencies The `zig build` command now makes `@import("@dependencies")` available to the build runner package. It contains all the dependencies in a generated file that looks something like this: ```zig pub const imports = struct { pub const foo = @import("foo"); pub const @"bar.baz" = @import("bar.baz"); }; pub const build_root = struct { pub const foo = ""; pub const @"bar.baz" = ""; }; ``` The build runner exports this import so that `std.build.Builder` can access it. `std.build.Builder` uses it to implement the new `dependency` function which can be used like so: ```zig const libz_dep = b.dependency("libz", .{}); const libmp3lame_dep = b.dependency("libmp3lame", .{}); // ... lib.linkLibrary(libz_dep.artifact("z")); lib.linkLibrary(libmp3lame_dep.artifact("mp3lame")); ``` The `dependency` function calls the build.zig file of the dependency as a child Builder, and then can be ransacked for its build steps via the `artifact` function. This commit also renames `dependency.id` to `dependency.name` in the `build.zig.ini` file. --- lib/build_runner.zig | 14 ++--- lib/std/build.zig | 154 ++++++++++++++++++++++++++++++++++++++++++++++++++- src/Package.zig | 133 ++++++++++++++++++++++++++++++++------------ src/main.zig | 53 ++++++++++++++---- 4 files changed, 297 insertions(+), 57 deletions(-) (limited to 'src') diff --git a/lib/build_runner.zig b/lib/build_runner.zig index 735ddb9de1..f27542d0f5 100644 --- a/lib/build_runner.zig +++ b/lib/build_runner.zig @@ -9,6 +9,8 @@ const process = std.process; const ArrayList = std.ArrayList; const File = std.fs.File; +pub const dependencies = @import("@dependencies"); + pub fn main() !void { // Here we use an ArenaAllocator backed by a DirectAllocator because a build is a short-lived, // one shot program. We don't need to waste time freeing memory and finding places to squish @@ -207,7 +209,7 @@ pub fn main() !void { builder.debug_log_scopes = debug_log_scopes.items; builder.resolveInstallPrefix(install_prefix, dir_list); - try runBuild(builder); + try builder.runBuild(root); if (builder.validateUserInputDidItFail()) return usageAndErr(builder, true, stderr_stream); @@ -223,19 +225,11 @@ pub fn main() !void { }; } -fn runBuild(builder: *Builder) anyerror!void { - switch (@typeInfo(@typeInfo(@TypeOf(root.build)).Fn.return_type.?)) { - .Void => root.build(builder), - .ErrorUnion => try root.build(builder), - else => @compileError("expected return type of build to be 'void' or '!void'"), - } -} - fn usage(builder: *Builder, already_ran_build: bool, out_stream: anytype) !void { // run the build script to collect the options if (!already_ran_build) { builder.resolveInstallPrefix(null, .{}); - try runBuild(builder); + try builder.runBuild(root); } try out_stream.print( diff --git a/lib/std/build.zig b/lib/std/build.zig index 7ce8ae2d10..43ec1eea20 100644 --- a/lib/std/build.zig +++ b/lib/std/build.zig @@ -69,13 +69,15 @@ pub const Builder = struct { search_prefixes: ArrayList([]const u8), libc_file: ?[]const u8 = null, installed_files: ArrayList(InstalledFile), + /// Path to the directory containing build.zig. 
build_root: []const u8, cache_root: []const u8, global_cache_root: []const u8, release_mode: ?std.builtin.Mode, is_release: bool, + /// zig lib dir override_lib_dir: ?[]const u8, - vcpkg_root: VcpkgRoot, + vcpkg_root: VcpkgRoot = .unattempted, pkg_config_pkg_list: ?(PkgConfigError![]const PkgConfigPkg) = null, args: ?[][]const u8 = null, debug_log_scopes: []const []const u8 = &.{}, @@ -100,6 +102,8 @@ pub const Builder = struct { /// Information about the native target. Computed before build() is invoked. host: NativeTargetInfo, + dep_prefix: []const u8 = "", + pub const ExecError = error{ ReadFailure, ExitCodeFailure, @@ -223,7 +227,6 @@ pub const Builder = struct { .is_release = false, .override_lib_dir = null, .install_path = undefined, - .vcpkg_root = VcpkgRoot{ .unattempted = {} }, .args = null, .host = host, }; @@ -233,6 +236,89 @@ pub const Builder = struct { return self; } + fn createChild( + parent: *Builder, + dep_name: []const u8, + build_root: []const u8, + args: anytype, + ) !*Builder { + const child = try createChildOnly(parent, dep_name, build_root); + try applyArgs(child, args); + return child; + } + + fn createChildOnly(parent: *Builder, dep_name: []const u8, build_root: []const u8) !*Builder { + const allocator = parent.allocator; + const child = try allocator.create(Builder); + child.* = .{ + .allocator = allocator, + .install_tls = .{ + .step = Step.initNoOp(.top_level, "install", allocator), + .description = "Copy build artifacts to prefix path", + }, + .uninstall_tls = .{ + .step = Step.init(.top_level, "uninstall", allocator, makeUninstall), + .description = "Remove build artifacts from prefix path", + }, + .user_input_options = UserInputOptionsMap.init(allocator), + .available_options_map = AvailableOptionsMap.init(allocator), + .available_options_list = ArrayList(AvailableOption).init(allocator), + .verbose = parent.verbose, + .verbose_link = parent.verbose_link, + .verbose_cc = parent.verbose_cc, + .verbose_air = parent.verbose_air, + .verbose_llvm_ir = parent.verbose_llvm_ir, + .verbose_cimport = parent.verbose_cimport, + .verbose_llvm_cpu_features = parent.verbose_llvm_cpu_features, + .prominent_compile_errors = parent.prominent_compile_errors, + .color = parent.color, + .reference_trace = parent.reference_trace, + .invalid_user_input = false, + .zig_exe = parent.zig_exe, + .default_step = undefined, + .env_map = parent.env_map, + .top_level_steps = ArrayList(*TopLevelStep).init(allocator), + .install_prefix = undefined, + .dest_dir = parent.dest_dir, + .lib_dir = parent.lib_dir, + .exe_dir = parent.exe_dir, + .h_dir = parent.h_dir, + .install_path = parent.install_path, + .sysroot = parent.sysroot, + .search_prefixes = ArrayList([]const u8).init(allocator), + .libc_file = parent.libc_file, + .installed_files = ArrayList(InstalledFile).init(allocator), + .build_root = build_root, + .cache_root = parent.cache_root, + .global_cache_root = parent.global_cache_root, + .release_mode = parent.release_mode, + .is_release = parent.is_release, + .override_lib_dir = parent.override_lib_dir, + .debug_log_scopes = parent.debug_log_scopes, + .debug_compile_errors = parent.debug_compile_errors, + .enable_darling = parent.enable_darling, + .enable_qemu = parent.enable_qemu, + .enable_rosetta = parent.enable_rosetta, + .enable_wasmtime = parent.enable_wasmtime, + .enable_wine = parent.enable_wine, + .glibc_runtimes_dir = parent.glibc_runtimes_dir, + .host = parent.host, + .dep_prefix = parent.fmt("{s}{s}.", .{ parent.dep_prefix, dep_name }), + }; + try 
child.top_level_steps.append(&child.install_tls); + try child.top_level_steps.append(&child.uninstall_tls); + child.default_step = &child.install_tls.step; + return child; + } + + pub fn applyArgs(b: *Builder, args: anytype) !void { + // TODO this function is the way that a build.zig file communicates + // options to its dependencies. It is the programmatic way to give + // command line arguments to a build.zig script. + _ = b; + _ = args; + } + pub fn destroy(self: *Builder) void { self.env_map.deinit(); self.top_level_steps.deinit(); @@ -1300,6 +1386,70 @@ pub const Builder = struct { &[_][]const u8{ base_dir, dest_rel_path }, ) catch unreachable; } + + pub const Dependency = struct { + builder: *Builder, + + pub fn artifact(d: *Dependency, name: []const u8) *LibExeObjStep { + var found: ?*LibExeObjStep = null; + for (d.builder.install_tls.step.dependencies.items) |dep_step| { + const inst = dep_step.cast(InstallArtifactStep) orelse continue; + if (mem.eql(u8, inst.artifact.name, name)) { + if (found != null) panic("artifact name '{s}' is ambiguous", .{name}); + found = inst.artifact; + } + } + return found orelse { + for (d.builder.install_tls.step.dependencies.items) |dep_step| { + const inst = dep_step.cast(InstallArtifactStep) orelse continue; + log.info("available artifact: '{s}'", .{inst.artifact.name}); + } + panic("unable to find artifact '{s}'", .{name}); + }; + } + }; + + pub fn dependency(b: *Builder, name: []const u8, args: anytype) *Dependency { + const build_runner = @import("root"); + const deps = build_runner.dependencies; + + inline for (@typeInfo(deps.imports).Struct.decls) |decl| { + if (mem.startsWith(u8, decl.name, b.dep_prefix) and + mem.endsWith(u8, decl.name, name) and + decl.name.len == b.dep_prefix.len + name.len) + { + const build_zig = @field(deps.imports, decl.name); + const build_root = @field(deps.build_root, decl.name); + return dependencyInner(b, name, build_root, build_zig, args); + } + } + + const full_path = b.pathFromRoot("build.zig.ini"); + std.debug.print("no dependency named '{s}' in '{s}'\n", .{ name, full_path }); + std.process.exit(1); + } + + fn dependencyInner( + b: *Builder, + name: []const u8, + build_root: []const u8, + comptime build_zig: type, + args: anytype, + ) *Dependency { + const sub_builder = b.createChild(name, build_root, args) catch unreachable; + sub_builder.runBuild(build_zig) catch unreachable; + const dep = b.allocator.create(Dependency) catch unreachable; + dep.* = .{ .builder = sub_builder }; + return dep; + } + + pub fn runBuild(b: *Builder, build_zig: anytype) anyerror!void { + switch (@typeInfo(@typeInfo(@TypeOf(build_zig.build)).Fn.return_type.?)) { + .Void => build_zig.build(b), + .ErrorUnion => try build_zig.build(b), + else => @compileError("expected return type of build to be 'void' or '!void'"), + } + } }; test "builder.findProgram compiles" { diff --git a/src/Package.zig b/src/Package.zig index c3a7f645a6..02baba4ca7 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -12,6 +12,8 @@ const Compilation = @import("Compilation.zig"); const Module = @import("Module.zig"); const ThreadPool = @import("ThreadPool.zig"); const WaitGroup = @import("WaitGroup.zig"); +const Cache = @import("Cache.zig"); +const build_options = @import("build_options"); pub const Table = std.StringHashMapUnmanaged(*Package); @@ -139,6 +141,9 @@ pub fn fetchAndAddDependencies( directory: Compilation.Directory, global_cache_directory: Compilation.Directory, local_cache_directory: Compilation.Directory, + dependencies_source: 
*std.ArrayList(u8), + build_roots_source: *std.ArrayList(u8), + name_prefix: []const u8, ) !void { const max_bytes = 10 * 1024 * 1024; const gpa = thread_pool.allocator; @@ -156,15 +161,15 @@ pub fn fetchAndAddDependencies( var it = ini.iterateSection("\n[dependency]\n"); while (it.next()) |dep| { var line_it = mem.split(u8, dep, "\n"); - var opt_id: ?[]const u8 = null; + var opt_name: ?[]const u8 = null; var opt_url: ?[]const u8 = null; var expected_hash: ?[]const u8 = null; while (line_it.next()) |kv| { const eq_pos = mem.indexOfScalar(u8, kv, '=') orelse continue; const key = kv[0..eq_pos]; const value = kv[eq_pos + 1 ..]; - if (mem.eql(u8, key, "id")) { - opt_id = value; + if (mem.eql(u8, key, "name")) { + opt_name = value; } else if (mem.eql(u8, key, "url")) { opt_url = value; } else if (mem.eql(u8, key, "hash")) { @@ -181,9 +186,9 @@ pub fn fetchAndAddDependencies( } } - const id = opt_id orelse { + const name = opt_name orelse { const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr)); - std.log.err("{s}/{s}:{d}:{d} missing key: 'id'", .{ + std.log.err("{s}/{s}:{d}:{d} missing key: 'name'", .{ directory.path orelse ".", "build.zig.ini", loc.line, @@ -195,7 +200,7 @@ pub fn fetchAndAddDependencies( const url = opt_url orelse { const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr)); - std.log.err("{s}/{s}:{d}:{d} missing key: 'id'", .{ + std.log.err("{s}/{s}:{d}:{d} missing key: 'name'", .{ directory.path orelse ".", "build.zig.ini", loc.line, @@ -205,6 +210,10 @@ pub fn fetchAndAddDependencies( continue; }; + const sub_prefix = try std.fmt.allocPrint(gpa, "{s}{s}.", .{ name_prefix, name }); + defer gpa.free(sub_prefix); + const fqn = sub_prefix[0 .. sub_prefix.len - 1]; + const sub_pkg = try fetchAndUnpack( thread_pool, http_client, @@ -213,22 +222,56 @@ pub fn fetchAndAddDependencies( expected_hash, ini, directory, + build_roots_source, + fqn, ); - try sub_pkg.fetchAndAddDependencies( + try pkg.fetchAndAddDependencies( thread_pool, http_client, sub_pkg.root_src_directory, global_cache_directory, local_cache_directory, + dependencies_source, + build_roots_source, + sub_prefix, ); - try addAndAdopt(pkg, gpa, id, sub_pkg); + try addAndAdopt(pkg, gpa, fqn, sub_pkg); + + try dependencies_source.writer().print(" pub const {s} = @import(\"{}\");\n", .{ + std.zig.fmtId(fqn), std.zig.fmtEscapes(fqn), + }); } if (any_error) return error.InvalidBuildZigIniFile; } +pub fn createFilePkg( + gpa: Allocator, + global_cache_directory: Compilation.Directory, + basename: []const u8, + contents: []const u8, +) !*Package { + const rand_int = std.crypto.random.int(u64); + const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ hex64(rand_int); + { + var tmp_dir = try global_cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{}); + defer tmp_dir.close(); + try tmp_dir.writeFile(basename, contents); + } + + var hh: Cache.HashHelper = .{}; + hh.addBytes(build_options.version); + hh.addBytes(contents); + const hex_digest = hh.final(); + + const o_dir_sub_path = "o" ++ fs.path.sep_str ++ hex_digest; + try renameTmpIntoCache(global_cache_directory.handle, tmp_dir_sub_path, o_dir_sub_path); + + return createWithDir(gpa, global_cache_directory, o_dir_sub_path, basename); +} + fn fetchAndUnpack( thread_pool: *ThreadPool, http_client: *std.http.Client, @@ -237,6 +280,8 @@ fn fetchAndUnpack( expected_hash: ?[]const u8, ini: std.Ini, comp_directory: Compilation.Directory, + build_roots_source: *std.ArrayList(u8), + fqn: []const u8, ) 
!*Package { const gpa = http_client.allocator; const s = fs.path.sep_str; @@ -267,14 +312,22 @@ fn fetchAndUnpack( const owned_src_path = try gpa.dupe(u8, build_zig_basename); errdefer gpa.free(owned_src_path); + const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path}); + errdefer gpa.free(build_root); + + try build_roots_source.writer().print(" pub const {s} = \"{}\";\n", .{ + std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root), + }); + ptr.* = .{ .root_src_directory = .{ - .path = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path}), + .path = build_root, .handle = pkg_dir, }, .root_src_directory_owned = true, .root_src_path = owned_src_path, }; + return ptr; } @@ -331,31 +384,7 @@ fn fetchAndUnpack( }; const pkg_dir_sub_path = "p" ++ s ++ hexDigest(actual_hash); - - { - // Rename the temporary directory into the global package cache. - var handled_missing_dir = false; - while (true) { - global_cache_directory.handle.rename(tmp_dir_sub_path, pkg_dir_sub_path) catch |err| switch (err) { - error.FileNotFound => { - if (handled_missing_dir) return err; - global_cache_directory.handle.makeDir("p") catch |mkd_err| switch (mkd_err) { - error.PathAlreadyExists => handled_missing_dir = true, - else => |e| return e, - }; - continue; - }, - error.PathAlreadyExists => { - // Package has been already downloaded and may already be in use on the system. - global_cache_directory.handle.deleteTree(tmp_dir_sub_path) catch |del_err| { - std.log.warn("unable to delete temp directory: {s}", .{@errorName(del_err)}); - }; - }, - else => |e| return e, - }; - break; - } - } + try renameTmpIntoCache(global_cache_directory.handle, tmp_dir_sub_path, pkg_dir_sub_path); if (expected_hash) |h| { const actual_hex = hexDigest(actual_hash); @@ -378,6 +407,13 @@ fn fetchAndUnpack( ); } + const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path}); + defer gpa.free(build_root); + + try build_roots_source.writer().print(" pub const {s} = \"{}\";\n", .{ + std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root), + }); + return createWithDir(gpa, global_cache_directory, pkg_dir_sub_path, build_zig_basename); } @@ -516,3 +552,32 @@ fn hexDigest(digest: [Hash.digest_length]u8) [Hash.digest_length * 2]u8 { } return result; } + +fn renameTmpIntoCache( + cache_dir: fs.Dir, + tmp_dir_sub_path: []const u8, + dest_dir_sub_path: []const u8, +) !void { + assert(dest_dir_sub_path[1] == '/'); + var handled_missing_dir = false; + while (true) { + cache_dir.rename(tmp_dir_sub_path, dest_dir_sub_path) catch |err| switch (err) { + error.FileNotFound => { + if (handled_missing_dir) return err; + cache_dir.makeDir(dest_dir_sub_path[0..1]) catch |mkd_err| switch (mkd_err) { + error.PathAlreadyExists => handled_missing_dir = true, + else => |e| return e, + }; + continue; + }, + error.PathAlreadyExists => { + // Package has been already downloaded and may already be in use on the system. 
+ cache_dir.deleteTree(tmp_dir_sub_path) catch |del_err| { + std.log.warn("unable to delete temp directory: {s}", .{@errorName(del_err)}); + }; + }, + else => |e| return e, + }; + break; + } +} diff --git a/src/main.zig b/src/main.zig index 739f8093e1..976ea26064 100644 --- a/src/main.zig +++ b/src/main.zig @@ -3983,11 +3983,6 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi }; defer zig_lib_directory.handle.close(); - var main_pkg: Package = .{ - .root_src_directory = zig_lib_directory, - .root_src_path = "build_runner.zig", - }; - var cleanup_build_dir: ?fs.Dir = null; defer if (cleanup_build_dir) |*dir| dir.close(); @@ -4031,12 +4026,6 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi }; child_argv.items[argv_index_build_file] = build_directory.path orelse cwd_path; - var build_pkg: Package = .{ - .root_src_directory = build_directory, - .root_src_path = build_zig_basename, - }; - try main_pkg.addAndAdopt(arena, "@build", &build_pkg); - var global_cache_directory: Compilation.Directory = l: { const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena); break :l .{ @@ -4083,23 +4072,65 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi try thread_pool.init(gpa); defer thread_pool.deinit(); + var main_pkg: Package = .{ + .root_src_directory = zig_lib_directory, + .root_src_path = "build_runner.zig", + }; + if (!build_options.omit_pkg_fetching_code) { var http_client: std.http.Client = .{ .allocator = gpa }; defer http_client.deinit(); try http_client.rescanRootCertificates(); + // Here we provide an import to the build runner that allows using reflection to find + // all of the dependencies. Without this, there would be no way to use `@import` to + // access dependencies by name, since `@import` requires string literals. + var dependencies_source = std.ArrayList(u8).init(gpa); + defer dependencies_source.deinit(); + try dependencies_source.appendSlice("pub const imports = struct {\n"); + + // This will go into the same package. It contains the file system paths + // to all the build.zig files. + var build_roots_source = std.ArrayList(u8).init(gpa); + defer build_roots_source.deinit(); + + // Here we borrow main package's table and will replace it with a fresh + // one after this process completes. 
main_pkg.fetchAndAddDependencies( &thread_pool, &http_client, build_directory, global_cache_directory, local_cache_directory, + &dependencies_source, + &build_roots_source, + "", ) catch |err| switch (err) { error.PackageFetchFailed => process.exit(1), else => |e| return e, }; + + try dependencies_source.appendSlice("};\npub const build_root = struct {\n"); + try dependencies_source.appendSlice(build_roots_source.items); + try dependencies_source.appendSlice("};\n"); + + const deps_pkg = try Package.createFilePkg( + gpa, + global_cache_directory, + "dependencies.zig", + dependencies_source.items, + ); + + mem.swap(Package.Table, &main_pkg.table, &deps_pkg.table); + try main_pkg.addAndAdopt(gpa, "@dependencies", deps_pkg); } + var build_pkg: Package = .{ + .root_src_directory = build_directory, + .root_src_path = build_zig_basename, + }; + try main_pkg.addAndAdopt(gpa, "@build", &build_pkg); + const comp = Compilation.create(gpa, .{ .zig_lib_directory = zig_lib_directory, .local_cache_directory = local_cache_directory, -- cgit v1.2.3 From 2de08633800cd458fa657afed25a821236520cd8 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 11 Jan 2023 17:06:10 -0800 Subject: use local cache dir for dependencies-listing package --- src/Package.zig | 10 +++++----- src/main.zig | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) (limited to 'src') diff --git a/src/Package.zig b/src/Package.zig index 02baba4ca7..26be399c18 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -249,14 +249,14 @@ pub fn fetchAndAddDependencies( pub fn createFilePkg( gpa: Allocator, - global_cache_directory: Compilation.Directory, + cache_directory: Compilation.Directory, basename: []const u8, contents: []const u8, ) !*Package { const rand_int = std.crypto.random.int(u64); const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ hex64(rand_int); { - var tmp_dir = try global_cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{}); + var tmp_dir = try cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{}); defer tmp_dir.close(); try tmp_dir.writeFile(basename, contents); } @@ -267,9 +267,9 @@ pub fn createFilePkg( const hex_digest = hh.final(); const o_dir_sub_path = "o" ++ fs.path.sep_str ++ hex_digest; - try renameTmpIntoCache(global_cache_directory.handle, tmp_dir_sub_path, o_dir_sub_path); + try renameTmpIntoCache(cache_directory.handle, tmp_dir_sub_path, o_dir_sub_path); - return createWithDir(gpa, global_cache_directory, o_dir_sub_path, basename); + return createWithDir(gpa, cache_directory, o_dir_sub_path, basename); } fn fetchAndUnpack( @@ -558,7 +558,7 @@ fn renameTmpIntoCache( tmp_dir_sub_path: []const u8, dest_dir_sub_path: []const u8, ) !void { - assert(dest_dir_sub_path[1] == '/'); + assert(dest_dir_sub_path[1] == fs.path.sep); var handled_missing_dir = false; while (true) { cache_dir.rename(tmp_dir_sub_path, dest_dir_sub_path) catch |err| switch (err) { diff --git a/src/main.zig b/src/main.zig index 976ea26064..f203bad968 100644 --- a/src/main.zig +++ b/src/main.zig @@ -4116,7 +4116,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi const deps_pkg = try Package.createFilePkg( gpa, - global_cache_directory, + local_cache_directory, "dependencies.zig", dependencies_source.items, ); -- cgit v1.2.3 From f4d6b37068db7ef3b5828dbe2403e65bf64a0f2c Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 11 Jan 2023 17:54:34 -0800 Subject: Package: handle Windows PathAlreadyExists error code Unfortunately, error.AccessDenied is ambiguous on Windows when it is returned from 
fs.rename. --- lib/std/os.zig | 3 +++ src/Package.zig | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/lib/std/os.zig b/lib/std/os.zig index ffc294f0e6..22cdf30351 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -2414,6 +2414,9 @@ pub fn unlinkatW(dirfd: fd_t, sub_path_w: []const u16, flags: u32) UnlinkatError pub const RenameError = error{ /// In WASI, this error may occur when the file descriptor does /// not hold the required rights to rename a resource by path relative to it. + /// + /// On Windows, this error may be returned instead of PathAlreadyExists when + /// renaming a directory over an existing directory. AccessDenied, FileBusy, DiskQuota, diff --git a/src/Package.zig b/src/Package.zig index 26be399c18..23a0549aa7 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -570,7 +570,7 @@ fn renameTmpIntoCache( }; continue; }, - error.PathAlreadyExists => { + error.PathAlreadyExists, error.AccessDenied => { // Package has been already downloaded and may already be in use on the system. cache_dir.deleteTree(tmp_dir_sub_path) catch |del_err| { std.log.warn("unable to delete temp directory: {s}", .{@errorName(del_err)}); -- cgit v1.2.3
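
Note: none of these commits shows a complete `build.zig.ini` file, so the sketch below is inferred from the parser in `fetchAndAddDependencies`. The `[dependency]` section header and the `name`, `url`, and `hash` keys match what the code reads (using the post-rename `name` key, not the earlier `id`); the package name, URL, and digest are made-up placeholders, not values taken from the patches:

```ini
[dependency]
name=libz
url=https://example.com/archive/libz-1.2.13.tar.gz
hash=0000000000000000000000000000000000000000000000000000000000000000
```

Per the fetching logic above, `hash` must be the 64-character lowercase hex SHA-256 package digest; omitting it makes the fetch fail and print the computed digest so it can be pasted into the file, unrecognized keys only produce a warning, and at this stage only URLs ending in `.tar.gz` are fetched and unpacked.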