diff options
| author | Jakub Konka <kubkon@jakubkonka.com> | 2023-06-19 12:53:26 +0200 |
|---|---|---|
| committer | Jakub Konka <kubkon@jakubkonka.com> | 2023-06-19 12:53:26 +0200 |
| commit | 8087c134dbeaa2925948597883d6a401f251a716 (patch) | |
| tree | 507644a52f144c677652e89f8eb3a9daff729b3b /src | |
| parent | c2554cf0f17668659d0b898fcb43b3efb8694d3a (diff) | |
| download | zig-8087c134dbeaa2925948597883d6a401f251a716.tar.gz zig-8087c134dbeaa2925948597883d6a401f251a716.zip | |
macho: calculate UUID chunk size based on available thread count
Diffstat (limited to 'src')
| -rw-r--r-- | src/link/MachO/uuid.zig | 5 |
1 file changed, 2 insertions, 3 deletions
diff --git a/src/link/MachO/uuid.zig b/src/link/MachO/uuid.zig
index 486bf43b99..4c1b6a17ee 100644
--- a/src/link/MachO/uuid.zig
+++ b/src/link/MachO/uuid.zig
@@ -7,9 +7,6 @@ const Compilation = @import("../../Compilation.zig");
 const Md5 = std.crypto.hash.Md5;
 const Hasher = @import("hasher.zig").ParallelHasher;
 
-/// Somewhat random chunk size for MD5 hash calculation.
-pub const chunk_size = 0x4000;
-
 /// Calculates Md5 hash of each chunk in parallel and then hashes all Md5 hashes to produce
 /// the final digest.
 /// While this is NOT a correct MD5 hash of the contents, this methodology is used by LLVM/LLD
@@ -17,6 +14,8 @@ pub const chunk_size = 0x4000;
 /// TODO LLD also hashes the output filename to disambiguate between same builds with different
 /// output files. Should we also do that?
 pub fn calcUuid(comp: *const Compilation, file: fs.File, file_size: u64, out: *[Md5.digest_length]u8) !void {
+    const num_chunks = @intCast(u64, comp.thread_pool.threads.len) * 10;
+    const chunk_size = @divTrunc(file_size + num_chunks - 1, num_chunks);
     const total_hashes = mem.alignForward(u64, file_size, chunk_size) / chunk_size;
     const hashes = try comp.gpa.alloc([Md5.digest_length]u8, total_hashes);
