author    Jakub Konka <kubkon@jakubkonka.com>  2022-12-14 15:15:20 +0100
committer Jakub Konka <kubkon@jakubkonka.com>  2022-12-16 00:01:04 +0100
commit    79457fc76a61695560e6314246b0a8c21a7e2d2c (patch)
tree      7a35a25f880c057e27098b240f8fac7e42b842eb /src/link/MachO/CodeSignature.zig
parent    ec40c6b28fb1612f401db3c43b68aba670327e2e (diff)
download  zig-79457fc76a61695560e6314246b0a8c21a7e2d2c.tar.gz
          zig-79457fc76a61695560e6314246b0a8c21a7e2d2c.zip
macho: generalize parallel hasher; impl parallel MD5-like hash
By pulling the parallel hashing setup out of `CodeSignature.zig`, we can now reuse it in different places across the MachO linker (for now; I can see it being useful beyond MachO too, e.g. in COFF or ELF). The parallel hasher is generic over the actual hasher, such as Sha256 or MD5. The implementation is otherwise kept as it was.

For the UUID calculation, the behaviour depends on the linking mode:

* incremental - since it only supports debug mode, we don't bother with MD5 hashing of the contents and populate the UUID with random data, but only once per sequence of in-place binary patches
* traditional - in debug, we use a random string (for speed); in release, we calculate the hash, using LLVM/LLD's trick of computing a series of MD5 hashes in parallel and then a final MD5-of-MD5s to produce the digest
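The shape of that chunked hashing plus MD5-of-MD5s trick can be sketched roughly as follows. This is a hypothetical serial illustration only: the function names and signatures are mine, and the real ParallelHasher in hasher.zig spawns one job per chunk on the compiler's thread pool instead of looping serially (the diff below shows its Sha256 use for code-signature slots).

const std = @import("std");
const Md5 = std.crypto.hash.Md5;

// Illustrative sketch (not the actual hasher.zig code): split the file into
// fixed-size chunks and hash each chunk into its own output slot.
fn hashChunksSerial(
    gpa: std.mem.Allocator,
    file: std.fs.File,
    out: [][Md5.digest_length]u8,
    chunk_size: usize,
    max_file_size: usize,
) !void {
    const buffer = try gpa.alloc(u8, chunk_size);
    defer gpa.free(buffer);

    for (out) |*digest, i| {
        const fstart = i * chunk_size;
        // The last chunk may be shorter than chunk_size.
        const fsize = if (fstart + chunk_size > max_file_size)
            max_file_size - fstart
        else
            chunk_size;
        const amt = try file.preadAll(buffer[0..fsize], fstart);
        Md5.hash(buffer[0..amt], digest, .{});
    }
}

// "MD5 of MD5s": fold the per-chunk digests into one final digest, as used
// for the release-mode UUID path.
fn digestOfDigests(chunk_digests: []const [Md5.digest_length]u8, out: *[Md5.digest_length]u8) void {
    var md5 = Md5.init(.{});
    for (chunk_digests) |d| md5.update(&d);
    md5.final(out);
}

In the parallel version each call to the per-chunk hash runs as an independent task, which is why the output is a pre-allocated slice of digests indexed by chunk number; only the final digest-of-digests step is sequential.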
Diffstat (limited to 'src/link/MachO/CodeSignature.zig')
-rw-r--r--  src/link/MachO/CodeSignature.zig  56
1 file changed, 11 insertions, 45 deletions
diff --git a/src/link/MachO/CodeSignature.zig b/src/link/MachO/CodeSignature.zig
index e3c362e941..391ac28efa 100644
--- a/src/link/MachO/CodeSignature.zig
+++ b/src/link/MachO/CodeSignature.zig
@@ -1,6 +1,4 @@
const CodeSignature = @This();
-const Compilation = @import("../../Compilation.zig");
-const WaitGroup = @import("../../WaitGroup.zig");
const std = @import("std");
const assert = std.debug.assert;
@@ -9,10 +7,13 @@ const log = std.log.scoped(.link);
const macho = std.macho;
const mem = std.mem;
const testing = std.testing;
+
const Allocator = mem.Allocator;
+const Compilation = @import("../../Compilation.zig");
+const Hasher = @import("hasher.zig").ParallelHasher;
const Sha256 = std.crypto.hash.sha2.Sha256;
-const hash_size: u8 = 32;
+const hash_size = Sha256.digest_length;
const Blob = union(enum) {
code_directory: *CodeDirectory,
@@ -109,7 +110,7 @@ const CodeDirectory = struct {
fn size(self: CodeDirectory) u32 {
const code_slots = self.inner.nCodeSlots * hash_size;
const special_slots = self.inner.nSpecialSlots * hash_size;
- return @sizeOf(macho.CodeDirectory) + @intCast(u32, self.ident.len + 1) + special_slots + code_slots;
+ return @sizeOf(macho.CodeDirectory) + @intCast(u32, self.ident.len + 1 + special_slots + code_slots);
}
fn write(self: CodeDirectory, writer: anytype) !void {
@@ -287,33 +288,11 @@ pub fn writeAdhocSignature(
self.code_directory.inner.nCodeSlots = total_pages;
// Calculate hash for each page (in file) and write it to the buffer
- var wg: WaitGroup = .{};
- {
- const buffer = try gpa.alloc(u8, self.page_size * total_pages);
- defer gpa.free(buffer);
-
- const results = try gpa.alloc(fs.File.PReadError!usize, total_pages);
- defer gpa.free(results);
- {
- wg.reset();
- defer wg.wait();
-
- var i: usize = 0;
- while (i < total_pages) : (i += 1) {
- const fstart = i * self.page_size;
- const fsize = if (fstart + self.page_size > opts.file_size)
- opts.file_size - fstart
- else
- self.page_size;
- const out_hash = &self.code_directory.code_slots.items[i];
- wg.start();
- try comp.thread_pool.spawn(workerSha256Hash, .{
- opts.file, fstart, buffer[fstart..][0..fsize], out_hash, &results[i], &wg,
- });
- }
- }
- for (results) |result| _ = try result;
- }
+ var hasher = Hasher(Sha256){};
+ try hasher.hash(gpa, comp.thread_pool, opts.file, self.code_directory.code_slots.items, .{
+ .chunk_size = self.page_size,
+ .max_file_size = opts.file_size,
+ });
try blobs.append(.{ .code_directory = &self.code_directory });
header.length += @sizeOf(macho.BlobIndex);
@@ -352,7 +331,7 @@ pub fn writeAdhocSignature(
}
self.code_directory.inner.hashOffset =
- @sizeOf(macho.CodeDirectory) + @intCast(u32, self.code_directory.ident.len + 1) + self.code_directory.inner.nSpecialSlots * hash_size;
+ @sizeOf(macho.CodeDirectory) + @intCast(u32, self.code_directory.ident.len + 1 + self.code_directory.inner.nSpecialSlots * hash_size);
self.code_directory.inner.length = self.code_directory.size();
header.length += self.code_directory.size();
@@ -372,19 +351,6 @@ pub fn writeAdhocSignature(
}
}
-fn workerSha256Hash(
- file: fs.File,
- fstart: usize,
- buffer: []u8,
- hash: *[hash_size]u8,
- err: *fs.File.PReadError!usize,
- wg: *WaitGroup,
-) void {
- defer wg.finish();
- err.* = file.preadAll(buffer, fstart);
- Sha256.hash(buffer, hash, .{});
-}
-
pub fn size(self: CodeSignature) u32 {
var ssize: u32 = @sizeOf(macho.SuperBlob) + @sizeOf(macho.BlobIndex) + self.code_directory.size();
if (self.requirements) |req| {