path: root/src/link/MachO/CodeSignature.zig
author     Motiejus Jakštys <motiejus@uber.com>    2023-06-09 16:02:18 -0700
committer  Andrew Kelley <andrew@ziglang.org>      2023-06-17 12:49:13 -0700
commit     d41111d7ef531f6f55a19c56205d6d2f1134c224 (patch)
tree       14d7b7764a64fa2d4d274c0726a1a587484c4999 /src/link/MachO/CodeSignature.zig
parent     5baa05664e6dac0f473c8411f6e9d8e0f62555a9 (diff)
mem: rename align*Generic to mem.align*
Anecdote 1: The generic version is far more popular than the non-generic
one in the Zig codebase:

    git grep -w alignForward | wc -l
    56
    git grep -w alignForwardGeneric | wc -l
    149

    git grep -w alignBackward | wc -l
    6
    git grep -w alignBackwardGeneric | wc -l
    15

Anecdote 2: In my project (turbonss), which does a lot of arithmetic and
alignment, I use the Generic functions exclusively.

Anecdote 3: We used only the Generic versions in the Macho Man's linker
workshop.
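As a quick illustration of what changes at call sites (a minimal sketch, not
part of this commit, assuming a Zig toolchain that already contains the
rename): the old non-generic mem.alignForward operated on usize, while the
renamed mem.alignForward keeps the alignForwardGeneric signature and takes
the integer type as its first argument.

    const std = @import("std");
    const mem = std.mem;

    test "round a file size up to a whole number of pages" {
        const page_size: u64 = 0x1000;
        const file_size: u64 = 0x1234;

        // Before this commit: mem.alignForwardGeneric(u64, file_size, page_size)
        // After this commit:  mem.alignForward(u64, file_size, page_size)
        const total_pages = mem.alignForward(u64, file_size, page_size) / page_size;
        try std.testing.expectEqual(@as(u64, 2), total_pages);
    }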
Diffstat (limited to 'src/link/MachO/CodeSignature.zig')
-rw-r--r--  src/link/MachO/CodeSignature.zig  |  8
1 file changed, 4 insertions, 4 deletions
diff --git a/src/link/MachO/CodeSignature.zig b/src/link/MachO/CodeSignature.zig
index 4709560ba7..02511dbe29 100644
--- a/src/link/MachO/CodeSignature.zig
+++ b/src/link/MachO/CodeSignature.zig
@@ -282,7 +282,7 @@ pub fn writeAdhocSignature(
     self.code_directory.inner.execSegFlags = if (opts.output_mode == .Exe) macho.CS_EXECSEG_MAIN_BINARY else 0;
     self.code_directory.inner.codeLimit = opts.file_size;
 
-    const total_pages = @intCast(u32, mem.alignForward(opts.file_size, self.page_size) / self.page_size);
+    const total_pages = @intCast(u32, mem.alignForward(usize, opts.file_size, self.page_size) / self.page_size);
 
     try self.code_directory.code_slots.ensureTotalCapacityPrecise(gpa, total_pages);
     self.code_directory.code_slots.items.len = total_pages;
@@ -357,7 +357,7 @@ fn parallelHash(
 ) !void {
     var wg: WaitGroup = .{};
 
-    const total_num_chunks = mem.alignForward(file_size, self.page_size) / self.page_size;
+    const total_num_chunks = mem.alignForward(usize, file_size, self.page_size) / self.page_size;
     assert(self.code_directory.code_slots.items.len >= total_num_chunks);
 
     const buffer = try gpa.alloc(u8, self.page_size * total_num_chunks);
@@ -421,7 +421,7 @@ pub fn size(self: CodeSignature) u32 {
 pub fn estimateSize(self: CodeSignature, file_size: u64) u32 {
     var ssize: u64 = @sizeOf(macho.SuperBlob) + @sizeOf(macho.BlobIndex) + self.code_directory.size();
     // Approx code slots
-    const total_pages = mem.alignForwardGeneric(u64, file_size, self.page_size) / self.page_size;
+    const total_pages = mem.alignForward(u64, file_size, self.page_size) / self.page_size;
     ssize += total_pages * hash_size;
     var n_special_slots: u32 = 0;
     if (self.requirements) |req| {
@@ -436,7 +436,7 @@ pub fn estimateSize(self: CodeSignature, file_size: u64) u32 {
         ssize += @sizeOf(macho.BlobIndex) + sig.size();
     }
     ssize += n_special_slots * hash_size;
-    return @intCast(u32, mem.alignForwardGeneric(u64, ssize, @sizeOf(u64)));
+    return @intCast(u32, mem.alignForward(u64, ssize, @sizeOf(u64)));
 }
 
 pub fn clear(self: *CodeSignature, allocator: Allocator) void {