| field | value | date |
|---|---|---|
| author | Andrew Kelley <andrew@ziglang.org> | 2019-03-13 12:56:58 -0400 |
| committer | Andrew Kelley <andrew@ziglang.org> | 2019-03-13 12:56:58 -0400 |
| commit | 54edbc6815e42457cd28fa9f1b94e732504b3fc9 (patch) | |
| tree | 36c97169bf0cd59326a93b8c3c0d458f94aed8a9 /std/heap.zig | |
| parent | 4cb55d3af6a71467b7d4399bedb961c81e9ad3d5 (diff) | |
| parent | d495dcc3c952c99e5358d9610cf09eb856f643b0 (diff) | |
| download | zig-54edbc6815e42457cd28fa9f1b94e732504b3fc9.tar.gz, zig-54edbc6815e42457cd28fa9f1b94e732504b3fc9.zip | |
Merge remote-tracking branch 'origin/master' into llvm8
Diffstat (limited to 'std/heap.zig')
| mode | file | changes |
|---|---|---|
| -rw-r--r-- | std/heap.zig | 17 |
1 file changed, 7 insertions, 10 deletions
```diff
diff --git a/std/heap.zig b/std/heap.zig
index e03e77a39c..0ea16c4e78 100644
--- a/std/heap.zig
+++ b/std/heap.zig
@@ -240,9 +240,8 @@ pub const ArenaAllocator = struct {
         while (true) {
             const cur_buf = cur_node.data[@sizeOf(BufNode)..];
             const addr = @ptrToInt(cur_buf.ptr) + self.end_index;
-            const rem = @rem(addr, alignment);
-            const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
-            const adjusted_index = self.end_index + march_forward_bytes;
+            const adjusted_addr = mem.alignForward(addr, alignment);
+            const adjusted_index = self.end_index + (adjusted_addr - addr);
             const new_end_index = adjusted_index + n;
             if (new_end_index > cur_buf.len) {
                 cur_node = try self.createNode(cur_buf.len, n + alignment);
@@ -287,9 +286,8 @@ pub const FixedBufferAllocator = struct {
     fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
         const self = @fieldParentPtr(FixedBufferAllocator, "allocator", allocator);
         const addr = @ptrToInt(self.buffer.ptr) + self.end_index;
-        const rem = @rem(addr, alignment);
-        const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
-        const adjusted_index = self.end_index + march_forward_bytes;
+        const adjusted_addr = mem.alignForward(addr, alignment);
+        const adjusted_index = self.end_index + (adjusted_addr - addr);
         const new_end_index = adjusted_index + n;
         if (new_end_index > self.buffer.len) {
             return error.OutOfMemory;
@@ -326,7 +324,7 @@ pub const ThreadSafeFixedBufferAllocator = blk: {
     if (builtin.single_threaded) {
         break :blk FixedBufferAllocator;
     } else {
-        /// lock free
+        // lock free
         break :blk struct {
             allocator: Allocator,
             end_index: usize,
@@ -349,9 +347,8 @@ pub const ThreadSafeFixedBufferAllocator = blk: {
             var end_index = @atomicLoad(usize, &self.end_index, builtin.AtomicOrder.SeqCst);
             while (true) {
                 const addr = @ptrToInt(self.buffer.ptr) + end_index;
-                const rem = @rem(addr, alignment);
-                const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
-                const adjusted_index = end_index + march_forward_bytes;
+                const adjusted_addr = mem.alignForward(addr, alignment);
+                const adjusted_index = end_index + (adjusted_addr - addr);
                 const new_end_index = adjusted_index + n;
                 if (new_end_index > self.buffer.len) {
                     return error.OutOfMemory;
```
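All three hunks make the same substitution: the hand-rolled remainder arithmetic that computed the number of padding bytes is replaced with a call to `mem.alignForward`, which rounds an address up to the next multiple of the alignment. The sketch below illustrates that equivalence; `alignForwardSketch` and `marchForwardBytes` are illustrative names written for this note (not part of the patch), and the bit-mask form assumes the alignment is a power of two, as allocator alignments are.

```zig
const std = @import("std");
const assert = std.debug.assert;

/// Illustrative stand-in for what mem.alignForward computes:
/// round `addr` up to the next multiple of `alignment`.
/// Assumes `alignment` is a power of two.
fn alignForwardSketch(addr: usize, alignment: usize) usize {
    return (addr + alignment - 1) & ~(alignment - 1);
}

/// The padding computation that the patch removes, kept here for comparison.
fn marchForwardBytes(addr: usize, alignment: usize) usize {
    const rem = addr % alignment;
    return if (rem == 0) 0 else alignment - rem;
}

test "alignForward-style rounding matches the old remainder arithmetic" {
    var addr: usize = 0;
    while (addr < 512) : (addr += 1) {
        var alignment: usize = 1;
        while (alignment <= 4096) : (alignment *= 2) {
            const adjusted_addr = alignForwardSketch(addr, alignment);
            // Old code: adjusted_index = end_index + march_forward_bytes
            // New code: adjusted_index = end_index + (adjusted_addr - addr)
            assert(adjusted_addr - addr == marchForwardBytes(addr, alignment));
        }
    }
}
```

In the allocators themselves the rounded address is converted back to a buffer index with `adjusted_addr - addr`, which is why each hunk is a net one-line reduction while the allocation logic that follows stays unchanged.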
