author    Andrew Kelley <superjoe30@gmail.com>  2018-08-25 21:57:28 -0400
committer Andrew Kelley <superjoe30@gmail.com>  2018-08-25 21:57:28 -0400
commit    7109035b78ee05302bbdaadc52013b430a030b69 (patch)
tree      ae6d7202dc75f2c799f5fbcad72ccf8b02a954a4 /std/segmented_list.zig
parent    6cf248ec0824c746fc796905144c8077ccab99cf (diff)
parent    526338b00fbe1cac19f64832176af3bdf2108a56 (diff)
Merge remote-tracking branch 'origin/master' into llvm7
Diffstat (limited to 'std/segmented_list.zig')
-rw-r--r--  std/segmented_list.zig | 18
1 file changed, 13 insertions, 5 deletions
diff --git a/std/segmented_list.zig b/std/segmented_list.zig
index 6e3f32e9d6..c6d8effdd2 100644
--- a/std/segmented_list.zig
+++ b/std/segmented_list.zig
@@ -2,7 +2,7 @@ const std = @import("index.zig");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
-// Imagine that `fn at(self: &Self, index: usize) &T` is a customer asking for a box
+// Imagine that `fn at(self: *Self, index: usize) &T` is a customer asking for a box
// from a warehouse, based on a flat array, boxes ordered from 0 to N - 1.
// But the warehouse actually stores boxes in shelves of increasing powers of 2 sizes.
// So when the customer requests a box index, we have to translate it to shelf index
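To make the analogy concrete, the simplest case of that translation (assuming prealloc_item_count == 0) can be sketched as follows; the helper names below are illustrative, not the file's actual private functions:

    // Shelf s holds 2^s boxes and covers flat indices [2^s - 1, 2^(s+1) - 2].
    fn shelfIndex(list_index: usize) usize {
        // floor(log2(list_index + 1)), computed with a plain loop
        var n = list_index + 1;
        var shelf: usize = 0;
        while (n > 1) : (n >>= 1) shelf += 1;
        return shelf;
    }

    fn boxIndex(list_index: usize, shelf: usize) usize {
        // offset of the box within its shelf: (list_index + 1) - 2^shelf
        var shelf_base: usize = 1;
        var s: usize = 0;
        while (s < shelf) : (s += 1) shelf_base *= 2;
        return (list_index + 1) - shelf_base;
    }

For example, flat index 6 lands on shelf floor(log2(7)) = 2 at box 7 - 4 = 3, the last slot of the four-element shelf holding indices 3 through 6. A non-zero prealloc_item_count shifts this math, since the first boxes live in the inline preallocated segment instead.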
@@ -93,6 +93,14 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
pub const prealloc_count = prealloc_item_count;
+ fn AtType(comptime SelfType: type) type {
+ if (@typeInfo(SelfType).Pointer.is_const) {
+ return *const T;
+ } else {
+ return *T;
+ }
+ }
+
/// Deinitialize with `deinit`
pub fn init(allocator: *Allocator) Self {
return Self{
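As the doc comment above says, every init should be paired with deinit. A typical (hypothetical) caller, where `allocator` is whatever *Allocator is already in scope:

    var list = std.SegmentedList(i32, 0).init(allocator);
    defer list.deinit();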
@@ -109,7 +117,7 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
self.* = undefined;
}
- pub fn at(self: *Self, i: usize) *T {
+ pub fn at(self: var, i: usize) AtType(@typeOf(self)) {
assert(i < self.len);
return self.uncheckedAt(i);
}
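The practical effect of the `self: var` plus AtType change is that `at` now works through both *Self and *const Self, and the constness of the argument carries over to the returned element pointer. A hypothetical caller (not part of the library) illustrating the read-only side:

    const std = @import("std");

    fn readOnlySum(list: *const std.SegmentedList(i32, 0)) i64 {
        var sum: i64 = 0;
        var i: usize = 0;
        while (i < list.len) : (i += 1) {
            // Called through *const Self, `at` returns *const i32 here,
            // so this body cannot accidentally mutate the list.
            sum += list.at(i).*;
        }
        return sum;
    }

Before this change, `at` demanded a *Self, so read-only code either had to hold a mutable pointer or bypass the helper; with AtType the compiler derives the right return type from @typeOf(self).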
@@ -133,7 +141,7 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
if (self.len == 0) return null;
const index = self.len - 1;
- const result = self.uncheckedAt(index).*;
+ const result = uncheckedAt(self, index).*;
self.len = index;
return result;
}
@@ -141,7 +149,7 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
pub fn addOne(self: *Self) !*T {
const new_length = self.len + 1;
try self.growCapacity(new_length);
- const result = self.uncheckedAt(self.len);
+ const result = uncheckedAt(self, self.len);
self.len = new_length;
return result;
}
@@ -193,7 +201,7 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
self.dynamic_segments = self.allocator.shrink([*]T, self.dynamic_segments, new_cap_shelf_count);
}
- pub fn uncheckedAt(self: *Self, index: usize) *T {
+ pub fn uncheckedAt(self: var, index: usize) AtType(@typeOf(self)) {
if (index < prealloc_item_count) {
return &self.prealloc_segment[index];
}
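The same AtType trick drives uncheckedAt. Reduced to a self-contained toy, written with the commit-era spellings used in this diff (`var` parameters, @typeOf, and @typeInfo(...).Pointer, which later versions of Zig renamed), the pattern looks like the sketch below; Cell and ElemPtr are made-up names for illustration:

    const assert = @import("std").debug.assert;

    fn ElemPtr(comptime SelfType: type) type {
        if (@typeInfo(SelfType).Pointer.is_const) {
            return *const i32;
        } else {
            return *i32;
        }
    }

    const Cell = struct {
        value: i32,

        pub fn ptr(self: var) ElemPtr(@typeOf(self)) {
            return &self.value;
        }
    };

    test "one method body, const-correct both ways" {
        var cell = Cell{ .value = 1 };
        const mut_view: *Cell = &cell;
        mut_view.ptr().* += 1; // *Cell in, *i32 out: mutation allowed
        const read_view: *const Cell = &cell;
        assert(read_view.ptr().* == 2); // *const Cell in, *const i32 out
    }

Writing read_view.ptr().* = ... would be a compile error, which is exactly the guarantee the old single *Self signature could not give without duplicating the method.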